1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2013 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
29 #include "tree-iterator.h"
30 #include "tree-inline.h"
31 #include "tree-pretty-print.h"
32 #include "langhooks.h"
33 #include "tree-flow.h"
40 #include "diagnostic-core.h"
42 #include "pointer-set.h"
43 #include "splay-tree.h"
47 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
48 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
50 enum gimplify_omp_var_data
56 GOVD_FIRSTPRIVATE = 16,
57 GOVD_LASTPRIVATE = 32,
60 GOVD_DEBUG_PRIVATE = 256,
61 GOVD_PRIVATE_OUTER_REF = 512,
62 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
63 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
71 ORT_COMBINED_PARALLEL = 3,
76 struct gimplify_omp_ctx
78 struct gimplify_omp_ctx *outer_context;
80 struct pointer_set_t *privatized_types;
82 enum omp_clause_default_kind default_kind;
83 enum omp_region_type region_type;
86 static struct gimplify_ctx *gimplify_ctxp;
87 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
90 /* Formal (expression) temporary table handling: multiple occurrences of
91 the same scalar expression are evaluated into the same temporary. */
93 typedef struct gimple_temp_hash_elt
96 tree temp; /* Value */
99 /* Forward declaration. */
100 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
102 /* Mark X addressable. Unlike the langhook we expect X to be in gimple
103 form and we don't do any syntax checking. */
106 mark_addressable (tree x)
108 while (handled_component_p (x))
109 x = TREE_OPERAND (x, 0);
110 if (TREE_CODE (x) == MEM_REF
111 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
112 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
113 if (TREE_CODE (x) != VAR_DECL
114 && TREE_CODE (x) != PARM_DECL
115 && TREE_CODE (x) != RESULT_DECL)
117 TREE_ADDRESSABLE (x) = 1;
119 /* Also mark the artificial SSA_NAME that points to the partition of X. */
120 if (TREE_CODE (x) == VAR_DECL
121 && !DECL_EXTERNAL (x)
123 && cfun->gimple_df != NULL
124 && cfun->gimple_df->decls_to_pointers != NULL)
127 = pointer_map_contains (cfun->gimple_df->decls_to_pointers, x);
129 TREE_ADDRESSABLE (*(tree *)namep) = 1;
133 /* Return a hash value for a formal temporary table entry. */
136 gimple_tree_hash (const void *p)
138 tree t = ((const elt_t *) p)->val;
139 return iterative_hash_expr (t, 0);
142 /* Compare two formal temporary table entries. */
145 gimple_tree_eq (const void *p1, const void *p2)
147 tree t1 = ((const elt_t *) p1)->val;
148 tree t2 = ((const elt_t *) p2)->val;
149 enum tree_code code = TREE_CODE (t1);
151 if (TREE_CODE (t2) != code
152 || TREE_TYPE (t1) != TREE_TYPE (t2))
155 if (!operand_equal_p (t1, t2, 0))
158 #ifdef ENABLE_CHECKING
159 /* Only allow them to compare equal if they also hash equal; otherwise
160 results are nondeterminate, and we fail bootstrap comparison. */
161 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
167 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
168 *SEQ_P is NULL, a new sequence is allocated. This function is
169 similar to gimple_seq_add_stmt, but does not scan the operands.
170 During gimplification, we need to manipulate statement sequences
171 before the def/use vectors have been constructed. */
174 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
176 gimple_stmt_iterator si;
181 si = gsi_last (*seq_p);
182 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
185 /* Shorter alias name for the above function for use in gimplify.c
189 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
191 gimple_seq_add_stmt_without_update (seq_p, gs);
194 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
195 NULL, a new sequence is allocated. This function is
196 similar to gimple_seq_add_seq, but does not scan the operands.
197 During gimplification, we need to manipulate statement sequences
198 before the def/use vectors have been constructed. */
201 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
203 gimple_stmt_iterator si;
208 si = gsi_last (*dst_p);
209 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
212 /* Set up a context for the gimplifier. */
215 push_gimplify_context (struct gimplify_ctx *c)
217 memset (c, '\0', sizeof (*c));
218 c->prev_context = gimplify_ctxp;
222 /* Tear down a context for the gimplifier. If BODY is non-null, then
223 put the temporaries into the outer BIND_EXPR. Otherwise, put them
226 BODY is not a sequence, but the first tuple in a sequence. */
229 pop_gimplify_context (gimple body)
231 struct gimplify_ctx *c = gimplify_ctxp;
234 && (!c->bind_expr_stack.exists ()
235 || c->bind_expr_stack.is_empty ()));
236 c->bind_expr_stack.release ();
237 gimplify_ctxp = c->prev_context;
240 declare_vars (c->temps, body, false);
242 record_vars (c->temps);
245 htab_delete (c->temp_htab);
248 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
251 gimple_push_bind_expr (gimple gimple_bind)
253 gimplify_ctxp->bind_expr_stack.reserve (8);
254 gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind);
257 /* Pop the first element off the stack of bindings. */
260 gimple_pop_bind_expr (void)
262 gimplify_ctxp->bind_expr_stack.pop ();
265 /* Return the first element of the stack of bindings. */
268 gimple_current_bind_expr (void)
270 return gimplify_ctxp->bind_expr_stack.last ();
273 /* Return the stack of bindings created during gimplification. */
276 gimple_bind_expr_stack (void)
278 return gimplify_ctxp->bind_expr_stack;
281 /* Return true iff there is a COND_EXPR between us and the innermost
282 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
285 gimple_conditional_context (void)
287 return gimplify_ctxp->conditions > 0;
290 /* Note that we've entered a COND_EXPR. */
293 gimple_push_condition (void)
295 #ifdef ENABLE_GIMPLE_CHECKING
296 if (gimplify_ctxp->conditions == 0)
297 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
299 ++(gimplify_ctxp->conditions);
302 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
303 now, add any conditional cleanups we've seen to the prequeue. */
306 gimple_pop_condition (gimple_seq *pre_p)
308 int conds = --(gimplify_ctxp->conditions);
310 gcc_assert (conds >= 0);
313 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
314 gimplify_ctxp->conditional_cleanups = NULL;
318 /* A stable comparison routine for use with splay trees and DECLs. */
321 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
326 return DECL_UID (a) - DECL_UID (b);
329 /* Create a new omp construct that deals with variable remapping. */
331 static struct gimplify_omp_ctx *
332 new_omp_context (enum omp_region_type region_type)
334 struct gimplify_omp_ctx *c;
336 c = XCNEW (struct gimplify_omp_ctx);
337 c->outer_context = gimplify_omp_ctxp;
338 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
339 c->privatized_types = pointer_set_create ();
340 c->location = input_location;
341 c->region_type = region_type;
342 if ((region_type & ORT_TASK) == 0)
343 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
345 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
350 /* Destroy an omp construct that deals with variable remapping. */
353 delete_omp_context (struct gimplify_omp_ctx *c)
355 splay_tree_delete (c->variables);
356 pointer_set_destroy (c->privatized_types);
360 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
361 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
363 /* Both gimplify the statement T and append it to *SEQ_P. This function
364 behaves exactly as gimplify_stmt, but you don't have to pass T as a
368 gimplify_and_add (tree t, gimple_seq *seq_p)
370 gimplify_stmt (&t, seq_p);
373 /* Gimplify statement T into sequence *SEQ_P, and return the first
374 tuple in the sequence of generated tuples for this statement.
375 Return NULL if gimplifying T produced no tuples. */
378 gimplify_and_return_first (tree t, gimple_seq *seq_p)
380 gimple_stmt_iterator last = gsi_last (*seq_p);
382 gimplify_and_add (t, seq_p);
384 if (!gsi_end_p (last))
387 return gsi_stmt (last);
390 return gimple_seq_first_stmt (*seq_p);
393 /* Strip off a legitimate source ending from the input string NAME of
394 length LEN. Rather than having to know the names used by all of
395 our front ends, we strip off an ending of a period followed by
396 up to five characters. (Java uses ".class".) */
399 remove_suffix (char *name, int len)
403 for (i = 2; i < 8 && len > i; i++)
405 if (name[len - i] == '.')
407 name[len - i] = '\0';
413 /* Create a new temporary name with PREFIX. Return an identifier. */
415 static GTY(()) unsigned int tmp_var_id_num;
418 create_tmp_var_name (const char *prefix)
424 char *preftmp = ASTRDUP (prefix);
426 remove_suffix (preftmp, strlen (preftmp));
427 clean_symbol_name (preftmp);
432 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
433 return get_identifier (tmp_name);
436 /* Create a new temporary variable declaration of type TYPE.
437 Do NOT push it into the current binding. */
440 create_tmp_var_raw (tree type, const char *prefix)
444 tmp_var = build_decl (input_location,
445 VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
448 /* The variable was declared by the compiler. */
449 DECL_ARTIFICIAL (tmp_var) = 1;
450 /* And we don't want debug info for it. */
451 DECL_IGNORED_P (tmp_var) = 1;
453 /* Make the variable writable. */
454 TREE_READONLY (tmp_var) = 0;
456 DECL_EXTERNAL (tmp_var) = 0;
457 TREE_STATIC (tmp_var) = 0;
458 TREE_USED (tmp_var) = 1;
463 /* Create a new temporary variable declaration of type TYPE. DO push the
464 variable into the current binding. Further, assume that this is called
465 only from gimplification or optimization, at which point the creation of
466 certain types are bugs. */
469 create_tmp_var (tree type, const char *prefix)
473 /* We don't allow types that are addressable (meaning we can't make copies),
474 or incomplete. We also used to reject every variable size objects here,
475 but now support those for which a constant upper bound can be obtained.
476 The processing for variable sizes is performed in gimple_add_tmp_var,
477 point at which it really matters and possibly reached via paths not going
478 through this function, e.g. after direct calls to create_tmp_var_raw. */
479 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
481 tmp_var = create_tmp_var_raw (type, prefix);
482 gimple_add_tmp_var (tmp_var);
486 /* Create a new temporary variable declaration of type TYPE by calling
487 create_tmp_var and if TYPE is a vector or a complex number, mark the new
488 temporary as gimple register. */
491 create_tmp_reg (tree type, const char *prefix)
495 tmp = create_tmp_var (type, prefix);
496 if (TREE_CODE (type) == COMPLEX_TYPE
497 || TREE_CODE (type) == VECTOR_TYPE)
498 DECL_GIMPLE_REG_P (tmp) = 1;
503 /* Returns true iff T is a valid RHS for an assignment to a renamed
504 user -- or front-end generated artificial -- variable. */
507 is_gimple_reg_rhs (tree t)
509 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
512 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
513 LHS, or for a call argument. */
516 is_gimple_mem_rhs (tree t)
518 /* If we're dealing with a renamable type, either source or dest must be
519 a renamed variable. */
520 if (is_gimple_reg_type (TREE_TYPE (t)))
521 return is_gimple_val (t);
523 return is_gimple_val (t) || is_gimple_lvalue (t);
526 /* Return true if T is a CALL_EXPR or an expression that can be
527 assigned to a temporary. Note that this predicate should only be
528 used during gimplification. See the rationale for this in
529 gimplify_modify_expr. */
532 is_gimple_reg_rhs_or_call (tree t)
534 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
535 || TREE_CODE (t) == CALL_EXPR);
538 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
539 this predicate should only be used during gimplification. See the
540 rationale for this in gimplify_modify_expr. */
543 is_gimple_mem_rhs_or_call (tree t)
545 /* If we're dealing with a renamable type, either source or dest must be
546 a renamed variable. */
547 if (is_gimple_reg_type (TREE_TYPE (t)))
548 return is_gimple_val (t);
550 return (is_gimple_val (t) || is_gimple_lvalue (t)
551 || TREE_CODE (t) == CALL_EXPR);
554 /* Create a temporary with a name derived from VAL. Subroutine of
555 lookup_tmp_var; nobody else should call this function. */
558 create_tmp_from_val (tree val, bool is_formal)
560 /* Drop all qualifiers and address-space information from the value type. */
561 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
562 tree var = create_tmp_var (type, get_name (val));
564 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
565 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE))
566 DECL_GIMPLE_REG_P (var) = 1;
570 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
571 an existing expression temporary. */
574 lookup_tmp_var (tree val, bool is_formal)
578 /* If not optimizing, never really reuse a temporary. local-alloc
579 won't allocate any variable that is used in more than one basic
580 block, which means it will go into memory, causing much extra
581 work in reload and final and poorer code generation, outweighing
582 the extra memory allocation here. */
583 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
584 ret = create_tmp_from_val (val, is_formal);
591 if (gimplify_ctxp->temp_htab == NULL)
592 gimplify_ctxp->temp_htab
593 = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
594 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
597 elt_p = XNEW (elt_t);
599 elt_p->temp = ret = create_tmp_from_val (val, is_formal);
600 *slot = (void *) elt_p;
604 elt_p = (elt_t *) *slot;
612 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
615 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
620 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
621 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
622 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
625 if (gimplify_ctxp->into_ssa
626 && is_gimple_reg_type (TREE_TYPE (val)))
627 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)), NULL);
629 t = lookup_tmp_var (val, is_formal);
631 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
633 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_HERE (val));
635 /* gimplify_modify_expr might want to reduce this further. */
636 gimplify_and_add (mod, pre_p);
642 /* Return a formal temporary variable initialized with VAL. PRE_P is as
643 in gimplify_expr. Only use this function if:
645 1) The value of the unfactored expression represented by VAL will not
646 change between the initialization and use of the temporary, and
647 2) The temporary will not be otherwise modified.
649 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
650 and #2 means it is inappropriate for && temps.
652 For other cases, use get_initialized_tmp_var instead. */
655 get_formal_tmp_var (tree val, gimple_seq *pre_p)
657 return internal_get_tmp_var (val, pre_p, NULL, true);
660 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
661 are as in gimplify_expr. */
664 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
666 return internal_get_tmp_var (val, pre_p, post_p, false);
669 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
670 generate debug info for them; otherwise don't. */
673 declare_vars (tree vars, gimple scope, bool debug_info)
680 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
682 temps = nreverse (last);
684 block = gimple_bind_block (scope);
685 gcc_assert (!block || TREE_CODE (block) == BLOCK);
686 if (!block || !debug_info)
688 DECL_CHAIN (last) = gimple_bind_vars (scope);
689 gimple_bind_set_vars (scope, temps);
693 /* We need to attach the nodes both to the BIND_EXPR and to its
694 associated BLOCK for debugging purposes. The key point here
695 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
696 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
697 if (BLOCK_VARS (block))
698 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
701 gimple_bind_set_vars (scope,
702 chainon (gimple_bind_vars (scope), temps));
703 BLOCK_VARS (block) = temps;
709 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
710 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
711 no such upper bound can be obtained. */
714 force_constant_size (tree var)
716 /* The only attempt we make is by querying the maximum size of objects
717 of the variable's type. */
719 HOST_WIDE_INT max_size;
721 gcc_assert (TREE_CODE (var) == VAR_DECL);
723 max_size = max_int_size_in_bytes (TREE_TYPE (var));
725 gcc_assert (max_size >= 0);
728 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
730 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
733 /* Push the temporary variable TMP into the current binding. */
736 gimple_add_tmp_var (tree tmp)
738 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
740 /* Later processing assumes that the object size is constant, which might
741 not be true at this point. Force the use of a constant upper bound in
743 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
744 force_constant_size (tmp);
746 DECL_CONTEXT (tmp) = current_function_decl;
747 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
751 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
752 gimplify_ctxp->temps = tmp;
754 /* Mark temporaries local within the nearest enclosing parallel. */
755 if (gimplify_omp_ctxp)
757 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
758 while (ctx && ctx->region_type == ORT_WORKSHARE)
759 ctx = ctx->outer_context;
761 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
770 /* This case is for nested functions. We need to expose the locals
772 body_seq = gimple_body (current_function_decl);
773 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
777 /* Determine whether to assign a location to the statement GS. */
780 should_carry_location_p (gimple gs)
782 /* Don't emit a line note for a label. We particularly don't want to
783 emit one for the break label, since it doesn't actually correspond
784 to the beginning of the loop/switch. */
785 if (gimple_code (gs) == GIMPLE_LABEL)
791 /* Return true if a location should not be emitted for this statement
792 by annotate_one_with_location. */
795 gimple_do_not_emit_location_p (gimple g)
797 return gimple_plf (g, GF_PLF_1);
800 /* Mark statement G so a location will not be emitted by
801 annotate_one_with_location. */
804 gimple_set_do_not_emit_location (gimple g)
806 /* The PLF flags are initialized to 0 when a new tuple is created,
807 so no need to initialize it anywhere. */
808 gimple_set_plf (g, GF_PLF_1, true);
811 /* Set the location for gimple statement GS to LOCATION. */
814 annotate_one_with_location (gimple gs, location_t location)
816 if (!gimple_has_location (gs)
817 && !gimple_do_not_emit_location_p (gs)
818 && should_carry_location_p (gs))
819 gimple_set_location (gs, location);
822 /* Set LOCATION for all the statements after iterator GSI in sequence
823 SEQ. If GSI is pointing to the end of the sequence, start with the
824 first statement in SEQ. */
827 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
831 gsi = gsi_start (seq);
835 for (; !gsi_end_p (gsi); gsi_next (&gsi))
836 annotate_one_with_location (gsi_stmt (gsi), location);
839 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
842 annotate_all_with_location (gimple_seq stmt_p, location_t location)
844 gimple_stmt_iterator i;
846 if (gimple_seq_empty_p (stmt_p))
849 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
851 gimple gs = gsi_stmt (i);
852 annotate_one_with_location (gs, location);
856 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
857 nodes that are referenced more than once in GENERIC functions. This is
858 necessary because gimplification (translation into GIMPLE) is performed
859 by modifying tree nodes in-place, so gimplication of a shared node in a
860 first context could generate an invalid GIMPLE form in a second context.
862 This is achieved with a simple mark/copy/unmark algorithm that walks the
863 GENERIC representation top-down, marks nodes with TREE_VISITED the first
864 time it encounters them, duplicates them if they already have TREE_VISITED
865 set, and finally removes the TREE_VISITED marks it has set.
867 The algorithm works only at the function level, i.e. it generates a GENERIC
868 representation of a function with no nodes shared within the function when
869 passed a GENERIC function (except for nodes that are allowed to be shared).
871 At the global level, it is also necessary to unshare tree nodes that are
872 referenced in more than one function, for the same aforementioned reason.
873 This requires some cooperation from the front-end. There are 2 strategies:
875 1. Manual unsharing. The front-end needs to call unshare_expr on every
876 expression that might end up being shared across functions.
878 2. Deep unsharing. This is an extension of regular unsharing. Instead
879 of calling unshare_expr on expressions that might be shared across
880 functions, the front-end pre-marks them with TREE_VISITED. This will
881 ensure that they are unshared on the first reference within functions
882 when the regular unsharing algorithm runs. The counterpart is that
883 this algorithm must look deeper than for manual unsharing, which is
884 specified by LANG_HOOKS_DEEP_UNSHARING.
886 If there are only few specific cases of node sharing across functions, it is
887 probably easier for a front-end to unshare the expressions manually. On the
888 contrary, if the expressions generated at the global level are as widespread
889 as expressions generated within functions, deep unsharing is very likely the
892 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
893 These nodes model computations that must be done once. If we were to
894 unshare something like SAVE_EXPR(i++), the gimplification process would
895 create wrong code. However, if DATA is non-null, it must hold a pointer
896 set that is used to unshare the subtrees of these nodes. */
899 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
902 enum tree_code code = TREE_CODE (t);
904 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
905 copy their subtrees if we can make sure to do it only once. */
906 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
908 if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
914 /* Stop at types, decls, constants like copy_tree_r. */
915 else if (TREE_CODE_CLASS (code) == tcc_type
916 || TREE_CODE_CLASS (code) == tcc_declaration
917 || TREE_CODE_CLASS (code) == tcc_constant
918 /* We can't do anything sensible with a BLOCK used as an
919 expression, but we also can't just die when we see it
920 because of non-expression uses. So we avert our eyes
921 and cross our fingers. Silly Java. */
925 /* Cope with the statement expression extension. */
926 else if (code == STATEMENT_LIST)
929 /* Leave the bulk of the work to copy_tree_r itself. */
931 copy_tree_r (tp, walk_subtrees, NULL);
936 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
937 If *TP has been visited already, then *TP is deeply copied by calling
938 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
941 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
944 enum tree_code code = TREE_CODE (t);
946 /* Skip types, decls, and constants. But we do want to look at their
947 types and the bounds of types. Mark them as visited so we properly
948 unmark their subtrees on the unmark pass. If we've already seen them,
949 don't look down further. */
950 if (TREE_CODE_CLASS (code) == tcc_type
951 || TREE_CODE_CLASS (code) == tcc_declaration
952 || TREE_CODE_CLASS (code) == tcc_constant)
954 if (TREE_VISITED (t))
957 TREE_VISITED (t) = 1;
960 /* If this node has been visited already, unshare it and don't look
962 else if (TREE_VISITED (t))
964 walk_tree (tp, mostly_copy_tree_r, data, NULL);
968 /* Otherwise, mark the node as visited and keep looking. */
970 TREE_VISITED (t) = 1;
975 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
976 copy_if_shared_r callback unmodified. */
979 copy_if_shared (tree *tp, void *data)
981 walk_tree (tp, copy_if_shared_r, data, NULL);
984 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
985 any nested functions. */
988 unshare_body (tree fndecl)
990 struct cgraph_node *cgn = cgraph_get_node (fndecl);
991 /* If the language requires deep unsharing, we need a pointer set to make
992 sure we don't repeatedly unshare subtrees of unshareable nodes. */
993 struct pointer_set_t *visited
994 = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
996 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
997 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
998 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
1001 pointer_set_destroy (visited);
1004 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1005 unshare_body (cgn->symbol.decl);
1008 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
1009 Subtrees are walked until the first unvisited node is encountered. */
1012 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1016 /* If this node has been visited, unmark it and keep looking. */
1017 if (TREE_VISITED (t))
1018 TREE_VISITED (t) = 0;
1020 /* Otherwise, don't look any deeper. */
1027 /* Unmark the visited trees rooted at *TP. */
1030 unmark_visited (tree *tp)
1032 walk_tree (tp, unmark_visited_r, NULL, NULL);
1035 /* Likewise, but mark all trees as not visited. */
1038 unvisit_body (tree fndecl)
1040 struct cgraph_node *cgn = cgraph_get_node (fndecl);
1042 unmark_visited (&DECL_SAVED_TREE (fndecl));
1043 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
1044 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
1047 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1048 unvisit_body (cgn->symbol.decl);
1051 /* Unconditionally make an unshared copy of EXPR. This is used when using
1052 stored expressions which span multiple functions, such as BINFO_VTABLE,
1053 as the normal unsharing process can't tell that they're shared. */
1056 unshare_expr (tree expr)
1058 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1062 /* Worker for unshare_expr_without_location. */
1065 prune_expr_location (tree *tp, int *walk_subtrees, void *)
1068 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
1074 /* Similar to unshare_expr but also prune all expression locations
1078 unshare_expr_without_location (tree expr)
1080 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1082 walk_tree (&expr, prune_expr_location, NULL, NULL);
1086 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1087 contain statements and have a value. Assign its value to a temporary
1088 and give it void_type_node. Return the temporary, or NULL_TREE if
1089 WRAPPER was already void. */
1092 voidify_wrapper_expr (tree wrapper, tree temp)
1094 tree type = TREE_TYPE (wrapper);
1095 if (type && !VOID_TYPE_P (type))
1099 /* Set p to point to the body of the wrapper. Loop until we find
1100 something that isn't a wrapper. */
1101 for (p = &wrapper; p && *p; )
1103 switch (TREE_CODE (*p))
1106 TREE_SIDE_EFFECTS (*p) = 1;
1107 TREE_TYPE (*p) = void_type_node;
1108 /* For a BIND_EXPR, the body is operand 1. */
1109 p = &BIND_EXPR_BODY (*p);
1112 case CLEANUP_POINT_EXPR:
1113 case TRY_FINALLY_EXPR:
1114 case TRY_CATCH_EXPR:
1115 TREE_SIDE_EFFECTS (*p) = 1;
1116 TREE_TYPE (*p) = void_type_node;
1117 p = &TREE_OPERAND (*p, 0);
1120 case STATEMENT_LIST:
1122 tree_stmt_iterator i = tsi_last (*p);
1123 TREE_SIDE_EFFECTS (*p) = 1;
1124 TREE_TYPE (*p) = void_type_node;
1125 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1130 /* Advance to the last statement. Set all container types to
1132 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1134 TREE_SIDE_EFFECTS (*p) = 1;
1135 TREE_TYPE (*p) = void_type_node;
1139 case TRANSACTION_EXPR:
1140 TREE_SIDE_EFFECTS (*p) = 1;
1141 TREE_TYPE (*p) = void_type_node;
1142 p = &TRANSACTION_EXPR_BODY (*p);
1146 /* Assume that any tree upon which voidify_wrapper_expr is
1147 directly called is a wrapper, and that its body is op0. */
1150 TREE_SIDE_EFFECTS (*p) = 1;
1151 TREE_TYPE (*p) = void_type_node;
1152 p = &TREE_OPERAND (*p, 0);
1160 if (p == NULL || IS_EMPTY_STMT (*p))
1164 /* The wrapper is on the RHS of an assignment that we're pushing
1166 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1167 || TREE_CODE (temp) == MODIFY_EXPR);
1168 TREE_OPERAND (temp, 1) = *p;
1173 temp = create_tmp_var (type, "retval");
1174 *p = build2 (INIT_EXPR, type, temp, *p);
1183 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1184 a temporary through which they communicate. */
1187 build_stack_save_restore (gimple *save, gimple *restore)
1191 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1192 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1193 gimple_call_set_lhs (*save, tmp_var);
1196 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1200 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1202 static enum gimplify_status
1203 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1205 tree bind_expr = *expr_p;
1206 bool old_save_stack = gimplify_ctxp->save_stack;
1209 gimple_seq body, cleanup;
1212 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1214 /* Mark variables seen in this bind expr. */
1215 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1217 if (TREE_CODE (t) == VAR_DECL)
1219 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1221 /* Mark variable as local. */
1222 if (ctx && !DECL_EXTERNAL (t)
1223 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1224 || splay_tree_lookup (ctx->variables,
1225 (splay_tree_key) t) == NULL))
1226 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1228 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1230 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1231 cfun->has_local_explicit_reg_vars = true;
1234 /* Preliminarily mark non-addressed complex variables as eligible
1235 for promotion to gimple registers. We'll transform their uses
1237 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1238 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1239 && !TREE_THIS_VOLATILE (t)
1240 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1241 && !needs_to_live_in_memory (t))
1242 DECL_GIMPLE_REG_P (t) = 1;
1245 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1246 BIND_EXPR_BLOCK (bind_expr));
1247 gimple_push_bind_expr (gimple_bind);
1249 gimplify_ctxp->save_stack = false;
1251 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1253 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1254 gimple_bind_set_body (gimple_bind, body);
1258 if (gimplify_ctxp->save_stack)
1260 gimple stack_restore;
1262 /* Save stack on entry and restore it on exit. Add a try_finally
1263 block to achieve this. Note that mudflap depends on the
1264 format of the emitted code: see mx_register_decls(). */
1265 build_stack_save_restore (&stack_save, &stack_restore);
1267 gimplify_seq_add_stmt (&cleanup, stack_restore);
1270 /* Add clobbers for all variables that go out of scope. */
1271 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1273 if (TREE_CODE (t) == VAR_DECL
1274 && !is_global_var (t)
1275 && DECL_CONTEXT (t) == current_function_decl
1276 && !DECL_HARD_REGISTER (t)
1277 && !TREE_THIS_VOLATILE (t)
1278 && !DECL_HAS_VALUE_EXPR_P (t)
1279 /* Only care for variables that have to be in memory. Others
1280 will be rewritten into SSA names, hence moved to the top-level. */
1281 && !is_gimple_reg (t)
1282 && flag_stack_reuse != SR_NONE)
1284 tree clobber = build_constructor (TREE_TYPE (t),
1286 TREE_THIS_VOLATILE (clobber) = 1;
1287 gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
1294 gimple_seq new_body;
1297 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1298 GIMPLE_TRY_FINALLY);
1301 gimplify_seq_add_stmt (&new_body, stack_save);
1302 gimplify_seq_add_stmt (&new_body, gs);
1303 gimple_bind_set_body (gimple_bind, new_body);
1306 gimplify_ctxp->save_stack = old_save_stack;
1307 gimple_pop_bind_expr ();
1309 gimplify_seq_add_stmt (pre_p, gimple_bind);
1317 *expr_p = NULL_TREE;
1321 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1322 GIMPLE value, it is assigned to a new temporary and the statement is
1323 re-written to return the temporary.
1325 PRE_P points to the sequence where side effects that must happen before
1326 STMT should be stored. */
1328 static enum gimplify_status
1329 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1332 tree ret_expr = TREE_OPERAND (stmt, 0);
1333 tree result_decl, result;
1335 if (ret_expr == error_mark_node)
1339 || TREE_CODE (ret_expr) == RESULT_DECL
1340 || ret_expr == error_mark_node)
1342 gimple ret = gimple_build_return (ret_expr);
1343 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1344 gimplify_seq_add_stmt (pre_p, ret);
1348 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1349 result_decl = NULL_TREE;
1352 result_decl = TREE_OPERAND (ret_expr, 0);
1354 /* See through a return by reference. */
1355 if (TREE_CODE (result_decl) == INDIRECT_REF)
1356 result_decl = TREE_OPERAND (result_decl, 0);
1358 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1359 || TREE_CODE (ret_expr) == INIT_EXPR)
1360 && TREE_CODE (result_decl) == RESULT_DECL);
1363 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1364 Recall that aggregate_value_p is FALSE for any aggregate type that is
1365 returned in registers. If we're returning values in registers, then
1366 we don't want to extend the lifetime of the RESULT_DECL, particularly
1367 across another call. In addition, for those aggregates for which
1368 hard_function_value generates a PARALLEL, we'll die during normal
1369 expansion of structure assignments; there's special code in expand_return
1370 to handle this case that does not exist in expand_expr. */
1373 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1375 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1377 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1378 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1379 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1380 should be effectively allocated by the caller, i.e. all calls to
1381 this function must be subject to the Return Slot Optimization. */
1382 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1383 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1385 result = result_decl;
1387 else if (gimplify_ctxp->return_temp)
1388 result = gimplify_ctxp->return_temp;
1391 result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1393 /* ??? With complex control flow (usually involving abnormal edges),
1394 we can wind up warning about an uninitialized value for this. Due
1395 to how this variable is constructed and initialized, this is never
1396 true. Give up and never warn. */
1397 TREE_NO_WARNING (result) = 1;
1399 gimplify_ctxp->return_temp = result;
1402 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1403 Then gimplify the whole thing. */
1404 if (result != result_decl)
1405 TREE_OPERAND (ret_expr, 0) = result;
1407 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1409 ret = gimple_build_return (result);
1410 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1411 gimplify_seq_add_stmt (pre_p, ret);
1416 /* Gimplify a variable-length array DECL. */
1419 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1421 /* This is a variable-sized decl. Simplify its size and mark it
1422 for deferred expansion. Note that mudflap depends on the format
1423 of the emitted code: see mx_register_decls(). */
1424 tree t, addr, ptr_type;
1426 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1427 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1429 /* All occurrences of this decl in final gimplified code will be
1430 replaced by indirection. Setting DECL_VALUE_EXPR does two
1431 things: First, it lets the rest of the gimplifier know what
1432 replacement to use. Second, it lets the debug info know
1433 where to find the value. */
1434 ptr_type = build_pointer_type (TREE_TYPE (decl));
1435 addr = create_tmp_var (ptr_type, get_name (decl));
1436 DECL_IGNORED_P (addr) = 0;
1437 t = build_fold_indirect_ref (addr);
1438 TREE_THIS_NOTRAP (t) = 1;
1439 SET_DECL_VALUE_EXPR (decl, t);
1440 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1442 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1443 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1444 size_int (DECL_ALIGN (decl)));
1445 /* The call has been built for a variable-sized object. */
1446 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1447 t = fold_convert (ptr_type, t);
1448 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1450 gimplify_and_add (t, seq_p);
1452 /* Indicate that we need to restore the stack level when the
1453 enclosing BIND_EXPR is exited. */
1454 gimplify_ctxp->save_stack = true;
1457 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1458 and initialization explicit. */
1460 static enum gimplify_status
1461 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1463 tree stmt = *stmt_p;
1464 tree decl = DECL_EXPR_DECL (stmt);
1466 *stmt_p = NULL_TREE;
1468 if (TREE_TYPE (decl) == error_mark_node)
1471 if ((TREE_CODE (decl) == TYPE_DECL
1472 || TREE_CODE (decl) == VAR_DECL)
1473 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1474 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1476 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1477 in case its size expressions contain problematic nodes like CALL_EXPR. */
1478 if (TREE_CODE (decl) == TYPE_DECL
1479 && DECL_ORIGINAL_TYPE (decl)
1480 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1481 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1483 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1485 tree init = DECL_INITIAL (decl);
1487 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1488 || (!TREE_STATIC (decl)
1489 && flag_stack_check == GENERIC_STACK_CHECK
1490 && compare_tree_int (DECL_SIZE_UNIT (decl),
1491 STACK_CHECK_MAX_VAR_SIZE) > 0))
1492 gimplify_vla_decl (decl, seq_p);
1494 /* Some front ends do not explicitly declare all anonymous
1495 artificial variables. We compensate here by declaring the
1496 variables, though it would be better if the front ends would
1497 explicitly declare them. */
1498 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1499 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1500 gimple_add_tmp_var (decl);
1502 if (init && init != error_mark_node)
1504 if (!TREE_STATIC (decl))
1506 DECL_INITIAL (decl) = NULL_TREE;
1507 init = build2 (INIT_EXPR, void_type_node, decl, init);
1508 gimplify_and_add (init, seq_p);
1512 /* We must still examine initializers for static variables
1513 as they may contain a label address. */
1514 walk_tree (&init, force_labels_r, NULL, NULL);
1521 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1522 and replacing the LOOP_EXPR with goto, but if the loop contains an
1523 EXIT_EXPR, we need to append a label for it to jump to. */
1525 static enum gimplify_status
1526 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1528 tree saved_label = gimplify_ctxp->exit_label;
1529 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1531 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1533 gimplify_ctxp->exit_label = NULL_TREE;
1535 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1537 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1539 if (gimplify_ctxp->exit_label)
1540 gimplify_seq_add_stmt (pre_p,
1541 gimple_build_label (gimplify_ctxp->exit_label));
1543 gimplify_ctxp->exit_label = saved_label;
1549 /* Gimplify a statement list onto a sequence. These may be created either
1550 by an enlightened front-end, or by shortcut_cond_expr. */
1552 static enum gimplify_status
1553 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1555 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1557 tree_stmt_iterator i = tsi_start (*expr_p);
1559 while (!tsi_end_p (i))
1561 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1574 /* Compare two case labels. Because the front end should already have
1575 made sure that case ranges do not overlap, it is enough to only compare
1576 the CASE_LOW values of each case label. */
1579 compare_case_labels (const void *p1, const void *p2)
1581 const_tree const case1 = *(const_tree const*)p1;
1582 const_tree const case2 = *(const_tree const*)p2;
1584 /* The 'default' case label always goes first. */
1585 if (!CASE_LOW (case1))
1587 else if (!CASE_LOW (case2))
1590 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1593 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1596 sort_case_labels (vec<tree> label_vec)
1598 label_vec.qsort (compare_case_labels);
1601 /* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.
1603 LABELS is a vector that contains all case labels to look at.
1605 INDEX_TYPE is the type of the switch index expression. Case labels
1606 in LABELS are discarded if their values are not in the value range
1607 covered by INDEX_TYPE. The remaining case label values are folded
1610 If a default case exists in LABELS, it is removed from LABELS and
1611 returned in DEFAULT_CASEP. If no default case exists, but the
1612 case labels already cover the whole range of INDEX_TYPE, a default
1613 case is returned pointing to one of the existing case labels.
1614 Otherwise DEFAULT_CASEP is set to NULL_TREE.
1616 DEFAULT_CASEP may be NULL, in which case the above comment doesn't
1617 apply and no action is taken regardless of whether a default case is
1621 preprocess_case_label_vec_for_gimple (vec<tree> labels,
1623 tree *default_casep)
1625 tree min_value, max_value;
1626 tree default_case = NULL_TREE;
1630 min_value = TYPE_MIN_VALUE (index_type);
1631 max_value = TYPE_MAX_VALUE (index_type);
1632 while (i < labels.length ())
1634 tree elt = labels[i];
1635 tree low = CASE_LOW (elt);
1636 tree high = CASE_HIGH (elt);
1637 bool remove_element = FALSE;
1641 gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
1642 gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);
1644 /* This is a non-default case label, i.e. it has a value.
1646 See if the case label is reachable within the range of
1647 the index type. Remove out-of-range case values. Turn
1648 case ranges into a canonical form (high > low strictly)
1649 and convert the case label values to the index type.
1651 NB: The type of gimple_switch_index() may be the promoted
1652 type, but the case labels retain the original type. */
1656 /* This is a case range. Discard empty ranges.
1657 If the bounds or the range are equal, turn this
1658 into a simple (one-value) case. */
1659 int cmp = tree_int_cst_compare (high, low);
1661 remove_element = TRUE;
1668 /* If the simple case value is unreachable, ignore it. */
1669 if ((TREE_CODE (min_value) == INTEGER_CST
1670 && tree_int_cst_compare (low, min_value) < 0)
1671 || (TREE_CODE (max_value) == INTEGER_CST
1672 && tree_int_cst_compare (low, max_value) > 0))
1673 remove_element = TRUE;
1675 low = fold_convert (index_type, low);
1679 /* If the entire case range is unreachable, ignore it. */
1680 if ((TREE_CODE (min_value) == INTEGER_CST
1681 && tree_int_cst_compare (high, min_value) < 0)
1682 || (TREE_CODE (max_value) == INTEGER_CST
1683 && tree_int_cst_compare (low, max_value) > 0))
1684 remove_element = TRUE;
1687 /* If the lower bound is less than the index type's
1688 minimum value, truncate the range bounds. */
1689 if (TREE_CODE (min_value) == INTEGER_CST
1690 && tree_int_cst_compare (low, min_value) < 0)
1692 low = fold_convert (index_type, low);
1694 /* If the upper bound is greater than the index type's
1695 maximum value, truncate the range bounds. */
1696 if (TREE_CODE (max_value) == INTEGER_CST
1697 && tree_int_cst_compare (high, max_value) > 0)
1699 high = fold_convert (index_type, high);
1701 /* We may have folded a case range to a one-value case. */
1702 if (tree_int_cst_equal (low, high))
1707 CASE_LOW (elt) = low;
1708 CASE_HIGH (elt) = high;
1712 gcc_assert (!default_case);
1714 /* The default case must be passed separately to the
1715 gimple_build_switch routine. But if DEFAULT_CASEP
1716 is NULL, we do not remove the default case (it would
1717 be completely lost). */
1719 remove_element = TRUE;
1723 labels.ordered_remove (i);
1729 if (!labels.is_empty ())
1730 sort_case_labels (labels);
1732 if (default_casep && !default_case)
1734 /* If the switch has no default label, add one, so that we jump
1735 around the switch body. If the labels already cover the whole
1736 range of the switch index_type, add the default label pointing
1737 to one of the existing labels. */
1739 && TYPE_MIN_VALUE (index_type)
1740 && TYPE_MAX_VALUE (index_type)
1741 && tree_int_cst_equal (CASE_LOW (labels[0]),
1742 TYPE_MIN_VALUE (index_type)))
1744 tree low, high = CASE_HIGH (labels[len - 1]);
1746 high = CASE_LOW (labels[len - 1]);
1747 if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
1749 for (i = 1; i < len; i++)
1751 high = CASE_LOW (labels[i]);
1752 low = CASE_HIGH (labels[i - 1]);
1754 low = CASE_LOW (labels[i - 1]);
1755 if ((TREE_INT_CST_LOW (low) + 1
1756 != TREE_INT_CST_LOW (high))
1757 || (TREE_INT_CST_HIGH (low)
1758 + (TREE_INT_CST_LOW (high) == 0)
1759 != TREE_INT_CST_HIGH (high)))
1764 tree label = CASE_LABEL (labels[0]);
1765 default_case = build_case_label (NULL_TREE, NULL_TREE,
1773 *default_casep = default_case;
1776 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1779 static enum gimplify_status
1780 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1782 tree switch_expr = *expr_p;
1783 gimple_seq switch_body_seq = NULL;
1784 enum gimplify_status ret;
1785 tree index_type = TREE_TYPE (switch_expr);
1786 if (index_type == NULL_TREE)
1787 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
1789 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1791 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1794 if (SWITCH_BODY (switch_expr))
1797 vec<tree> saved_labels;
1798 tree default_case = NULL_TREE;
1799 gimple gimple_switch;
1801 /* If someone can be bothered to fill in the labels, they can
1802 be bothered to null out the body too. */
1803 gcc_assert (!SWITCH_LABELS (switch_expr));
1805 /* Save old labels, get new ones from body, then restore the old
1806 labels. Save all the things from the switch body to append after. */
1807 saved_labels = gimplify_ctxp->case_labels;
1808 gimplify_ctxp->case_labels.create (8);
1810 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1811 labels = gimplify_ctxp->case_labels;
1812 gimplify_ctxp->case_labels = saved_labels;
1814 preprocess_case_label_vec_for_gimple (labels, index_type,
1822 = build_case_label (NULL_TREE, NULL_TREE,
1823 create_artificial_label (UNKNOWN_LOCATION));
1824 new_default = gimple_build_label (CASE_LABEL (default_case));
1825 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1828 gimple_switch = gimple_build_switch (SWITCH_COND (switch_expr),
1829 default_case, labels);
1830 gimplify_seq_add_stmt (pre_p, gimple_switch);
1831 gimplify_seq_add_seq (pre_p, switch_body_seq);
1835 gcc_assert (SWITCH_LABELS (switch_expr));
1840 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
1842 static enum gimplify_status
1843 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1845 struct gimplify_ctx *ctxp;
1846 gimple gimple_label;
1848 /* Invalid OpenMP programs can play Duff's Device type games with
1849 #pragma omp parallel. At least in the C front end, we don't
1850 detect such invalid branches until after gimplification. */
1851 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1852 if (ctxp->case_labels.exists ())
1855 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1856 ctxp->case_labels.safe_push (*expr_p);
1857 gimplify_seq_add_stmt (pre_p, gimple_label);
1862 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1866 build_and_jump (tree *label_p)
1868 if (label_p == NULL)
1869 /* If there's nowhere to jump, just fall through. */
1872 if (*label_p == NULL_TREE)
1874 tree label = create_artificial_label (UNKNOWN_LOCATION);
1878 return build1 (GOTO_EXPR, void_type_node, *label_p);
1881 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1882 This also involves building a label to jump to and communicating it to
1883 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1885 static enum gimplify_status
1886 gimplify_exit_expr (tree *expr_p)
1888 tree cond = TREE_OPERAND (*expr_p, 0);
1891 expr = build_and_jump (&gimplify_ctxp->exit_label);
1892 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1898 /* A helper function to be called via walk_tree. Mark all labels under *TP
1899 as being forced. To be called for DECL_INITIAL of static variables. */
1902 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1906 if (TREE_CODE (*tp) == LABEL_DECL)
1907 FORCED_LABEL (*tp) = 1;
1912 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1913 different from its canonical type, wrap the whole thing inside a
1914 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1917 The canonical type of a COMPONENT_REF is the type of the field being
1918 referenced--unless the field is a bit-field which can be read directly
1919 in a smaller mode, in which case the canonical type is the
1920 sign-appropriate type corresponding to that mode. */
1923 canonicalize_component_ref (tree *expr_p)
1925 tree expr = *expr_p;
1928 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1930 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1931 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1933 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1935 /* One could argue that all the stuff below is not necessary for
1936 the non-bitfield case and declare it a FE error if type
1937 adjustment would be needed. */
1938 if (TREE_TYPE (expr) != type)
1940 #ifdef ENABLE_TYPES_CHECKING
1941 tree old_type = TREE_TYPE (expr);
1945 /* We need to preserve qualifiers and propagate them from
1947 type_quals = TYPE_QUALS (type)
1948 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1949 if (TYPE_QUALS (type) != type_quals)
1950 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1952 /* Set the type of the COMPONENT_REF to the underlying type. */
1953 TREE_TYPE (expr) = type;
1955 #ifdef ENABLE_TYPES_CHECKING
1956 /* It is now a FE error, if the conversion from the canonical
1957 type to the original expression type is not useless. */
1958 gcc_assert (useless_type_conversion_p (old_type, type));
1963 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1964 to foo, embed that change in the ADDR_EXPR by converting
1969 where L is the lower bound. For simplicity, only do this for constant
1971 The constraint is that the type of &array[L] is trivially convertible
1975 canonicalize_addr_expr (tree *expr_p)
1977 tree expr = *expr_p;
1978 tree addr_expr = TREE_OPERAND (expr, 0);
1979 tree datype, ddatype, pddatype;
1981 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1982 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1983 || TREE_CODE (addr_expr) != ADDR_EXPR)
1986 /* The addr_expr type should be a pointer to an array. */
1987 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1988 if (TREE_CODE (datype) != ARRAY_TYPE)
1991 /* The pointer to element type shall be trivially convertible to
1992 the expression pointer type. */
1993 ddatype = TREE_TYPE (datype);
1994 pddatype = build_pointer_type (ddatype);
1995 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1999 /* The lower bound and element sizes must be constant. */
2000 if (!TYPE_SIZE_UNIT (ddatype)
2001 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2002 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2003 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2006 /* All checks succeeded. Build a new node to merge the cast. */
2007 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2008 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2009 NULL_TREE, NULL_TREE);
2010 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2012 /* We can have stripped a required restrict qualifier above. */
2013 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2014 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2017 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2018 underneath as appropriate. */
2020 static enum gimplify_status
2021 gimplify_conversion (tree *expr_p)
2023 location_t loc = EXPR_LOCATION (*expr_p);
2024 gcc_assert (CONVERT_EXPR_P (*expr_p));
2026 /* Then strip away all but the outermost conversion. */
2027 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2029 /* And remove the outermost conversion if it's useless. */
2030 if (tree_ssa_useless_type_conversion (*expr_p))
2031 *expr_p = TREE_OPERAND (*expr_p, 0);
2033 /* If we still have a conversion at the toplevel,
2034 then canonicalize some constructs. */
2035 if (CONVERT_EXPR_P (*expr_p))
2037 tree sub = TREE_OPERAND (*expr_p, 0);
2039 /* If a NOP conversion is changing the type of a COMPONENT_REF
2040 expression, then canonicalize its type now in order to expose more
2041 redundant conversions. */
2042 if (TREE_CODE (sub) == COMPONENT_REF)
2043 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2045 /* If a NOP conversion is changing a pointer to array of foo
2046 to a pointer to foo, embed that change in the ADDR_EXPR. */
2047 else if (TREE_CODE (sub) == ADDR_EXPR)
2048 canonicalize_addr_expr (expr_p);
2051 /* If we have a conversion to a non-register type force the
2052 use of a VIEW_CONVERT_EXPR instead. */
2053 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2054 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2055 TREE_OPERAND (*expr_p, 0));
2060 /* Nonlocal VLAs seen in the current function. */
2061 static struct pointer_set_t *nonlocal_vlas;
2063 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2064 DECL_VALUE_EXPR, and it's worth re-examining things. */
2066 static enum gimplify_status
2067 gimplify_var_or_parm_decl (tree *expr_p)
2069 tree decl = *expr_p;
2071 /* ??? If this is a local variable, and it has not been seen in any
2072 outer BIND_EXPR, then it's probably the result of a duplicate
2073 declaration, for which we've already issued an error. It would
2074 be really nice if the front end wouldn't leak these at all.
2075 Currently the only known culprit is C++ destructors, as seen
2076 in g++.old-deja/g++.jason/binding.C. */
2077 if (TREE_CODE (decl) == VAR_DECL
2078 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2079 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2080 && decl_function_context (decl) == current_function_decl)
2082 gcc_assert (seen_error ());
2086 /* When within an OpenMP context, notice uses of variables. */
2087 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2090 /* If the decl is an alias for another expression, substitute it now. */
2091 if (DECL_HAS_VALUE_EXPR_P (decl))
2093 tree value_expr = DECL_VALUE_EXPR (decl);
2095 /* For referenced nonlocal VLAs add a decl for debugging purposes
2096 to the current function. */
2097 if (TREE_CODE (decl) == VAR_DECL
2098 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
2099 && nonlocal_vlas != NULL
2100 && TREE_CODE (value_expr) == INDIRECT_REF
2101 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
2102 && decl_function_context (decl) != current_function_decl)
2104 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
2105 while (ctx && ctx->region_type == ORT_WORKSHARE)
2106 ctx = ctx->outer_context;
2107 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
2109 tree copy = copy_node (decl), block;
2111 lang_hooks.dup_lang_specific_decl (copy);
2112 SET_DECL_RTL (copy, 0);
2113 TREE_USED (copy) = 1;
2114 block = DECL_INITIAL (current_function_decl);
2115 DECL_CHAIN (copy) = BLOCK_VARS (block);
2116 BLOCK_VARS (block) = copy;
2117 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
2118 DECL_HAS_VALUE_EXPR_P (copy) = 1;
2122 *expr_p = unshare_expr (value_expr);
2129 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2133 : min_lval '[' val ']'
2135 | compound_lval '[' val ']'
2136 | compound_lval '.' ID
2138 This is not part of the original SIMPLE definition, which separates
2139 array and member references, but it seems reasonable to handle them
2140 together. Also, this way we don't run into problems with union
2141 aliasing; gcc requires that for accesses through a union to alias, the
2142 union reference must be explicit, which was not always the case when we
2143 were splitting up array and member refs.
2145 PRE_P points to the sequence where side effects that must happen before
2146 *EXPR_P should be stored.
2148 POST_P points to the sequence where side effects that must happen after
2149 *EXPR_P should be stored. */
2151 static enum gimplify_status
2152 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2153 fallback_t fallback)
2156 vec<tree> expr_stack;
2157 enum gimplify_status ret = GS_ALL_DONE, tret;
2159 location_t loc = EXPR_LOCATION (*expr_p);
2160 tree expr = *expr_p;
2162 /* Create a stack of the subexpressions so later we can walk them in
2163 order from inner to outer. */
2164 expr_stack.create (10);
2166 /* We can handle anything that get_inner_reference can deal with. */
2167 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2170 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2171 if (TREE_CODE (*p) == INDIRECT_REF)
2172 *p = fold_indirect_ref_loc (loc, *p);
2174 if (handled_component_p (*p))
2176 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2177 additional COMPONENT_REFs. */
2178 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
2179 && gimplify_var_or_parm_decl (p) == GS_OK)
2184 expr_stack.safe_push (*p);
2187 gcc_assert (expr_stack.length ());
2189 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2190 walked through and P points to the innermost expression.
2192 Java requires that we elaborated nodes in source order. That
2193 means we must gimplify the inner expression followed by each of
2194 the indices, in order. But we can't gimplify the inner
2195 expression until we deal with any variable bounds, sizes, or
2196 positions in order to deal with PLACEHOLDER_EXPRs.
2198 So we do this in three steps. First we deal with the annotations
2199 for any variables in the components, then we gimplify the base,
2200 then we gimplify any indices, from left to right. */
2201 for (i = expr_stack.length () - 1; i >= 0; i--)
2203 tree t = expr_stack[i];
2205 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2207 /* Gimplify the low bound and element type size and put them into
2208 the ARRAY_REF. If these values are set, they have already been
2210 if (TREE_OPERAND (t, 2) == NULL_TREE)
2212 tree low = unshare_expr (array_ref_low_bound (t));
2213 if (!is_gimple_min_invariant (low))
2215 TREE_OPERAND (t, 2) = low;
2216 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2217 post_p, is_gimple_reg,
2219 ret = MIN (ret, tret);
2224 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2225 is_gimple_reg, fb_rvalue);
2226 ret = MIN (ret, tret);
2229 if (TREE_OPERAND (t, 3) == NULL_TREE)
2231 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2232 tree elmt_size = unshare_expr (array_ref_element_size (t));
2233 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2235 /* Divide the element size by the alignment of the element
2238 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2240 if (!is_gimple_min_invariant (elmt_size))
2242 TREE_OPERAND (t, 3) = elmt_size;
2243 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2244 post_p, is_gimple_reg,
2246 ret = MIN (ret, tret);
2251 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2252 is_gimple_reg, fb_rvalue);
2253 ret = MIN (ret, tret);
2256 else if (TREE_CODE (t) == COMPONENT_REF)
2258 /* Set the field offset into T and gimplify it. */
2259 if (TREE_OPERAND (t, 2) == NULL_TREE)
2261 tree offset = unshare_expr (component_ref_field_offset (t));
2262 tree field = TREE_OPERAND (t, 1);
2264 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2266 /* Divide the offset by its alignment. */
2267 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2269 if (!is_gimple_min_invariant (offset))
2271 TREE_OPERAND (t, 2) = offset;
2272 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2273 post_p, is_gimple_reg,
2275 ret = MIN (ret, tret);
2280 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2281 is_gimple_reg, fb_rvalue);
2282 ret = MIN (ret, tret);
2287 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2288 so as to match the min_lval predicate. Failure to do so may result
2289 in the creation of large aggregate temporaries. */
2290 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2291 fallback | fb_lvalue);
2292 ret = MIN (ret, tret);
2294 /* And finally, the indices and operands of ARRAY_REF. During this
2295 loop we also remove any useless conversions. */
2296 for (; expr_stack.length () > 0; )
2298 tree t = expr_stack.pop ();
2300 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2302 /* Gimplify the dimension. */
2303 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2305 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2306 is_gimple_val, fb_rvalue);
2307 ret = MIN (ret, tret);
2311 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2313 /* The innermost expression P may have originally had
2314 TREE_SIDE_EFFECTS set which would have caused all the outer
2315 expressions in *EXPR_P leading to P to also have had
2316 TREE_SIDE_EFFECTS set. */
2317 recalculate_side_effects (t);
2320 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2321 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2323 canonicalize_component_ref (expr_p);
2326 expr_stack.release ();
2328 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2333 /* Gimplify the self modifying expression pointed to by EXPR_P
2336 PRE_P points to the list where side effects that must happen before
2337 *EXPR_P should be stored.
2339 POST_P points to the list where side effects that must happen after
2340 *EXPR_P should be stored.
2342 WANT_VALUE is nonzero iff we want to use the value of this expression
2343 in another expression.
2345 ARITH_TYPE is the type the computation should be performed in. */
2347 enum gimplify_status
2348 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2349 bool want_value, tree arith_type)
2351 enum tree_code code;
2352 tree lhs, lvalue, rhs, t1;
2353 gimple_seq post = NULL, *orig_post_p = post_p;
2355 enum tree_code arith_code;
2356 enum gimplify_status ret;
2357 location_t loc = EXPR_LOCATION (*expr_p);
2359 code = TREE_CODE (*expr_p);
2361 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2362 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2364 /* Prefix or postfix? */
2365 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2366 /* Faster to treat as prefix if result is not used. */
2367 postfix = want_value;
2371 /* For postfix, make sure the inner expression's post side effects
2372 are executed after side effects from this expression. */
2376 /* Add or subtract? */
2377 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2378 arith_code = PLUS_EXPR;
2380 arith_code = MINUS_EXPR;
2382 /* Gimplify the LHS into a GIMPLE lvalue. */
2383 lvalue = TREE_OPERAND (*expr_p, 0);
2384 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2385 if (ret == GS_ERROR)
2388 /* Extract the operands to the arithmetic operation. */
2390 rhs = TREE_OPERAND (*expr_p, 1);
2392 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2393 that as the result value and in the postqueue operation. */
2396 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2397 if (ret == GS_ERROR)
2400 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2403 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2404 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2406 rhs = convert_to_ptrofftype_loc (loc, rhs);
2407 if (arith_code == MINUS_EXPR)
2408 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2409 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2412 t1 = fold_convert (TREE_TYPE (*expr_p),
2413 fold_build2 (arith_code, arith_type,
2414 fold_convert (arith_type, lhs),
2415 fold_convert (arith_type, rhs)));
2419 gimplify_assign (lvalue, t1, pre_p);
2420 gimplify_seq_add_seq (orig_post_p, post);
2426 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2431 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2434 maybe_with_size_expr (tree *expr_p)
2436 tree expr = *expr_p;
2437 tree type = TREE_TYPE (expr);
2440 /* If we've already wrapped this or the type is error_mark_node, we can't do
2442 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2443 || type == error_mark_node)
2446 /* If the size isn't known or is a constant, we have nothing to do. */
2447 size = TYPE_SIZE_UNIT (type);
2448 if (!size || TREE_CODE (size) == INTEGER_CST)
2451 /* Otherwise, make a WITH_SIZE_EXPR. */
2452 size = unshare_expr (size);
2453 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2454 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2457 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2458 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2461 static enum gimplify_status
2462 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2464 bool (*test) (tree);
2467 /* In general, we allow lvalues for function arguments to avoid
2468 extra overhead of copying large aggregates out of even larger
2469 aggregates into temporaries only to copy the temporaries to
2470 the argument list. Make optimizers happy by pulling out to
2471 temporaries those types that fit in registers. */
2472 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2473 test = is_gimple_val, fb = fb_rvalue;
2476 test = is_gimple_lvalue, fb = fb_either;
2477 /* Also strip a TARGET_EXPR that would force an extra copy. */
2478 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2480 tree init = TARGET_EXPR_INITIAL (*arg_p);
2482 && !VOID_TYPE_P (TREE_TYPE (init)))
2487 /* If this is a variable sized type, we must remember the size. */
2488 maybe_with_size_expr (arg_p);
2490 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2491 /* Make sure arguments have the same location as the function call
2493 protected_set_expr_location (*arg_p, call_location);
2495 /* There is a sequence point before a function call. Side effects in
2496 the argument list must occur before the actual call. So, when
2497 gimplifying arguments, force gimplify_expr to use an internal
2498 post queue which is then appended to the end of PRE_P. */
2499 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2502 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2503 WANT_VALUE is true if the result of the call is desired. */
2505 static enum gimplify_status
2506 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2508 tree fndecl, parms, p, fnptrtype;
2509 enum gimplify_status ret;
2512 bool builtin_va_start_p = FALSE;
2513 location_t loc = EXPR_LOCATION (*expr_p);
2515 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2517 /* For reliable diagnostics during inlining, it is necessary that
2518 every call_expr be annotated with file and line. */
2519 if (! EXPR_HAS_LOCATION (*expr_p))
2520 SET_EXPR_LOCATION (*expr_p, input_location);
2522 /* This may be a call to a builtin function.
2524 Builtin function calls may be transformed into different
2525 (and more efficient) builtin function calls under certain
2526 circumstances. Unfortunately, gimplification can muck things
2527 up enough that the builtin expanders are not aware that certain
2528 transformations are still valid.
2530 So we attempt transformation/gimplification of the call before
2531 we gimplify the CALL_EXPR. At this time we do not manage to
2532 transform all calls in the same manner as the expanders do, but
2533 we do transform most of them. */
2534 fndecl = get_callee_fndecl (*expr_p);
2536 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2537 switch (DECL_FUNCTION_CODE (fndecl))
2539 case BUILT_IN_VA_START:
2541 builtin_va_start_p = TRUE;
2542 if (call_expr_nargs (*expr_p) < 2)
2544 error ("too few arguments to function %<va_start%>");
2545 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2549 if (fold_builtin_next_arg (*expr_p, true))
2551 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2558 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2559 *expr_p = build_int_cst (TREE_TYPE (*expr_p), loc.line);
2564 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2565 *expr_p = build_string_literal (strlen (loc.file) + 1, loc.file);
2568 case BUILT_IN_FUNCTION:
2570 const char *function;
2571 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2572 *expr_p = build_string_literal (strlen (function) + 1, function);
2578 if (fndecl && DECL_BUILT_IN (fndecl))
2580 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2581 if (new_tree && new_tree != *expr_p)
2583 /* There was a transformation of this call which computes the
2584 same value, but in a more efficient way. Return and try
2591 /* Remember the original function pointer type. */
2592 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2594 /* There is a sequence point before the call, so any side effects in
2595 the calling expression must occur before the actual call. Force
2596 gimplify_expr to use an internal post queue. */
2597 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2598 is_gimple_call_addr, fb_rvalue);
2600 nargs = call_expr_nargs (*expr_p);
2602 /* Get argument types for verification. */
2603 fndecl = get_callee_fndecl (*expr_p);
2606 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2607 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2608 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2610 if (fndecl && DECL_ARGUMENTS (fndecl))
2611 p = DECL_ARGUMENTS (fndecl);
2616 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2619 /* If the last argument is __builtin_va_arg_pack () and it is not
2620 passed as a named argument, decrease the number of CALL_EXPR
2621 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2624 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2626 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2627 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2630 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2631 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2632 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2634 tree call = *expr_p;
2637 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2638 CALL_EXPR_FN (call),
2639 nargs, CALL_EXPR_ARGP (call));
2641 /* Copy all CALL_EXPR flags, location and block, except
2642 CALL_EXPR_VA_ARG_PACK flag. */
2643 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2644 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2645 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2646 = CALL_EXPR_RETURN_SLOT_OPT (call);
2647 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2648 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2650 /* Set CALL_EXPR_VA_ARG_PACK. */
2651 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2655 /* Finally, gimplify the function arguments. */
2658 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2659 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2660 PUSH_ARGS_REVERSED ? i-- : i++)
2662 enum gimplify_status t;
2664 /* Avoid gimplifying the second argument to va_start, which needs to
2665 be the plain PARM_DECL. */
2666 if ((i != 1) || !builtin_va_start_p)
2668 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2669 EXPR_LOCATION (*expr_p));
2677 /* Verify the function result. */
2678 if (want_value && fndecl
2679 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2681 error_at (loc, "using result of function returning %<void%>");
2685 /* Try this again in case gimplification exposed something. */
2686 if (ret != GS_ERROR)
2688 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2690 if (new_tree && new_tree != *expr_p)
2692 /* There was a transformation of this call which computes the
2693 same value, but in a more efficient way. Return and try
2701 *expr_p = error_mark_node;
2705 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2706 decl. This allows us to eliminate redundant or useless
2707 calls to "const" functions. */
2708 if (TREE_CODE (*expr_p) == CALL_EXPR)
2710 int flags = call_expr_flags (*expr_p);
2711 if (flags & (ECF_CONST | ECF_PURE)
2712 /* An infinite loop is considered a side effect. */
2713 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2714 TREE_SIDE_EFFECTS (*expr_p) = 0;
2717 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2718 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2719 form and delegate the creation of a GIMPLE_CALL to
2720 gimplify_modify_expr. This is always possible because when
2721 WANT_VALUE is true, the caller wants the result of this call into
2722 a temporary, which means that we will emit an INIT_EXPR in
2723 internal_get_tmp_var which will then be handled by
2724 gimplify_modify_expr. */
2727 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2728 have to do is replicate it as a GIMPLE_CALL tuple. */
2729 gimple_stmt_iterator gsi;
2730 call = gimple_build_call_from_tree (*expr_p);
2731 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2732 gimplify_seq_add_stmt (pre_p, call);
2733 gsi = gsi_last (*pre_p);
2735 *expr_p = NULL_TREE;
2738 /* Remember the original function type. */
2739 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2740 CALL_EXPR_FN (*expr_p));
2745 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2746 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2748 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2749 condition is true or false, respectively. If null, we should generate
2750 our own to skip over the evaluation of this specific expression.
2752 LOCUS is the source location of the COND_EXPR.
2754 This function is the tree equivalent of do_jump.
2756 shortcut_cond_r should only be called by shortcut_cond_expr. */
2759 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2762 tree local_label = NULL_TREE;
2763 tree t, expr = NULL;
2765 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2766 retain the shortcut semantics. Just insert the gotos here;
2767 shortcut_cond_expr will append the real blocks later. */
2768 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2770 location_t new_locus;
2772 /* Turn if (a && b) into
2774 if (a); else goto no;
2775 if (b) goto yes; else goto no;
2778 if (false_label_p == NULL)
2779 false_label_p = &local_label;
2781 /* Keep the original source location on the first 'if'. */
2782 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2783 append_to_statement_list (t, &expr);
2785 /* Set the source location of the && on the second 'if'. */
2786 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2787 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2789 append_to_statement_list (t, &expr);
2791 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2793 location_t new_locus;
2795 /* Turn if (a || b) into
2798 if (b) goto yes; else goto no;
2801 if (true_label_p == NULL)
2802 true_label_p = &local_label;
2804 /* Keep the original source location on the first 'if'. */
2805 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2806 append_to_statement_list (t, &expr);
2808 /* Set the source location of the || on the second 'if'. */
2809 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2810 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2812 append_to_statement_list (t, &expr);
2814 else if (TREE_CODE (pred) == COND_EXPR
2815 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2816 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2818 location_t new_locus;
2820 /* As long as we're messing with gotos, turn if (a ? b : c) into
2822 if (b) goto yes; else goto no;
2824 if (c) goto yes; else goto no;
2826 Don't do this if one of the arms has void type, which can happen
2827 in C++ when the arm is throw. */
2829 /* Keep the original source location on the first 'if'. Set the source
2830 location of the ? on the second 'if'. */
2831 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2832 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2833 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2834 false_label_p, locus),
2835 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2836 false_label_p, new_locus));
2840 expr = build3 (COND_EXPR, void_type_node, pred,
2841 build_and_jump (true_label_p),
2842 build_and_jump (false_label_p));
2843 SET_EXPR_LOCATION (expr, locus);
2848 t = build1 (LABEL_EXPR, void_type_node, local_label);
2849 append_to_statement_list (t, &expr);
2855 /* Given a conditional expression EXPR with short-circuit boolean
2856 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2857 predicate apart into the equivalent sequence of conditionals. */
2860 shortcut_cond_expr (tree expr)
2862 tree pred = TREE_OPERAND (expr, 0);
2863 tree then_ = TREE_OPERAND (expr, 1);
2864 tree else_ = TREE_OPERAND (expr, 2);
2865 tree true_label, false_label, end_label, t;
2867 tree *false_label_p;
2868 bool emit_end, emit_false, jump_over_else;
2869 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2870 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2872 /* First do simple transformations. */
2875 /* If there is no 'else', turn
2878 if (a) if (b) then c. */
2879 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2881 /* Keep the original source location on the first 'if'. */
2882 location_t locus = EXPR_LOC_OR_HERE (expr);
2883 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2884 /* Set the source location of the && on the second 'if'. */
2885 if (EXPR_HAS_LOCATION (pred))
2886 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2887 then_ = shortcut_cond_expr (expr);
2888 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2889 pred = TREE_OPERAND (pred, 0);
2890 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2891 SET_EXPR_LOCATION (expr, locus);
2897 /* If there is no 'then', turn
2900 if (a); else if (b); else d. */
2901 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2903 /* Keep the original source location on the first 'if'. */
2904 location_t locus = EXPR_LOC_OR_HERE (expr);
2905 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2906 /* Set the source location of the || on the second 'if'. */
2907 if (EXPR_HAS_LOCATION (pred))
2908 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2909 else_ = shortcut_cond_expr (expr);
2910 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2911 pred = TREE_OPERAND (pred, 0);
2912 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2913 SET_EXPR_LOCATION (expr, locus);
2917 /* If we're done, great. */
2918 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2919 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2922 /* Otherwise we need to mess with gotos. Change
2925 if (a); else goto no;
2928 and recursively gimplify the condition. */
2930 true_label = false_label = end_label = NULL_TREE;
2932 /* If our arms just jump somewhere, hijack those labels so we don't
2933 generate jumps to jumps. */
2936 && TREE_CODE (then_) == GOTO_EXPR
2937 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2939 true_label = GOTO_DESTINATION (then_);
2945 && TREE_CODE (else_) == GOTO_EXPR
2946 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2948 false_label = GOTO_DESTINATION (else_);
2953 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2955 true_label_p = &true_label;
2957 true_label_p = NULL;
2959 /* The 'else' branch also needs a label if it contains interesting code. */
2960 if (false_label || else_se)
2961 false_label_p = &false_label;
2963 false_label_p = NULL;
2965 /* If there was nothing else in our arms, just forward the label(s). */
2966 if (!then_se && !else_se)
2967 return shortcut_cond_r (pred, true_label_p, false_label_p,
2968 EXPR_LOC_OR_HERE (expr));
2970 /* If our last subexpression already has a terminal label, reuse it. */
2972 t = expr_last (else_);
2974 t = expr_last (then_);
2977 if (t && TREE_CODE (t) == LABEL_EXPR)
2978 end_label = LABEL_EXPR_LABEL (t);
2980 /* If we don't care about jumping to the 'else' branch, jump to the end
2981 if the condition is false. */
2983 false_label_p = &end_label;
2985 /* We only want to emit these labels if we aren't hijacking them. */
2986 emit_end = (end_label == NULL_TREE);
2987 emit_false = (false_label == NULL_TREE);
2989 /* We only emit the jump over the else clause if we have to--if the
2990 then clause may fall through. Otherwise we can wind up with a
2991 useless jump and a useless label at the end of gimplified code,
2992 which will cause us to think that this conditional as a whole
2993 falls through even if it doesn't. If we then inline a function
2994 which ends with such a condition, that can cause us to issue an
2995 inappropriate warning about control reaching the end of a
2996 non-void function. */
2997 jump_over_else = block_may_fallthru (then_);
2999 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3000 EXPR_LOC_OR_HERE (expr));
3003 append_to_statement_list (pred, &expr);
3005 append_to_statement_list (then_, &expr);
3010 tree last = expr_last (expr);
3011 t = build_and_jump (&end_label);
3012 if (EXPR_HAS_LOCATION (last))
3013 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
3014 append_to_statement_list (t, &expr);
3018 t = build1 (LABEL_EXPR, void_type_node, false_label);
3019 append_to_statement_list (t, &expr);
3021 append_to_statement_list (else_, &expr);
3023 if (emit_end && end_label)
3025 t = build1 (LABEL_EXPR, void_type_node, end_label);
3026 append_to_statement_list (t, &expr);
3032 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3035 gimple_boolify (tree expr)
3037 tree type = TREE_TYPE (expr);
3038 location_t loc = EXPR_LOCATION (expr);
3040 if (TREE_CODE (expr) == NE_EXPR
3041 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3042 && integer_zerop (TREE_OPERAND (expr, 1)))
3044 tree call = TREE_OPERAND (expr, 0);
3045 tree fn = get_callee_fndecl (call);
3047 /* For __builtin_expect ((long) (x), y) recurse into x as well
3048 if x is truth_value_p. */
3050 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3051 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3052 && call_expr_nargs (call) == 2)
3054 tree arg = CALL_EXPR_ARG (call, 0);
3057 if (TREE_CODE (arg) == NOP_EXPR
3058 && TREE_TYPE (arg) == TREE_TYPE (call))
3059 arg = TREE_OPERAND (arg, 0);
3060 if (truth_value_p (TREE_CODE (arg)))
3062 arg = gimple_boolify (arg);
3063 CALL_EXPR_ARG (call, 0)
3064 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3070 switch (TREE_CODE (expr))
3072 case TRUTH_AND_EXPR:
3074 case TRUTH_XOR_EXPR:
3075 case TRUTH_ANDIF_EXPR:
3076 case TRUTH_ORIF_EXPR:
3077 /* Also boolify the arguments of truth exprs. */
3078 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3081 case TRUTH_NOT_EXPR:
3082 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3084 /* These expressions always produce boolean results. */
3085 if (TREE_CODE (type) != BOOLEAN_TYPE)
3086 TREE_TYPE (expr) = boolean_type_node;
3090 if (COMPARISON_CLASS_P (expr))
3092 /* There expressions always prduce boolean results. */
3093 if (TREE_CODE (type) != BOOLEAN_TYPE)
3094 TREE_TYPE (expr) = boolean_type_node;
3097 /* Other expressions that get here must have boolean values, but
3098 might need to be converted to the appropriate mode. */
3099 if (TREE_CODE (type) == BOOLEAN_TYPE)
3101 return fold_convert_loc (loc, boolean_type_node, expr);
3105 /* Given a conditional expression *EXPR_P without side effects, gimplify
3106 its operands. New statements are inserted to PRE_P. */
3108 static enum gimplify_status
3109 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3111 tree expr = *expr_p, cond;
3112 enum gimplify_status ret, tret;
3113 enum tree_code code;
3115 cond = gimple_boolify (COND_EXPR_COND (expr));
3117 /* We need to handle && and || specially, as their gimplification
3118 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3119 code = TREE_CODE (cond);
3120 if (code == TRUTH_ANDIF_EXPR)
3121 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3122 else if (code == TRUTH_ORIF_EXPR)
3123 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3124 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3125 COND_EXPR_COND (*expr_p) = cond;
3127 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3128 is_gimple_val, fb_rvalue);
3129 ret = MIN (ret, tret);
3130 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3131 is_gimple_val, fb_rvalue);
3133 return MIN (ret, tret);
3136 /* Return true if evaluating EXPR could trap.
3137 EXPR is GENERIC, while tree_could_trap_p can be called
3141 generic_expr_could_trap_p (tree expr)
3145 if (!expr || is_gimple_val (expr))
3148 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3151 n = TREE_OPERAND_LENGTH (expr);
3152 for (i = 0; i < n; i++)
3153 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3159 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3168 The second form is used when *EXPR_P is of type void.
3170 PRE_P points to the list where side effects that must happen before
3171 *EXPR_P should be stored. */
3173 static enum gimplify_status
3174 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3176 tree expr = *expr_p;
3177 tree type = TREE_TYPE (expr);
3178 location_t loc = EXPR_LOCATION (expr);
3179 tree tmp, arm1, arm2;
3180 enum gimplify_status ret;
3181 tree label_true, label_false, label_cont;
3182 bool have_then_clause_p, have_else_clause_p;
3184 enum tree_code pred_code;
3185 gimple_seq seq = NULL;
3187 /* If this COND_EXPR has a value, copy the values into a temporary within
3189 if (!VOID_TYPE_P (type))
3191 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3194 /* If either an rvalue is ok or we do not require an lvalue, create the
3195 temporary. But we cannot do that if the type is addressable. */
3196 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3197 && !TREE_ADDRESSABLE (type))
3199 if (gimplify_ctxp->allow_rhs_cond_expr
3200 /* If either branch has side effects or could trap, it can't be
3201 evaluated unconditionally. */
3202 && !TREE_SIDE_EFFECTS (then_)
3203 && !generic_expr_could_trap_p (then_)
3204 && !TREE_SIDE_EFFECTS (else_)
3205 && !generic_expr_could_trap_p (else_))
3206 return gimplify_pure_cond_expr (expr_p, pre_p);
3208 tmp = create_tmp_var (type, "iftmp");
3212 /* Otherwise, only create and copy references to the values. */
3215 type = build_pointer_type (type);
3217 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3218 then_ = build_fold_addr_expr_loc (loc, then_);
3220 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3221 else_ = build_fold_addr_expr_loc (loc, else_);
3224 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3226 tmp = create_tmp_var (type, "iftmp");
3227 result = build_simple_mem_ref_loc (loc, tmp);
3230 /* Build the new then clause, `tmp = then_;'. But don't build the
3231 assignment if the value is void; in C++ it can be if it's a throw. */
3232 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3233 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3235 /* Similarly, build the new else clause, `tmp = else_;'. */
3236 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3237 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3239 TREE_TYPE (expr) = void_type_node;
3240 recalculate_side_effects (expr);
3242 /* Move the COND_EXPR to the prequeue. */
3243 gimplify_stmt (&expr, pre_p);
3249 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3250 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3251 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3252 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3254 /* Make sure the condition has BOOLEAN_TYPE. */
3255 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3257 /* Break apart && and || conditions. */
3258 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3259 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3261 expr = shortcut_cond_expr (expr);
3263 if (expr != *expr_p)
3267 /* We can't rely on gimplify_expr to re-gimplify the expanded
3268 form properly, as cleanups might cause the target labels to be
3269 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3270 set up a conditional context. */
3271 gimple_push_condition ();
3272 gimplify_stmt (expr_p, &seq);
3273 gimple_pop_condition (pre_p);
3274 gimple_seq_add_seq (pre_p, seq);
3280 /* Now do the normal gimplification. */
3282 /* Gimplify condition. */
3283 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3285 if (ret == GS_ERROR)
3287 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3289 gimple_push_condition ();
3291 have_then_clause_p = have_else_clause_p = false;
3292 if (TREE_OPERAND (expr, 1) != NULL
3293 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3294 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3295 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3296 == current_function_decl)
3297 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3298 have different locations, otherwise we end up with incorrect
3299 location information on the branches. */
3301 || !EXPR_HAS_LOCATION (expr)
3302 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3303 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3305 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3306 have_then_clause_p = true;
3309 label_true = create_artificial_label (UNKNOWN_LOCATION);
3310 if (TREE_OPERAND (expr, 2) != NULL
3311 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3312 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3313 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3314 == current_function_decl)
3315 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3316 have different locations, otherwise we end up with incorrect
3317 location information on the branches. */
3319 || !EXPR_HAS_LOCATION (expr)
3320 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3321 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3323 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3324 have_else_clause_p = true;
3327 label_false = create_artificial_label (UNKNOWN_LOCATION);
3329 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3332 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3335 gimplify_seq_add_stmt (&seq, gimple_cond);
3336 label_cont = NULL_TREE;
3337 if (!have_then_clause_p)
3339 /* For if (...) {} else { code; } put label_true after
3341 if (TREE_OPERAND (expr, 1) == NULL_TREE
3342 && !have_else_clause_p
3343 && TREE_OPERAND (expr, 2) != NULL_TREE)
3344 label_cont = label_true;
3347 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3348 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3349 /* For if (...) { code; } else {} or
3350 if (...) { code; } else goto label; or
3351 if (...) { code; return; } else { ... }
3352 label_cont isn't needed. */
3353 if (!have_else_clause_p
3354 && TREE_OPERAND (expr, 2) != NULL_TREE
3355 && gimple_seq_may_fallthru (seq))
3358 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3360 g = gimple_build_goto (label_cont);
3362 /* GIMPLE_COND's are very low level; they have embedded
3363 gotos. This particular embedded goto should not be marked
3364 with the location of the original COND_EXPR, as it would
3365 correspond to the COND_EXPR's condition, not the ELSE or the
3366 THEN arms. To avoid marking it with the wrong location, flag
3367 it as "no location". */
3368 gimple_set_do_not_emit_location (g);
3370 gimplify_seq_add_stmt (&seq, g);
3374 if (!have_else_clause_p)
3376 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3377 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3380 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3382 gimple_pop_condition (pre_p);
3383 gimple_seq_add_seq (pre_p, seq);
3385 if (ret == GS_ERROR)
3387 else if (have_then_clause_p || have_else_clause_p)
3391 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3392 expr = TREE_OPERAND (expr, 0);
3393 gimplify_stmt (&expr, pre_p);
3400 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3401 to be marked addressable.
3403 We cannot rely on such an expression being directly markable if a temporary
3404 has been created by the gimplification. In this case, we create another
3405 temporary and initialize it with a copy, which will become a store after we
3406 mark it addressable. This can happen if the front-end passed us something
3407 that it could not mark addressable yet, like a Fortran pass-by-reference
3408 parameter (int) floatvar. */
3411 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3413 while (handled_component_p (*expr_p))
3414 expr_p = &TREE_OPERAND (*expr_p, 0);
3415 if (is_gimple_reg (*expr_p))
3416 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3419 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3420 a call to __builtin_memcpy. */
3422 static enum gimplify_status
3423 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3426 tree t, to, to_ptr, from, from_ptr;
3428 location_t loc = EXPR_LOCATION (*expr_p);
3430 to = TREE_OPERAND (*expr_p, 0);
3431 from = TREE_OPERAND (*expr_p, 1);
3433 /* Mark the RHS addressable. Beware that it may not be possible to do so
3434 directly if a temporary has been created by the gimplification. */
3435 prepare_gimple_addressable (&from, seq_p);
3437 mark_addressable (from);
3438 from_ptr = build_fold_addr_expr_loc (loc, from);
3439 gimplify_arg (&from_ptr, seq_p, loc);
3441 mark_addressable (to);
3442 to_ptr = build_fold_addr_expr_loc (loc, to);
3443 gimplify_arg (&to_ptr, seq_p, loc);
3445 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3447 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3451 /* tmp = memcpy() */
3452 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3453 gimple_call_set_lhs (gs, t);
3454 gimplify_seq_add_stmt (seq_p, gs);
3456 *expr_p = build_simple_mem_ref (t);
3460 gimplify_seq_add_stmt (seq_p, gs);
3465 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3466 a call to __builtin_memset. In this case we know that the RHS is
3467 a CONSTRUCTOR with an empty element list. */
3469 static enum gimplify_status
3470 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3473 tree t, from, to, to_ptr;
3475 location_t loc = EXPR_LOCATION (*expr_p);
3477 /* Assert our assumptions, to abort instead of producing wrong code
3478 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3479 not be immediately exposed. */
3480 from = TREE_OPERAND (*expr_p, 1);
3481 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3482 from = TREE_OPERAND (from, 0);
3484 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3485 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
3488 to = TREE_OPERAND (*expr_p, 0);
3490 to_ptr = build_fold_addr_expr_loc (loc, to);
3491 gimplify_arg (&to_ptr, seq_p, loc);
3492 t = builtin_decl_implicit (BUILT_IN_MEMSET);
3494 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3498 /* tmp = memset() */
3499 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3500 gimple_call_set_lhs (gs, t);
3501 gimplify_seq_add_stmt (seq_p, gs);
3503 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3507 gimplify_seq_add_stmt (seq_p, gs);
3512 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3513 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3514 assignment. Return non-null if we detect a potential overlap. */
3516 struct gimplify_init_ctor_preeval_data
3518 /* The base decl of the lhs object. May be NULL, in which case we
3519 have to assume the lhs is indirect. */
3522 /* The alias set of the lhs object. */
3523 alias_set_type lhs_alias_set;
3527 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3529 struct gimplify_init_ctor_preeval_data *data
3530 = (struct gimplify_init_ctor_preeval_data *) xdata;
3533 /* If we find the base object, obviously we have overlap. */
3534 if (data->lhs_base_decl == t)
3537 /* If the constructor component is indirect, determine if we have a
3538 potential overlap with the lhs. The only bits of information we
3539 have to go on at this point are addressability and alias sets. */
3540 if ((INDIRECT_REF_P (t)
3541 || TREE_CODE (t) == MEM_REF)
3542 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3543 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3546 /* If the constructor component is a call, determine if it can hide a
3547 potential overlap with the lhs through an INDIRECT_REF like above.
3548 ??? Ugh - this is completely broken. In fact this whole analysis
3549 doesn't look conservative. */
3550 if (TREE_CODE (t) == CALL_EXPR)
3552 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3554 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3555 if (POINTER_TYPE_P (TREE_VALUE (type))
3556 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3557 && alias_sets_conflict_p (data->lhs_alias_set,
3559 (TREE_TYPE (TREE_VALUE (type)))))
3563 if (IS_TYPE_OR_DECL_P (t))
3568 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3569 force values that overlap with the lhs (as described by *DATA)
3570 into temporaries. */
3573 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3574 struct gimplify_init_ctor_preeval_data *data)
3576 enum gimplify_status one;
3578 /* If the value is constant, then there's nothing to pre-evaluate. */
3579 if (TREE_CONSTANT (*expr_p))
3581 /* Ensure it does not have side effects, it might contain a reference to
3582 the object we're initializing. */
3583 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3587 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3588 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3591 /* Recurse for nested constructors. */
3592 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3594 unsigned HOST_WIDE_INT ix;
3595 constructor_elt *ce;
3596 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
3598 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
3599 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3604 /* If this is a variable sized type, we must remember the size. */
3605 maybe_with_size_expr (expr_p);
3607 /* Gimplify the constructor element to something appropriate for the rhs
3608 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3609 the gimplifier will consider this a store to memory. Doing this
3610 gimplification now means that we won't have to deal with complicated
3611 language-specific trees, nor trees like SAVE_EXPR that can induce
3612 exponential search behavior. */
3613 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3614 if (one == GS_ERROR)
3620 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3621 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3622 always be true for all scalars, since is_gimple_mem_rhs insists on a
3623 temporary variable for them. */
3624 if (DECL_P (*expr_p))
3627 /* If this is of variable size, we have no choice but to assume it doesn't
3628 overlap since we can't make a temporary for it. */
3629 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3632 /* Otherwise, we must search for overlap ... */
3633 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3636 /* ... and if found, force the value into a temporary. */
3637 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3640 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3641 a RANGE_EXPR in a CONSTRUCTOR for an array.
3645 object[var] = value;
3652 We increment var _after_ the loop exit check because we might otherwise
3653 fail if upper == TYPE_MAX_VALUE (type for upper).
3655 Note that we never have to deal with SAVE_EXPRs here, because this has
3656 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3658 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
3659 gimple_seq *, bool);
3662 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3663 tree value, tree array_elt_type,
3664 gimple_seq *pre_p, bool cleared)
3666 tree loop_entry_label, loop_exit_label, fall_thru_label;
3667 tree var, var_type, cref, tmp;
3669 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3670 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3671 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3673 /* Create and initialize the index variable. */
3674 var_type = TREE_TYPE (upper);
3675 var = create_tmp_var (var_type, NULL);
3676 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3678 /* Add the loop entry label. */
3679 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3681 /* Build the reference. */
3682 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3683 var, NULL_TREE, NULL_TREE);
3685 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3686 the store. Otherwise just assign value to the reference. */
3688 if (TREE_CODE (value) == CONSTRUCTOR)
3689 /* NB we might have to call ourself recursively through
3690 gimplify_init_ctor_eval if the value is a constructor. */
3691 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3694 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3696 /* We exit the loop when the index var is equal to the upper bound. */
3697 gimplify_seq_add_stmt (pre_p,
3698 gimple_build_cond (EQ_EXPR, var, upper,
3699 loop_exit_label, fall_thru_label));
3701 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3703 /* Otherwise, increment the index var... */
3704 tmp = build2 (PLUS_EXPR, var_type, var,
3705 fold_convert (var_type, integer_one_node));
3706 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3708 /* ...and jump back to the loop entry. */
3709 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3711 /* Add the loop exit label. */
3712 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3715 /* Return true if FDECL is accessing a field that is zero sized. */
3718 zero_sized_field_decl (const_tree fdecl)
3720 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3721 && integer_zerop (DECL_SIZE (fdecl)))
3726 /* Return true if TYPE is zero sized. */
3729 zero_sized_type (const_tree type)
3731 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3732 && integer_zerop (TYPE_SIZE (type)))
3737 /* A subroutine of gimplify_init_constructor. Generate individual
3738 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3739 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3740 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3744 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
3745 gimple_seq *pre_p, bool cleared)
3747 tree array_elt_type = NULL;
3748 unsigned HOST_WIDE_INT ix;
3749 tree purpose, value;
3751 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3752 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3754 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3758 /* NULL values are created above for gimplification errors. */
3762 if (cleared && initializer_zerop (value))
3765 /* ??? Here's to hoping the front end fills in all of the indices,
3766 so we don't have to figure out what's missing ourselves. */
3767 gcc_assert (purpose);
3769 /* Skip zero-sized fields, unless value has side-effects. This can
3770 happen with calls to functions returning a zero-sized type, which
3771 we shouldn't discard. As a number of downstream passes don't
3772 expect sets of zero-sized fields, we rely on the gimplification of
3773 the MODIFY_EXPR we make below to drop the assignment statement. */
3774 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3777 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3779 if (TREE_CODE (purpose) == RANGE_EXPR)
3781 tree lower = TREE_OPERAND (purpose, 0);
3782 tree upper = TREE_OPERAND (purpose, 1);
3784 /* If the lower bound is equal to upper, just treat it as if
3785 upper was the index. */
3786 if (simple_cst_equal (lower, upper))
3790 gimplify_init_ctor_eval_range (object, lower, upper, value,
3791 array_elt_type, pre_p, cleared);
3798 /* Do not use bitsizetype for ARRAY_REF indices. */
3799 if (TYPE_DOMAIN (TREE_TYPE (object)))
3801 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3803 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3804 purpose, NULL_TREE, NULL_TREE);
3808 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3809 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3810 unshare_expr (object), purpose, NULL_TREE);
3813 if (TREE_CODE (value) == CONSTRUCTOR
3814 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3815 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3819 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3820 gimplify_and_add (init, pre_p);
3826 /* Return the appropriate RHS predicate for this LHS. */
3829 rhs_predicate_for (tree lhs)
3831 if (is_gimple_reg (lhs))
3832 return is_gimple_reg_rhs_or_call;
3834 return is_gimple_mem_rhs_or_call;
3837 /* Gimplify a C99 compound literal expression. This just means adding
3838 the DECL_EXPR before the current statement and using its anonymous
3841 static enum gimplify_status
3842 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
3843 bool (*gimple_test_f) (tree),
3844 fallback_t fallback)
3846 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3847 tree decl = DECL_EXPR_DECL (decl_s);
3848 tree init = DECL_INITIAL (decl);
3849 /* Mark the decl as addressable if the compound literal
3850 expression is addressable now, otherwise it is marked too late
3851 after we gimplify the initialization expression. */
3852 if (TREE_ADDRESSABLE (*expr_p))
3853 TREE_ADDRESSABLE (decl) = 1;
3854 /* Otherwise, if we don't need an lvalue and have a literal directly
3855 substitute it. Check if it matches the gimple predicate, as
3856 otherwise we'd generate a new temporary, and we can as well just
3857 use the decl we already have. */
3858 else if (!TREE_ADDRESSABLE (decl)
3860 && (fallback & fb_lvalue) == 0
3861 && gimple_test_f (init))
3867 /* Preliminarily mark non-addressed complex variables as eligible
3868 for promotion to gimple registers. We'll transform their uses
3870 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3871 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3872 && !TREE_THIS_VOLATILE (decl)
3873 && !needs_to_live_in_memory (decl))
3874 DECL_GIMPLE_REG_P (decl) = 1;
3876 /* If the decl is not addressable, then it is being used in some
3877 expression or on the right hand side of a statement, and it can
3878 be put into a readonly data section. */
3879 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3880 TREE_READONLY (decl) = 1;
3882 /* This decl isn't mentioned in the enclosing block, so add it to the
3883 list of temps. FIXME it seems a bit of a kludge to say that
3884 anonymous artificial vars aren't pushed, but everything else is. */
3885 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3886 gimple_add_tmp_var (decl);
3888 gimplify_and_add (decl_s, pre_p);
3893 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3894 return a new CONSTRUCTOR if something changed. */
3897 optimize_compound_literals_in_ctor (tree orig_ctor)
3899 tree ctor = orig_ctor;
3900 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3901 unsigned int idx, num = vec_safe_length (elts);
3903 for (idx = 0; idx < num; idx++)
3905 tree value = (*elts)[idx].value;
3906 tree newval = value;
3907 if (TREE_CODE (value) == CONSTRUCTOR)
3908 newval = optimize_compound_literals_in_ctor (value);
3909 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3911 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3912 tree decl = DECL_EXPR_DECL (decl_s);
3913 tree init = DECL_INITIAL (decl);
3915 if (!TREE_ADDRESSABLE (value)
3916 && !TREE_ADDRESSABLE (decl)
3918 && TREE_CODE (init) == CONSTRUCTOR)
3919 newval = optimize_compound_literals_in_ctor (init);
3921 if (newval == value)
3924 if (ctor == orig_ctor)
3926 ctor = copy_node (orig_ctor);
3927 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3928 elts = CONSTRUCTOR_ELTS (ctor);
3930 (*elts)[idx].value = newval;
3935 /* A subroutine of gimplify_modify_expr. Break out elements of a
3936 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3938 Note that we still need to clear any elements that don't have explicit
3939 initializers, so if not all elements are initialized we keep the
3940 original MODIFY_EXPR, we just remove all of the constructor elements.
3942 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3943 GS_ERROR if we would have to create a temporary when gimplifying
3944 this constructor. Otherwise, return GS_OK.
3946 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3948 static enum gimplify_status
3949 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3950 bool want_value, bool notify_temp_creation)
3952 tree object, ctor, type;
3953 enum gimplify_status ret;
3954 vec<constructor_elt, va_gc> *elts;
3956 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3958 if (!notify_temp_creation)
3960 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3961 is_gimple_lvalue, fb_lvalue);
3962 if (ret == GS_ERROR)
3966 object = TREE_OPERAND (*expr_p, 0);
3967 ctor = TREE_OPERAND (*expr_p, 1) =
3968 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3969 type = TREE_TYPE (ctor);
3970 elts = CONSTRUCTOR_ELTS (ctor);
3973 switch (TREE_CODE (type))
3977 case QUAL_UNION_TYPE:
3980 struct gimplify_init_ctor_preeval_data preeval_data;
3981 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3982 bool cleared, complete_p, valid_const_initializer;
3984 /* Aggregate types must lower constructors to initialization of
3985 individual elements. The exception is that a CONSTRUCTOR node
3986 with no elements indicates zero-initialization of the whole. */
3987 if (vec_safe_is_empty (elts))
3989 if (notify_temp_creation)
3994 /* Fetch information about the constructor to direct later processing.
3995 We might want to make static versions of it in various cases, and
3996 can only do so if it known to be a valid constant initializer. */
3997 valid_const_initializer
3998 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3999 &num_ctor_elements, &complete_p);
4001 /* If a const aggregate variable is being initialized, then it
4002 should never be a lose to promote the variable to be static. */
4003 if (valid_const_initializer
4004 && num_nonzero_elements > 1
4005 && TREE_READONLY (object)
4006 && TREE_CODE (object) == VAR_DECL
4007 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
4009 if (notify_temp_creation)
4011 DECL_INITIAL (object) = ctor;
4012 TREE_STATIC (object) = 1;
4013 if (!DECL_NAME (object))
4014 DECL_NAME (object) = create_tmp_var_name ("C");
4015 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4017 /* ??? C++ doesn't automatically append a .<number> to the
4018 assembler name, and even when it does, it looks at FE private
4019 data structures to figure out what that number should be,
4020 which are not set for this variable. I suppose this is
4021 important for local statics for inline functions, which aren't
4022 "local" in the object file sense. So in order to get a unique
4023 TU-local symbol, we must invoke the lhd version now. */
4024 lhd_set_decl_assembler_name (object);
4026 *expr_p = NULL_TREE;
4030 /* If there are "lots" of initialized elements, even discounting
4031 those that are not address constants (and thus *must* be
4032 computed at runtime), then partition the constructor into
4033 constant and non-constant parts. Block copy the constant
4034 parts in, then generate code for the non-constant parts. */
4035 /* TODO. There's code in cp/typeck.c to do this. */
4037 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4038 /* store_constructor will ignore the clearing of variable-sized
4039 objects. Initializers for such objects must explicitly set
4040 every field that needs to be set. */
4042 else if (!complete_p)
4043 /* If the constructor isn't complete, clear the whole object
4046 ??? This ought not to be needed. For any element not present
4047 in the initializer, we should simply set them to zero. Except
4048 we'd need to *find* the elements that are not present, and that
4049 requires trickery to avoid quadratic compile-time behavior in
4050 large cases or excessive memory use in small cases. */
4052 else if (num_ctor_elements - num_nonzero_elements
4053 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4054 && num_nonzero_elements < num_ctor_elements / 4)
4055 /* If there are "lots" of zeros, it's more efficient to clear
4056 the memory and then set the nonzero elements. */
4061 /* If there are "lots" of initialized elements, and all of them
4062 are valid address constants, then the entire initializer can
4063 be dropped to memory, and then memcpy'd out. Don't do this
4064 for sparse arrays, though, as it's more efficient to follow
4065 the standard CONSTRUCTOR behavior of memset followed by
4066 individual element initialization. Also don't do this for small
4067 all-zero initializers (which aren't big enough to merit
4068 clearing), and don't try to make bitwise copies of
4069 TREE_ADDRESSABLE types. */
4070 if (valid_const_initializer
4071 && !(cleared || num_nonzero_elements == 0)
4072 && !TREE_ADDRESSABLE (type))
4074 HOST_WIDE_INT size = int_size_in_bytes (type);
4077 /* ??? We can still get unbounded array types, at least
4078 from the C++ front end. This seems wrong, but attempt
4079 to work around it for now. */
4082 size = int_size_in_bytes (TREE_TYPE (object));
4084 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4087 /* Find the maximum alignment we can assume for the object. */
4088 /* ??? Make use of DECL_OFFSET_ALIGN. */
4089 if (DECL_P (object))
4090 align = DECL_ALIGN (object);
4092 align = TYPE_ALIGN (type);
4094 /* Do a block move either if the size is so small as to make
4095 each individual move a sub-unit move on average, or if it
4096 is so large as to make individual moves inefficient. */
4098 && num_nonzero_elements > 1
4099 && (size < num_nonzero_elements
4100 || !can_move_by_pieces (size, align)))
4102 if (notify_temp_creation)
4105 walk_tree (&ctor, force_labels_r, NULL, NULL);
4106 ctor = tree_output_constant_def (ctor);
4107 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4108 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4109 TREE_OPERAND (*expr_p, 1) = ctor;
4111 /* This is no longer an assignment of a CONSTRUCTOR, but
4112 we still may have processing to do on the LHS. So
4113 pretend we didn't do anything here to let that happen. */
4114 return GS_UNHANDLED;
4118 /* If the target is volatile, we have non-zero elements and more than
4119 one field to assign, initialize the target from a temporary. */
4120 if (TREE_THIS_VOLATILE (object)
4121 && !TREE_ADDRESSABLE (type)
4122 && num_nonzero_elements > 0
4123 && vec_safe_length (elts) > 1)
4125 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
4126 TREE_OPERAND (*expr_p, 0) = temp;
4127 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4129 build2 (MODIFY_EXPR, void_type_node,
4134 if (notify_temp_creation)
4137 /* If there are nonzero elements and if needed, pre-evaluate to capture
4138 elements overlapping with the lhs into temporaries. We must do this
4139 before clearing to fetch the values before they are zeroed-out. */
4140 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4142 preeval_data.lhs_base_decl = get_base_address (object);
4143 if (!DECL_P (preeval_data.lhs_base_decl))
4144 preeval_data.lhs_base_decl = NULL;
4145 preeval_data.lhs_alias_set = get_alias_set (object);
4147 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4148 pre_p, post_p, &preeval_data);
4153 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4154 Note that we still have to gimplify, in order to handle the
4155 case of variable sized types. Avoid shared tree structures. */
4156 CONSTRUCTOR_ELTS (ctor) = NULL;
4157 TREE_SIDE_EFFECTS (ctor) = 0;
4158 object = unshare_expr (object);
4159 gimplify_stmt (expr_p, pre_p);
4162 /* If we have not block cleared the object, or if there are nonzero
4163 elements in the constructor, add assignments to the individual
4164 scalar fields of the object. */
4165 if (!cleared || num_nonzero_elements > 0)
4166 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4168 *expr_p = NULL_TREE;
4176 if (notify_temp_creation)
4179 /* Extract the real and imaginary parts out of the ctor. */
4180 gcc_assert (elts->length () == 2);
4181 r = (*elts)[0].value;
4182 i = (*elts)[1].value;
4183 if (r == NULL || i == NULL)
4185 tree zero = build_zero_cst (TREE_TYPE (type));
4192 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4193 represent creation of a complex value. */
4194 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4196 ctor = build_complex (type, r, i);
4197 TREE_OPERAND (*expr_p, 1) = ctor;
4201 ctor = build2 (COMPLEX_EXPR, type, r, i);
4202 TREE_OPERAND (*expr_p, 1) = ctor;
4203 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4206 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4214 unsigned HOST_WIDE_INT ix;
4215 constructor_elt *ce;
4217 if (notify_temp_creation)
4220 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4221 if (TREE_CONSTANT (ctor))
4223 bool constant_p = true;
4226 /* Even when ctor is constant, it might contain non-*_CST
4227 elements, such as addresses or trapping values like
4228 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4229 in VECTOR_CST nodes. */
4230 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4231 if (!CONSTANT_CLASS_P (value))
4239 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4243 /* Don't reduce an initializer constant even if we can't
4244 make a VECTOR_CST. It won't do anything for us, and it'll
4245 prevent us from representing it as a single constant. */
4246 if (initializer_constant_valid_p (ctor, type))
4249 TREE_CONSTANT (ctor) = 0;
4252 /* Vector types use CONSTRUCTOR all the way through gimple
4253 compilation as a general initializer. */
4254 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4256 enum gimplify_status tret;
4257 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4259 if (tret == GS_ERROR)
4262 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4263 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4268 /* So how did we get a CONSTRUCTOR for a scalar type? */
4272 if (ret == GS_ERROR)
4274 else if (want_value)
4281 /* If we have gimplified both sides of the initializer but have
4282 not emitted an assignment, do so now. */
4285 tree lhs = TREE_OPERAND (*expr_p, 0);
4286 tree rhs = TREE_OPERAND (*expr_p, 1);
4287 gimple init = gimple_build_assign (lhs, rhs);
4288 gimplify_seq_add_stmt (pre_p, init);
4296 /* Given a pointer value OP0, return a simplified version of an
4297 indirection through OP0, or NULL_TREE if no simplification is
4298 possible. Note that the resulting type may be different from
4299 the type pointed to in the sense that it is still compatible
4300 from the langhooks point of view. */
4303 gimple_fold_indirect_ref (tree t)
4305 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
4310 subtype = TREE_TYPE (sub);
4311 if (!POINTER_TYPE_P (subtype))
4314 if (TREE_CODE (sub) == ADDR_EXPR)
4316 tree op = TREE_OPERAND (sub, 0);
4317 tree optype = TREE_TYPE (op);
4319 if (useless_type_conversion_p (type, optype))
4322 /* *(foo *)&fooarray => fooarray[0] */
4323 if (TREE_CODE (optype) == ARRAY_TYPE
4324 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
4325 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4327 tree type_domain = TYPE_DOMAIN (optype);
4328 tree min_val = size_zero_node;
4329 if (type_domain && TYPE_MIN_VALUE (type_domain))
4330 min_val = TYPE_MIN_VALUE (type_domain);
4331 if (TREE_CODE (min_val) == INTEGER_CST)
4332 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
4334 /* *(foo *)&complexfoo => __real__ complexfoo */
4335 else if (TREE_CODE (optype) == COMPLEX_TYPE
4336 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4337 return fold_build1 (REALPART_EXPR, type, op);
4338 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
4339 else if (TREE_CODE (optype) == VECTOR_TYPE
4340 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4342 tree part_width = TYPE_SIZE (type);
4343 tree index = bitsize_int (0);
4344 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
4348 /* *(p + CST) -> ... */
4349 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4350 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4352 tree addr = TREE_OPERAND (sub, 0);
4353 tree off = TREE_OPERAND (sub, 1);
4357 addrtype = TREE_TYPE (addr);
4359 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
4360 if (TREE_CODE (addr) == ADDR_EXPR
4361 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
4362 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
4363 && host_integerp (off, 1))
4365 unsigned HOST_WIDE_INT offset = tree_low_cst (off, 1);
4366 tree part_width = TYPE_SIZE (type);
4367 unsigned HOST_WIDE_INT part_widthi
4368 = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
4369 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
4370 tree index = bitsize_int (indexi);
4371 if (offset / part_widthi
4372 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
4373 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
4377 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
4378 if (TREE_CODE (addr) == ADDR_EXPR
4379 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
4380 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
4382 tree size = TYPE_SIZE_UNIT (type);
4383 if (tree_int_cst_equal (size, off))
4384 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
4387 /* *(p + CST) -> MEM_REF <p, CST>. */
4388 if (TREE_CODE (addr) != ADDR_EXPR
4389 || DECL_P (TREE_OPERAND (addr, 0)))
4390 return fold_build2 (MEM_REF, type,
4392 build_int_cst_wide (ptype,
4393 TREE_INT_CST_LOW (off),
4394 TREE_INT_CST_HIGH (off)));
4397 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
4398 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
4399 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
4400 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
4403 tree min_val = size_zero_node;
4405 sub = gimple_fold_indirect_ref (sub);
4407 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
4408 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
4409 if (type_domain && TYPE_MIN_VALUE (type_domain))
4410 min_val = TYPE_MIN_VALUE (type_domain);
4411 if (TREE_CODE (min_val) == INTEGER_CST)
4412 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
4418 /* Given a pointer value OP0, return a simplified version of an
4419 indirection through OP0, or NULL_TREE if no simplification is
4420 possible. This may only be applied to a rhs of an expression.
4421 Note that the resulting type may be different from the type pointed
4422 to in the sense that it is still compatible from the langhooks
4426 gimple_fold_indirect_ref_rhs (tree t)
4428 return gimple_fold_indirect_ref (t);
4431 /* Subroutine of gimplify_modify_expr to do simplifications of
4432 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4433 something changes. */
4435 static enum gimplify_status
4436 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4437 gimple_seq *pre_p, gimple_seq *post_p,
4440 enum gimplify_status ret = GS_UNHANDLED;
4446 switch (TREE_CODE (*from_p))
4449 /* If we're assigning from a read-only variable initialized with
4450 a constructor, do the direct assignment from the constructor,
4451 but only if neither source nor target are volatile since this
4452 latter assignment might end up being done on a per-field basis. */
4453 if (DECL_INITIAL (*from_p)
4454 && TREE_READONLY (*from_p)
4455 && !TREE_THIS_VOLATILE (*from_p)
4456 && !TREE_THIS_VOLATILE (*to_p)
4457 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4459 tree old_from = *from_p;
4460 enum gimplify_status subret;
4462 /* Move the constructor into the RHS. */
4463 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4465 /* Let's see if gimplify_init_constructor will need to put
4467 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4469 if (subret == GS_ERROR)
4471 /* If so, revert the change. */
4483 /* If we have code like
4487 where the type of "x" is a (possibly cv-qualified variant
4488 of "A"), treat the entire expression as identical to "x".
4489 This kind of code arises in C++ when an object is bound
4490 to a const reference, and if "x" is a TARGET_EXPR we want
4491 to take advantage of the optimization below. */
4492 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4493 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4496 if (TREE_THIS_VOLATILE (t) != volatile_p)
4498 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4499 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4500 build_fold_addr_expr (t));
4501 if (REFERENCE_CLASS_P (t))
4502 TREE_THIS_VOLATILE (t) = volatile_p;
4513 /* If we are initializing something from a TARGET_EXPR, strip the
4514 TARGET_EXPR and initialize it directly, if possible. This can't
4515 be done if the initializer is void, since that implies that the
4516 temporary is set in some non-trivial way.
4518 ??? What about code that pulls out the temp and uses it
4519 elsewhere? I think that such code never uses the TARGET_EXPR as
4520 an initializer. If I'm wrong, we'll die because the temp won't
4521 have any RTL. In that case, I guess we'll need to replace
4522 references somehow. */
4523 tree init = TARGET_EXPR_INITIAL (*from_p);
4526 && !VOID_TYPE_P (TREE_TYPE (init)))
4536 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4538 gimplify_compound_expr (from_p, pre_p, true);
4544 /* If we already made some changes, let the front end have a
4545 crack at this before we break it down. */
4546 if (ret != GS_UNHANDLED)
4548 /* If we're initializing from a CONSTRUCTOR, break this into
4549 individual MODIFY_EXPRs. */
4550 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4554 /* If we're assigning to a non-register type, push the assignment
4555 down into the branches. This is mandatory for ADDRESSABLE types,
4556 since we cannot generate temporaries for such, but it saves a
4557 copy in other cases as well. */
4558 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4560 /* This code should mirror the code in gimplify_cond_expr. */
4561 enum tree_code code = TREE_CODE (*expr_p);
4562 tree cond = *from_p;
4563 tree result = *to_p;
4565 ret = gimplify_expr (&result, pre_p, post_p,
4566 is_gimple_lvalue, fb_lvalue);
4567 if (ret != GS_ERROR)
4570 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4571 TREE_OPERAND (cond, 1)
4572 = build2 (code, void_type_node, result,
4573 TREE_OPERAND (cond, 1));
4574 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4575 TREE_OPERAND (cond, 2)
4576 = build2 (code, void_type_node, unshare_expr (result),
4577 TREE_OPERAND (cond, 2));
4579 TREE_TYPE (cond) = void_type_node;
4580 recalculate_side_effects (cond);
4584 gimplify_and_add (cond, pre_p);
4585 *expr_p = unshare_expr (result);
4594 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4595 return slot so that we don't generate a temporary. */
4596 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4597 && aggregate_value_p (*from_p, *from_p))
4601 if (!(rhs_predicate_for (*to_p))(*from_p))
4602 /* If we need a temporary, *to_p isn't accurate. */
4604 /* It's OK to use the return slot directly unless it's an NRV. */
4605 else if (TREE_CODE (*to_p) == RESULT_DECL
4606 && DECL_NAME (*to_p) == NULL_TREE
4607 && needs_to_live_in_memory (*to_p))
4609 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4610 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4611 /* Don't force regs into memory. */
4613 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4614 /* It's OK to use the target directly if it's being
4617 else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
4618 /* Always use the target and thus RSO for variable-sized types.
4619 GIMPLE cannot deal with a variable-sized assignment
4620 embedded in a call statement. */
4622 else if (TREE_CODE (*to_p) != SSA_NAME
4623 && (!is_gimple_variable (*to_p)
4624 || needs_to_live_in_memory (*to_p)))
4625 /* Don't use the original target if it's already addressable;
4626 if its address escapes, and the called function uses the
4627 NRV optimization, a conforming program could see *to_p
4628 change before the called function returns; see c++/19317.
4629 When optimizing, the return_slot pass marks more functions
4630 as safe after we have escape info. */
4637 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4638 mark_addressable (*to_p);
4643 case WITH_SIZE_EXPR:
4644 /* Likewise for calls that return an aggregate of non-constant size,
4645 since we would not be able to generate a temporary at all. */
4646 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4648 *from_p = TREE_OPERAND (*from_p, 0);
4649 /* We don't change ret in this case because the
4650 WITH_SIZE_EXPR might have been added in
4651 gimplify_modify_expr, so returning GS_OK would lead to an
4657 /* If we're initializing from a container, push the initialization
4659 case CLEANUP_POINT_EXPR:
4661 case STATEMENT_LIST:
4663 tree wrap = *from_p;
4666 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4668 if (ret != GS_ERROR)
4671 t = voidify_wrapper_expr (wrap, *expr_p);
4672 gcc_assert (t == *expr_p);
4676 gimplify_and_add (wrap, pre_p);
4677 *expr_p = unshare_expr (*to_p);
4684 case COMPOUND_LITERAL_EXPR:
4686 tree complit = TREE_OPERAND (*expr_p, 1);
4687 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4688 tree decl = DECL_EXPR_DECL (decl_s);
4689 tree init = DECL_INITIAL (decl);
4691 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4692 into struct T x = { 0, 1, 2 } if the address of the
4693 compound literal has never been taken. */
4694 if (!TREE_ADDRESSABLE (complit)
4695 && !TREE_ADDRESSABLE (decl)
4698 *expr_p = copy_node (*expr_p);
4699 TREE_OPERAND (*expr_p, 1) = init;
4714 /* Return true if T looks like a valid GIMPLE statement. */
4717 is_gimple_stmt (tree t)
4719 const enum tree_code code = TREE_CODE (t);
4724 /* The only valid NOP_EXPR is the empty statement. */
4725 return IS_EMPTY_STMT (t);
4729 /* These are only valid if they're void. */
4730 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4736 case CASE_LABEL_EXPR:
4737 case TRY_CATCH_EXPR:
4738 case TRY_FINALLY_EXPR:
4739 case EH_FILTER_EXPR:
4742 case STATEMENT_LIST:
4752 /* These are always void. */
4758 /* These are valid regardless of their type. */
4767 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4768 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4769 DECL_GIMPLE_REG_P set.
4771 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4772 other, unmodified part of the complex object just before the total store.
4773 As a consequence, if the object is still uninitialized, an undefined value
4774 will be loaded into a register, which may result in a spurious exception
4775 if the register is floating-point and the value happens to be a signaling
4776 NaN for example. Then the fully-fledged complex operations lowering pass
4777 followed by a DCE pass are necessary in order to fix things up. */
4779 static enum gimplify_status
4780 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4783 enum tree_code code, ocode;
4784 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4786 lhs = TREE_OPERAND (*expr_p, 0);
4787 rhs = TREE_OPERAND (*expr_p, 1);
4788 code = TREE_CODE (lhs);
4789 lhs = TREE_OPERAND (lhs, 0);
4791 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4792 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4793 TREE_NO_WARNING (other) = 1;
4794 other = get_formal_tmp_var (other, pre_p);
4796 realpart = code == REALPART_EXPR ? rhs : other;
4797 imagpart = code == REALPART_EXPR ? other : rhs;
4799 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4800 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4802 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4804 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4805 *expr_p = (want_value) ? rhs : NULL_TREE;
4810 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4816 PRE_P points to the list where side effects that must happen before
4817 *EXPR_P should be stored.
4819 POST_P points to the list where side effects that must happen after
4820 *EXPR_P should be stored.
4822 WANT_VALUE is nonzero iff we want to use the value of this expression
4823 in another expression. */
4825 static enum gimplify_status
4826 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4829 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4830 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4831 enum gimplify_status ret = GS_UNHANDLED;
4833 location_t loc = EXPR_LOCATION (*expr_p);
4834 gimple_stmt_iterator gsi;
4836 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4837 || TREE_CODE (*expr_p) == INIT_EXPR);
4839 /* Trying to simplify a clobber using normal logic doesn't work,
4840 so handle it here. */
4841 if (TREE_CLOBBER_P (*from_p))
4843 gcc_assert (!want_value && TREE_CODE (*to_p) == VAR_DECL);
4844 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4849 /* Insert pointer conversions required by the middle-end that are not
4850 required by the frontend. This fixes middle-end type checking for
4851 for example gcc.dg/redecl-6.c. */
4852 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4854 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4855 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4856 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4859 /* See if any simplifications can be done based on what the RHS is. */
4860 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4862 if (ret != GS_UNHANDLED)
4865 /* For zero sized types only gimplify the left hand side and right hand
4866 side as statements and throw away the assignment. Do this after
4867 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4869 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4871 gimplify_stmt (from_p, pre_p);
4872 gimplify_stmt (to_p, pre_p);
4873 *expr_p = NULL_TREE;
4877 /* If the value being copied is of variable width, compute the length
4878 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4879 before gimplifying any of the operands so that we can resolve any
4880 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4881 the size of the expression to be copied, not of the destination, so
4882 that is what we must do here. */
4883 maybe_with_size_expr (from_p);
4885 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4886 if (ret == GS_ERROR)
4889 /* As a special case, we have to temporarily allow for assignments
4890 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4891 a toplevel statement, when gimplifying the GENERIC expression
4892 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4893 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4895 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4896 prevent gimplify_expr from trying to create a new temporary for
4897 foo's LHS, we tell it that it should only gimplify until it
4898 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4899 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4900 and all we need to do here is set 'a' to be its LHS. */
4901 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4903 if (ret == GS_ERROR)
4906 /* Now see if the above changed *from_p to something we handle specially. */
4907 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4909 if (ret != GS_UNHANDLED)
4912 /* If we've got a variable sized assignment between two lvalues (i.e. does
4913 not involve a call), then we can make things a bit more straightforward
4914 by converting the assignment to memcpy or memset. */
4915 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4917 tree from = TREE_OPERAND (*from_p, 0);
4918 tree size = TREE_OPERAND (*from_p, 1);
4920 if (TREE_CODE (from) == CONSTRUCTOR)
4921 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4923 if (is_gimple_addressable (from))
4926 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4931 /* Transform partial stores to non-addressable complex variables into
4932 total stores. This allows us to use real instead of virtual operands
4933 for these variables, which improves optimization. */
4934 if ((TREE_CODE (*to_p) == REALPART_EXPR
4935 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4936 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4937 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4939 /* Try to alleviate the effects of the gimplification creating artificial
4940 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4941 if (!gimplify_ctxp->into_ssa
4942 && TREE_CODE (*from_p) == VAR_DECL
4943 && DECL_IGNORED_P (*from_p)
4945 && !DECL_IGNORED_P (*to_p))
4947 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4949 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4950 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4951 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4954 if (want_value && TREE_THIS_VOLATILE (*to_p))
4955 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4957 if (TREE_CODE (*from_p) == CALL_EXPR)
4959 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4960 instead of a GIMPLE_ASSIGN. */
4961 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4962 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4963 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4964 assign = gimple_build_call_from_tree (*from_p);
4965 gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
4966 if (!gimple_call_noreturn_p (assign))
4967 gimple_call_set_lhs (assign, *to_p);
4971 assign = gimple_build_assign (*to_p, *from_p);
4972 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4975 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4977 /* We should have got an SSA name from the start. */
4978 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
4981 gimplify_seq_add_stmt (pre_p, assign);
4982 gsi = gsi_last (*pre_p);
4987 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4996 /* Gimplify a comparison between two variable-sized objects. Do this
4997 with a call to BUILT_IN_MEMCMP. */
4999 static enum gimplify_status
5000 gimplify_variable_sized_compare (tree *expr_p)
5002 location_t loc = EXPR_LOCATION (*expr_p);
5003 tree op0 = TREE_OPERAND (*expr_p, 0);
5004 tree op1 = TREE_OPERAND (*expr_p, 1);
5005 tree t, arg, dest, src, expr;
5007 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5008 arg = unshare_expr (arg);
5009 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5010 src = build_fold_addr_expr_loc (loc, op1);
5011 dest = build_fold_addr_expr_loc (loc, op0);
5012 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5013 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5016 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5017 SET_EXPR_LOCATION (expr, loc);
5023 /* Gimplify a comparison between two aggregate objects of integral scalar
5024 mode as a comparison between the bitwise equivalent scalar values. */
5026 static enum gimplify_status
5027 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5029 location_t loc = EXPR_LOCATION (*expr_p);
5030 tree op0 = TREE_OPERAND (*expr_p, 0);
5031 tree op1 = TREE_OPERAND (*expr_p, 1);
5033 tree type = TREE_TYPE (op0);
5034 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5036 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5037 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5040 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5045 /* Gimplify an expression sequence. This function gimplifies each
5046 expression and rewrites the original expression with the last
5047 expression of the sequence in GIMPLE form.
5049 PRE_P points to the list where the side effects for all the
5050 expressions in the sequence will be emitted.
5052 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5054 static enum gimplify_status
5055 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5061 tree *sub_p = &TREE_OPERAND (t, 0);
5063 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5064 gimplify_compound_expr (sub_p, pre_p, false);
5066 gimplify_stmt (sub_p, pre_p);
5068 t = TREE_OPERAND (t, 1);
5070 while (TREE_CODE (t) == COMPOUND_EXPR);
5077 gimplify_stmt (expr_p, pre_p);
5082 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5083 gimplify. After gimplification, EXPR_P will point to a new temporary
5084 that holds the original value of the SAVE_EXPR node.
5086 PRE_P points to the list where side effects that must happen before
5087 *EXPR_P should be stored. */
5089 static enum gimplify_status
5090 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5092 enum gimplify_status ret = GS_ALL_DONE;
5095 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5096 val = TREE_OPERAND (*expr_p, 0);
5098 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5099 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5101 /* The operand may be a void-valued expression such as SAVE_EXPRs
5102 generated by the Java frontend for class initialization. It is
5103 being executed only for its side-effects. */
5104 if (TREE_TYPE (val) == void_type_node)
5106 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5107 is_gimple_stmt, fb_none);
5111 val = get_initialized_tmp_var (val, pre_p, post_p);
5113 TREE_OPERAND (*expr_p, 0) = val;
5114 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5122 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5129 PRE_P points to the list where side effects that must happen before
5130 *EXPR_P should be stored.
5132 POST_P points to the list where side effects that must happen after
5133 *EXPR_P should be stored. */
5135 static enum gimplify_status
5136 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5138 tree expr = *expr_p;
5139 tree op0 = TREE_OPERAND (expr, 0);
5140 enum gimplify_status ret;
5141 location_t loc = EXPR_LOCATION (*expr_p);
5143 switch (TREE_CODE (op0))
5147 /* Check if we are dealing with an expression of the form '&*ptr'.
5148 While the front end folds away '&*ptr' into 'ptr', these
5149 expressions may be generated internally by the compiler (e.g.,
5150 builtins like __builtin_va_end). */
5151 /* Caution: the silent array decomposition semantics we allow for
5152 ADDR_EXPR means we can't always discard the pair. */
5153 /* Gimplification of the ADDR_EXPR operand may drop
5154 cv-qualification conversions, so make sure we add them if
5157 tree op00 = TREE_OPERAND (op0, 0);
5158 tree t_expr = TREE_TYPE (expr);
5159 tree t_op00 = TREE_TYPE (op00);
5161 if (!useless_type_conversion_p (t_expr, t_op00))
5162 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5168 case VIEW_CONVERT_EXPR:
5169 /* Take the address of our operand and then convert it to the type of
5172 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5173 all clear. The impact of this transformation is even less clear. */
5175 /* If the operand is a useless conversion, look through it. Doing so
5176 guarantees that the ADDR_EXPR and its operand will remain of the
5178 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5179 op0 = TREE_OPERAND (op0, 0);
5181 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5182 build_fold_addr_expr_loc (loc,
5183 TREE_OPERAND (op0, 0)));
5188 /* We use fb_either here because the C frontend sometimes takes
5189 the address of a call that returns a struct; see
5190 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5191 the implied temporary explicit. */
5193 /* Make the operand addressable. */
5194 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5195 is_gimple_addressable, fb_either);
5196 if (ret == GS_ERROR)
5199 /* Then mark it. Beware that it may not be possible to do so directly
5200 if a temporary has been created by the gimplification. */
5201 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5203 op0 = TREE_OPERAND (expr, 0);
5205 /* For various reasons, the gimplification of the expression
5206 may have made a new INDIRECT_REF. */
5207 if (TREE_CODE (op0) == INDIRECT_REF)
5208 goto do_indirect_ref;
5210 mark_addressable (TREE_OPERAND (expr, 0));
5212 /* The FEs may end up building ADDR_EXPRs early on a decl with
5213 an incomplete type. Re-build ADDR_EXPRs in canonical form
5215 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5216 *expr_p = build_fold_addr_expr (op0);
5218 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5219 recompute_tree_invariant_for_addr_expr (*expr_p);
5221 /* If we re-built the ADDR_EXPR add a conversion to the original type
5223 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5224 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5232 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5233 value; output operands should be a gimple lvalue. */
5235 static enum gimplify_status
5236 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5240 const char **oconstraints;
5243 const char *constraint;
5244 bool allows_mem, allows_reg, is_inout;
5245 enum gimplify_status ret, tret;
5247 vec<tree, va_gc> *inputs;
5248 vec<tree, va_gc> *outputs;
5249 vec<tree, va_gc> *clobbers;
5250 vec<tree, va_gc> *labels;
5254 noutputs = list_length (ASM_OUTPUTS (expr));
5255 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5263 link_next = NULL_TREE;
5264 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5267 size_t constraint_len;
5269 link_next = TREE_CHAIN (link);
5273 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5274 constraint_len = strlen (constraint);
5275 if (constraint_len == 0)
5278 ok = parse_output_constraint (&constraint, i, 0, 0,
5279 &allows_mem, &allows_reg, &is_inout);
5286 if (!allows_reg && allows_mem)
5287 mark_addressable (TREE_VALUE (link));
5289 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5290 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5291 fb_lvalue | fb_mayfail);
5292 if (tret == GS_ERROR)
5294 error ("invalid lvalue in asm output %d", i);
5298 vec_safe_push (outputs, link);
5299 TREE_CHAIN (link) = NULL_TREE;
5303 /* An input/output operand. To give the optimizers more
5304 flexibility, split it into separate input and output
5309 /* Turn the in/out constraint into an output constraint. */
5310 char *p = xstrdup (constraint);
5312 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
5314 /* And add a matching input constraint. */
5317 sprintf (buf, "%d", i);
5319 /* If there are multiple alternatives in the constraint,
5320 handle each of them individually. Those that allow register
5321 will be replaced with operand number, the others will stay
5323 if (strchr (p, ',') != NULL)
5325 size_t len = 0, buflen = strlen (buf);
5326 char *beg, *end, *str, *dst;
5330 end = strchr (beg, ',');
5332 end = strchr (beg, '\0');
5333 if ((size_t) (end - beg) < buflen)
5336 len += end - beg + 1;
5343 str = (char *) alloca (len);
5344 for (beg = p + 1, dst = str;;)
5347 bool mem_p, reg_p, inout_p;
5349 end = strchr (beg, ',');
5354 parse_output_constraint (&tem, i, 0, 0,
5355 &mem_p, ®_p, &inout_p);
5360 memcpy (dst, buf, buflen);
5369 memcpy (dst, beg, len);
5378 input = build_string (dst - str, str);
5381 input = build_string (strlen (buf), buf);
5384 input = build_string (constraint_len - 1, constraint + 1);
5388 input = build_tree_list (build_tree_list (NULL_TREE, input),
5389 unshare_expr (TREE_VALUE (link)));
5390 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5394 link_next = NULL_TREE;
5395 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5397 link_next = TREE_CHAIN (link);
5398 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5399 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5400 oconstraints, &allows_mem, &allows_reg);
5402 /* If we can't make copies, we can only accept memory. */
5403 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5409 error ("impossible constraint in %<asm%>");
5410 error ("non-memory input %d must stay in memory", i);
5415 /* If the operand is a memory input, it should be an lvalue. */
5416 if (!allows_reg && allows_mem)
5418 tree inputv = TREE_VALUE (link);
5419 STRIP_NOPS (inputv);
5420 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5421 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5422 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5423 || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
5424 TREE_VALUE (link) = error_mark_node;
5425 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5426 is_gimple_lvalue, fb_lvalue | fb_mayfail);
5427 mark_addressable (TREE_VALUE (link));
5428 if (tret == GS_ERROR)
5430 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5431 input_location = EXPR_LOCATION (TREE_VALUE (link));
5432 error ("memory input %d is not directly addressable", i);
5438 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5439 is_gimple_asm_val, fb_rvalue);
5440 if (tret == GS_ERROR)
5444 TREE_CHAIN (link) = NULL_TREE;
5445 vec_safe_push (inputs, link);
5448 for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
5449 vec_safe_push (clobbers, link);
5451 for (link = ASM_LABELS (expr); link; ++i, link = TREE_CHAIN (link))
5452 vec_safe_push (labels, link);
5454 /* Do not add ASMs with errors to the gimple IL stream. */
5455 if (ret != GS_ERROR)
5457 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5458 inputs, outputs, clobbers, labels);
5460 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5461 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5463 gimplify_seq_add_stmt (pre_p, stmt);
5469 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5470 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5471 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5472 return to this function.
5474 FIXME should we complexify the prequeue handling instead? Or use flags
5475 for all the cleanups and let the optimizer tighten them up? The current
5476 code seems pretty fragile; it will break on a cleanup within any
5477 non-conditional nesting. But any such nesting would be broken, anyway;
5478 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5479 and continues out of it. We can do that at the RTL level, though, so
5480 having an optimizer to tighten up try/finally regions would be a Good
5483 static enum gimplify_status
5484 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5486 gimple_stmt_iterator iter;
5487 gimple_seq body_sequence = NULL;
5489 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5491 /* We only care about the number of conditions between the innermost
5492 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5493 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5494 int old_conds = gimplify_ctxp->conditions;
5495 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5496 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5497 gimplify_ctxp->conditions = 0;
5498 gimplify_ctxp->conditional_cleanups = NULL;
5499 gimplify_ctxp->in_cleanup_point_expr = true;
5501 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5503 gimplify_ctxp->conditions = old_conds;
5504 gimplify_ctxp->conditional_cleanups = old_cleanups;
5505 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
5507 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5509 gimple wce = gsi_stmt (iter);
5511 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5513 if (gsi_one_before_end_p (iter))
5515 /* Note that gsi_insert_seq_before and gsi_remove do not
5516 scan operands, unlike some other sequence mutators. */
5517 if (!gimple_wce_cleanup_eh_only (wce))
5518 gsi_insert_seq_before_without_update (&iter,
5519 gimple_wce_cleanup (wce),
5521 gsi_remove (&iter, true);
5528 enum gimple_try_flags kind;
5530 if (gimple_wce_cleanup_eh_only (wce))
5531 kind = GIMPLE_TRY_CATCH;
5533 kind = GIMPLE_TRY_FINALLY;
5534 seq = gsi_split_seq_after (iter);
5536 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5537 /* Do not use gsi_replace here, as it may scan operands.
5538 We want to do a simple structural modification only. */
5539 gsi_set_stmt (&iter, gtry);
5540 iter = gsi_start (gtry->gimple_try.eval);
5547 gimplify_seq_add_seq (pre_p, body_sequence);
5560 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5561 is the cleanup action required. EH_ONLY is true if the cleanup should
5562 only be executed if an exception is thrown, not on normal exit. */
5565 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5568 gimple_seq cleanup_stmts = NULL;
5570 /* Errors can result in improperly nested cleanups. Which results in
5571 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5575 if (gimple_conditional_context ())
5577 /* If we're in a conditional context, this is more complex. We only
5578 want to run the cleanup if we actually ran the initialization that
5579 necessitates it, but we want to run it after the end of the
5580 conditional context. So we wrap the try/finally around the
5581 condition and use a flag to determine whether or not to actually
5582 run the destructor. Thus
5586 becomes (approximately)
5590 if (test) { A::A(temp); flag = 1; val = f(temp); }
5593 if (flag) A::~A(temp);
5597 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5598 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5599 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5601 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5602 gimplify_stmt (&cleanup, &cleanup_stmts);
5603 wce = gimple_build_wce (cleanup_stmts);
5605 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5606 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5607 gimplify_seq_add_stmt (pre_p, ftrue);
5609 /* Because of this manipulation, and the EH edges that jump
5610 threading cannot redirect, the temporary (VAR) will appear
5611 to be used uninitialized. Don't warn. */
5612 TREE_NO_WARNING (var) = 1;
5616 gimplify_stmt (&cleanup, &cleanup_stmts);
5617 wce = gimple_build_wce (cleanup_stmts);
5618 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5619 gimplify_seq_add_stmt (pre_p, wce);
5623 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5625 static enum gimplify_status
5626 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5628 tree targ = *expr_p;
5629 tree temp = TARGET_EXPR_SLOT (targ);
5630 tree init = TARGET_EXPR_INITIAL (targ);
5631 enum gimplify_status ret;
5635 tree cleanup = NULL_TREE;
5637 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5638 to the temps list. Handle also variable length TARGET_EXPRs. */
5639 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5641 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5642 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5643 gimplify_vla_decl (temp, pre_p);
5646 gimple_add_tmp_var (temp);
5648 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5649 expression is supposed to initialize the slot. */
5650 if (VOID_TYPE_P (TREE_TYPE (init)))
5651 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5654 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5656 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5658 ggc_free (init_expr);
5660 if (ret == GS_ERROR)
5662 /* PR c++/28266 Make sure this is expanded only once. */
5663 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5667 gimplify_and_add (init, pre_p);
5669 /* If needed, push the cleanup for the temp. */
5670 if (TARGET_EXPR_CLEANUP (targ))
5672 if (CLEANUP_EH_ONLY (targ))
5673 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5674 CLEANUP_EH_ONLY (targ), pre_p);
5676 cleanup = TARGET_EXPR_CLEANUP (targ);
5679 /* Add a clobber for the temporary going out of scope, like
5680 gimplify_bind_expr. */
5681 if (gimplify_ctxp->in_cleanup_point_expr
5682 && needs_to_live_in_memory (temp)
5683 && flag_stack_reuse == SR_ALL)
5685 tree clobber = build_constructor (TREE_TYPE (temp),
5687 TREE_THIS_VOLATILE (clobber) = true;
5688 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5690 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5697 gimple_push_cleanup (temp, cleanup, false, pre_p);
5699 /* Only expand this once. */
5700 TREE_OPERAND (targ, 3) = init;
5701 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5704 /* We should have expanded this before. */
5705 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5711 /* Gimplification of expression trees. */
5713 /* Gimplify an expression which appears at statement context. The
5714 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5715 NULL, a new sequence is allocated.
5717 Return true if we actually added a statement to the queue. */
5720 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5722 gimple_seq_node last;
5724 last = gimple_seq_last (*seq_p);
5725 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5726 return last != gimple_seq_last (*seq_p);
5729 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5730 to CTX. If entries already exist, force them to be some flavor of private.
5731 If there is no enclosing parallel, do nothing. */
5734 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5738 if (decl == NULL || !DECL_P (decl))
5743 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5746 if (n->value & GOVD_SHARED)
5747 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5751 else if (ctx->region_type != ORT_WORKSHARE)
5752 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5754 ctx = ctx->outer_context;
5759 /* Similarly for each of the type sizes of TYPE. */
5762 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5764 if (type == NULL || type == error_mark_node)
5766 type = TYPE_MAIN_VARIANT (type);
5768 if (pointer_set_insert (ctx->privatized_types, type))
5771 switch (TREE_CODE (type))
5777 case FIXED_POINT_TYPE:
5778 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5779 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5783 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5784 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5789 case QUAL_UNION_TYPE:
5792 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5793 if (TREE_CODE (field) == FIELD_DECL)
5795 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5796 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5802 case REFERENCE_TYPE:
5803 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5810 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5811 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5812 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5815 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5818 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5821 unsigned int nflags;
5824 if (error_operand_p (decl))
5827 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5828 there are constructors involved somewhere. */
5829 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5830 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5833 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5836 /* We shouldn't be re-adding the decl with the same data
5838 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5839 /* The only combination of data sharing classes we should see is
5840 FIRSTPRIVATE and LASTPRIVATE. */
5841 nflags = n->value | flags;
5842 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5843 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
5848 /* When adding a variable-sized variable, we have to handle all sorts
5849 of additional bits of data: the pointer replacement variable, and
5850 the parameters of the type. */
5851 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5853 /* Add the pointer replacement variable as PRIVATE if the variable
5854 replacement is private, else FIRSTPRIVATE since we'll need the
5855 address of the original variable either for SHARED, or for the
5856 copy into or out of the context. */
5857 if (!(flags & GOVD_LOCAL))
5859 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5860 nflags |= flags & GOVD_SEEN;
5861 t = DECL_VALUE_EXPR (decl);
5862 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5863 t = TREE_OPERAND (t, 0);
5864 gcc_assert (DECL_P (t));
5865 omp_add_variable (ctx, t, nflags);
5868 /* Add all of the variable and type parameters (which should have
5869 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5870 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5871 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5872 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5874 /* The variable-sized variable itself is never SHARED, only some form
5875 of PRIVATE. The sharing would take place via the pointer variable
5876 which we remapped above. */
5877 if (flags & GOVD_SHARED)
5878 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5879 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5881 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5882 alloca statement we generate for the variable, so make sure it
5883 is available. This isn't automatically needed for the SHARED
5884 case, since we won't be allocating local storage then.
5885 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5886 in this case omp_notice_variable will be called later
5887 on when it is gimplified. */
5888 else if (! (flags & GOVD_LOCAL)
5889 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5890 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5892 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5894 gcc_assert ((flags & GOVD_LOCAL) == 0);
5895 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5897 /* Similar to the direct variable sized case above, we'll need the
5898 size of references being privatized. */
5899 if ((flags & GOVD_SHARED) == 0)
5901 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5902 if (TREE_CODE (t) != INTEGER_CST)
5903 omp_notice_variable (ctx, t, true);
5907 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5910 /* Notice a threadprivate variable DECL used in OpenMP context CTX.
5911 This just prints out diagnostics about threadprivate variable uses
5912 in untied tasks. If DECL2 is non-NULL, prevent this warning
5913 on that variable. */
5916 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5921 if (ctx->region_type != ORT_UNTIED_TASK)
5923 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5926 error ("threadprivate variable %qE used in untied task",
5928 error_at (ctx->location, "enclosing task");
5929 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5932 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5936 /* Record the fact that DECL was used within the OpenMP context CTX.
5937 IN_CODE is true when real code uses DECL, and false when we should
5938 merely emit default(none) errors. Return true if DECL is going to
5939 be remapped and thus DECL shouldn't be gimplified into its
5940 DECL_VALUE_EXPR (if any). */
5943 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5946 unsigned flags = in_code ? GOVD_SEEN : 0;
5947 bool ret = false, shared;
5949 if (error_operand_p (decl))
5952 /* Threadprivate variables are predetermined. */
5953 if (is_global_var (decl))
5955 if (DECL_THREAD_LOCAL_P (decl))
5956 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
5958 if (DECL_HAS_VALUE_EXPR_P (decl))
5960 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5962 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5963 return omp_notice_threadprivate_variable (ctx, decl, value);
5967 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5970 enum omp_clause_default_kind default_kind, kind;
5971 struct gimplify_omp_ctx *octx;
5973 if (ctx->region_type == ORT_WORKSHARE)
5976 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5977 remapped firstprivate instead of shared. To some extent this is
5978 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5979 default_kind = ctx->default_kind;
5980 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5981 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5982 default_kind = kind;
5984 switch (default_kind)
5986 case OMP_CLAUSE_DEFAULT_NONE:
5987 error ("%qE not specified in enclosing parallel",
5988 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5989 if ((ctx->region_type & ORT_TASK) != 0)
5990 error_at (ctx->location, "enclosing task");
5992 error_at (ctx->location, "enclosing parallel");
5994 case OMP_CLAUSE_DEFAULT_SHARED:
5995 flags |= GOVD_SHARED;
5997 case OMP_CLAUSE_DEFAULT_PRIVATE:
5998 flags |= GOVD_PRIVATE;
6000 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
6001 flags |= GOVD_FIRSTPRIVATE;
6003 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
6004 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6005 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
6006 if (ctx->outer_context)
6007 omp_notice_variable (ctx->outer_context, decl, in_code);
6008 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
6012 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
6013 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6015 flags |= GOVD_FIRSTPRIVATE;
6018 if ((octx->region_type & ORT_PARALLEL) != 0)
6021 if (flags & GOVD_FIRSTPRIVATE)
6024 && (TREE_CODE (decl) == PARM_DECL
6025 || (!is_global_var (decl)
6026 && DECL_CONTEXT (decl) == current_function_decl)))
6028 flags |= GOVD_FIRSTPRIVATE;
6031 flags |= GOVD_SHARED;
6037 if ((flags & GOVD_PRIVATE)
6038 && lang_hooks.decls.omp_private_outer_ref (decl))
6039 flags |= GOVD_PRIVATE_OUTER_REF;
6041 omp_add_variable (ctx, decl, flags);
6043 shared = (flags & GOVD_SHARED) != 0;
6044 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6048 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
6049 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
6051 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6054 tree t = DECL_VALUE_EXPR (decl);
6055 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6056 t = TREE_OPERAND (t, 0);
6057 gcc_assert (DECL_P (t));
6058 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6059 n2->value |= GOVD_SEEN;
6062 shared = ((flags | n->value) & GOVD_SHARED) != 0;
6063 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6065 /* If nothing changed, there's nothing left to do. */
6066 if ((n->value & flags) == flags)
6072 /* If the variable is private in the current context, then we don't
6073 need to propagate anything to an outer context. */
6074 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
6076 if (ctx->outer_context
6077 && omp_notice_variable (ctx->outer_context, decl, in_code))
6082 /* Verify that DECL is private within CTX. If there's specific information
6083 to the contrary in the innermost scope, generate an error. */
6086 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
6090 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6093 if (n->value & GOVD_SHARED)
6095 if (ctx == gimplify_omp_ctxp)
6097 error ("iteration variable %qE should be private",
6099 n->value = GOVD_PRIVATE;
6105 else if ((n->value & GOVD_EXPLICIT) != 0
6106 && (ctx == gimplify_omp_ctxp
6107 || (ctx->region_type == ORT_COMBINED_PARALLEL
6108 && gimplify_omp_ctxp->outer_context == ctx)))
6110 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
6111 error ("iteration variable %qE should not be firstprivate",
6113 else if ((n->value & GOVD_REDUCTION) != 0)
6114 error ("iteration variable %qE should not be reduction",
6117 return (ctx == gimplify_omp_ctxp
6118 || (ctx->region_type == ORT_COMBINED_PARALLEL
6119 && gimplify_omp_ctxp->outer_context == ctx));
6122 if (ctx->region_type != ORT_WORKSHARE)
6124 else if (ctx->outer_context)
6125 return omp_is_private (ctx->outer_context, decl);
6129 /* Return true if DECL is private within a parallel region
6130 that binds to the current construct's context or in parallel
6131 region's REDUCTION clause. */
6134 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
6140 ctx = ctx->outer_context;
6142 return !(is_global_var (decl)
6143 /* References might be private, but might be shared too. */
6144 || lang_hooks.decls.omp_privatize_by_reference (decl));
6146 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6148 return (n->value & GOVD_SHARED) == 0;
6150 while (ctx->region_type == ORT_WORKSHARE);
6154 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
6155 and previous omp contexts. */
6158 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
6159 enum omp_region_type region_type)
6161 struct gimplify_omp_ctx *ctx, *outer_ctx;
6162 struct gimplify_ctx gctx;
6165 ctx = new_omp_context (region_type);
6166 outer_ctx = ctx->outer_context;
6168 while ((c = *list_p) != NULL)
6170 bool remove = false;
6171 bool notice_outer = true;
6172 const char *check_non_private = NULL;
6176 switch (OMP_CLAUSE_CODE (c))
6178 case OMP_CLAUSE_PRIVATE:
6179 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
6180 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6182 flags |= GOVD_PRIVATE_OUTER_REF;
6183 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6186 notice_outer = false;
6188 case OMP_CLAUSE_SHARED:
6189 flags = GOVD_SHARED | GOVD_EXPLICIT;
6191 case OMP_CLAUSE_FIRSTPRIVATE:
6192 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
6193 check_non_private = "firstprivate";
6195 case OMP_CLAUSE_LASTPRIVATE:
6196 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
6197 check_non_private = "lastprivate";
6199 case OMP_CLAUSE_REDUCTION:
6200 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
6201 check_non_private = "reduction";
6205 decl = OMP_CLAUSE_DECL (c);
6206 if (error_operand_p (decl))
6211 omp_add_variable (ctx, decl, flags);
6212 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6213 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6215 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
6216 GOVD_LOCAL | GOVD_SEEN);
6217 gimplify_omp_ctxp = ctx;
6218 push_gimplify_context (&gctx);
6220 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6221 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6223 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
6224 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
6225 pop_gimplify_context
6226 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
6227 push_gimplify_context (&gctx);
6228 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
6229 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6230 pop_gimplify_context
6231 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
6232 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
6233 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
6235 gimplify_omp_ctxp = outer_ctx;
6237 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6238 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
6240 gimplify_omp_ctxp = ctx;
6241 push_gimplify_context (&gctx);
6242 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
6244 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6246 TREE_SIDE_EFFECTS (bind) = 1;
6247 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
6248 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
6250 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
6251 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6252 pop_gimplify_context
6253 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
6254 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
6256 gimplify_omp_ctxp = outer_ctx;
6262 case OMP_CLAUSE_COPYIN:
6263 case OMP_CLAUSE_COPYPRIVATE:
6264 decl = OMP_CLAUSE_DECL (c);
6265 if (error_operand_p (decl))
6272 omp_notice_variable (outer_ctx, decl, true);
6273 if (check_non_private
6274 && region_type == ORT_WORKSHARE
6275 && omp_check_private (ctx, decl))
6277 error ("%s variable %qE is private in outer context",
6278 check_non_private, DECL_NAME (decl));
6283 case OMP_CLAUSE_FINAL:
6285 OMP_CLAUSE_OPERAND (c, 0)
6286 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
6289 case OMP_CLAUSE_SCHEDULE:
6290 case OMP_CLAUSE_NUM_THREADS:
6291 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
6292 is_gimple_val, fb_rvalue) == GS_ERROR)
6296 case OMP_CLAUSE_NOWAIT:
6297 case OMP_CLAUSE_ORDERED:
6298 case OMP_CLAUSE_UNTIED:
6299 case OMP_CLAUSE_COLLAPSE:
6300 case OMP_CLAUSE_MERGEABLE:
6303 case OMP_CLAUSE_DEFAULT:
6304 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
6312 *list_p = OMP_CLAUSE_CHAIN (c);
6314 list_p = &OMP_CLAUSE_CHAIN (c);
6317 gimplify_omp_ctxp = ctx;
6320 /* For all variables that were not actually used within the context,
6321 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
6324 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
6326 tree *list_p = (tree *) data;
6327 tree decl = (tree) n->key;
6328 unsigned flags = n->value;
6329 enum omp_clause_code code;
6333 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
6335 if ((flags & GOVD_SEEN) == 0)
6337 if (flags & GOVD_DEBUG_PRIVATE)
6339 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
6340 private_debug = true;
6344 = lang_hooks.decls.omp_private_debug_clause (decl,
6345 !!(flags & GOVD_SHARED));
6347 code = OMP_CLAUSE_PRIVATE;
6348 else if (flags & GOVD_SHARED)
6350 if (is_global_var (decl))
6352 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6356 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6357 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6358 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
6360 ctx = ctx->outer_context;
6365 code = OMP_CLAUSE_SHARED;
6367 else if (flags & GOVD_PRIVATE)
6368 code = OMP_CLAUSE_PRIVATE;
6369 else if (flags & GOVD_FIRSTPRIVATE)
6370 code = OMP_CLAUSE_FIRSTPRIVATE;
6374 clause = build_omp_clause (input_location, code);
6375 OMP_CLAUSE_DECL (clause) = decl;
6376 OMP_CLAUSE_CHAIN (clause) = *list_p;
6378 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
6379 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6380 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
6382 lang_hooks.decls.omp_finish_clause (clause);
6388 gimplify_adjust_omp_clauses (tree *list_p)
6390 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6393 while ((c = *list_p) != NULL)
6396 bool remove = false;
6398 switch (OMP_CLAUSE_CODE (c))
6400 case OMP_CLAUSE_PRIVATE:
6401 case OMP_CLAUSE_SHARED:
6402 case OMP_CLAUSE_FIRSTPRIVATE:
6403 decl = OMP_CLAUSE_DECL (c);
6404 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6405 remove = !(n->value & GOVD_SEEN);
6408 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
6409 if ((n->value & GOVD_DEBUG_PRIVATE)
6410 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6412 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6413 || ((n->value & GOVD_DATA_SHARE_CLASS)
6415 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
6416 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
6421 case OMP_CLAUSE_LASTPRIVATE:
6422 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6423 accurately reflect the presence of a FIRSTPRIVATE clause. */
6424 decl = OMP_CLAUSE_DECL (c);
6425 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6426 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6427 = (n->value & GOVD_FIRSTPRIVATE) != 0;
6430 case OMP_CLAUSE_REDUCTION:
6431 case OMP_CLAUSE_COPYIN:
6432 case OMP_CLAUSE_COPYPRIVATE:
6434 case OMP_CLAUSE_NUM_THREADS:
6435 case OMP_CLAUSE_SCHEDULE:
6436 case OMP_CLAUSE_NOWAIT:
6437 case OMP_CLAUSE_ORDERED:
6438 case OMP_CLAUSE_DEFAULT:
6439 case OMP_CLAUSE_UNTIED:
6440 case OMP_CLAUSE_COLLAPSE:
6441 case OMP_CLAUSE_FINAL:
6442 case OMP_CLAUSE_MERGEABLE:
6450 *list_p = OMP_CLAUSE_CHAIN (c);
6452 list_p = &OMP_CLAUSE_CHAIN (c);
6455 /* Add in any implicit data sharing. */
6456 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
6458 gimplify_omp_ctxp = ctx->outer_context;
6459 delete_omp_context (ctx);
6462 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
6463 gimplification of the body, as well as scanning the body for used
6464 variables. We need to do this scan now, because variable-sized
6465 decls will be decomposed during gimplification. */
6468 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6470 tree expr = *expr_p;
6472 gimple_seq body = NULL;
6473 struct gimplify_ctx gctx;
6475 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6476 OMP_PARALLEL_COMBINED (expr)
6477 ? ORT_COMBINED_PARALLEL
6480 push_gimplify_context (&gctx);
6482 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6483 if (gimple_code (g) == GIMPLE_BIND)
6484 pop_gimplify_context (g);
6486 pop_gimplify_context (NULL);
6488 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6490 g = gimple_build_omp_parallel (body,
6491 OMP_PARALLEL_CLAUSES (expr),
6492 NULL_TREE, NULL_TREE);
6493 if (OMP_PARALLEL_COMBINED (expr))
6494 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6495 gimplify_seq_add_stmt (pre_p, g);
6496 *expr_p = NULL_TREE;
6499 /* Gimplify the contents of an OMP_TASK statement. This involves
6500 gimplification of the body, as well as scanning the body for used
6501 variables. We need to do this scan now, because variable-sized
6502 decls will be decomposed during gimplification. */
6505 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6507 tree expr = *expr_p;
6509 gimple_seq body = NULL;
6510 struct gimplify_ctx gctx;
6512 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6513 find_omp_clause (OMP_TASK_CLAUSES (expr),
6515 ? ORT_UNTIED_TASK : ORT_TASK);
6517 push_gimplify_context (&gctx);
6519 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6520 if (gimple_code (g) == GIMPLE_BIND)
6521 pop_gimplify_context (g);
6523 pop_gimplify_context (NULL);
6525 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
6527 g = gimple_build_omp_task (body,
6528 OMP_TASK_CLAUSES (expr),
6529 NULL_TREE, NULL_TREE,
6530 NULL_TREE, NULL_TREE, NULL_TREE);
6531 gimplify_seq_add_stmt (pre_p, g);
6532 *expr_p = NULL_TREE;
6535 /* Gimplify the gross structure of an OMP_FOR statement. */
6537 static enum gimplify_status
6538 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
6540 tree for_stmt, decl, var, t;
6541 enum gimplify_status ret = GS_ALL_DONE;
6542 enum gimplify_status tret;
6544 gimple_seq for_body, for_pre_body;
6549 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6552 /* Handle OMP_FOR_INIT. */
6553 for_pre_body = NULL;
6554 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6555 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
6558 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6559 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6560 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6561 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6562 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6564 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6565 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6566 decl = TREE_OPERAND (t, 0);
6567 gcc_assert (DECL_P (decl));
6568 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6569 || POINTER_TYPE_P (TREE_TYPE (decl)));
6571 /* Make sure the iteration variable is private. */
6572 if (omp_is_private (gimplify_omp_ctxp, decl))
6573 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6575 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6577 /* If DECL is not a gimple register, create a temporary variable to act
6578 as an iteration counter. This is valid, since DECL cannot be
6579 modified in the body of the loop. */
6580 if (!is_gimple_reg (decl))
6582 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6583 TREE_OPERAND (t, 0) = var;
6585 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6587 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6592 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6593 is_gimple_val, fb_rvalue);
6594 ret = MIN (ret, tret);
6595 if (ret == GS_ERROR)
6598 /* Handle OMP_FOR_COND. */
6599 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6600 gcc_assert (COMPARISON_CLASS_P (t));
6601 gcc_assert (TREE_OPERAND (t, 0) == decl);
6603 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6604 is_gimple_val, fb_rvalue);
6605 ret = MIN (ret, tret);
6607 /* Handle OMP_FOR_INCR. */
6608 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6609 switch (TREE_CODE (t))
6611 case PREINCREMENT_EXPR:
6612 case POSTINCREMENT_EXPR:
6613 t = build_int_cst (TREE_TYPE (decl), 1);
6614 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6615 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6616 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6619 case PREDECREMENT_EXPR:
6620 case POSTDECREMENT_EXPR:
6621 t = build_int_cst (TREE_TYPE (decl), -1);
6622 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6623 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6624 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6628 gcc_assert (TREE_OPERAND (t, 0) == decl);
6629 TREE_OPERAND (t, 0) = var;
6631 t = TREE_OPERAND (t, 1);
6632 switch (TREE_CODE (t))
6635 if (TREE_OPERAND (t, 1) == decl)
6637 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6638 TREE_OPERAND (t, 0) = var;
6644 case POINTER_PLUS_EXPR:
6645 gcc_assert (TREE_OPERAND (t, 0) == decl);
6646 TREE_OPERAND (t, 0) = var;
6652 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6653 is_gimple_val, fb_rvalue);
6654 ret = MIN (ret, tret);
6661 if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6664 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
6665 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6666 && OMP_CLAUSE_DECL (c) == decl
6667 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6669 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6670 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6671 gcc_assert (TREE_OPERAND (t, 0) == var);
6672 t = TREE_OPERAND (t, 1);
6673 gcc_assert (TREE_CODE (t) == PLUS_EXPR
6674 || TREE_CODE (t) == MINUS_EXPR
6675 || TREE_CODE (t) == POINTER_PLUS_EXPR);
6676 gcc_assert (TREE_OPERAND (t, 0) == var);
6677 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6678 TREE_OPERAND (t, 1));
6679 gimplify_assign (decl, t,
6680 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6685 gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);
6687 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
6689 gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
6690 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6693 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6695 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6696 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6697 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6698 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6699 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6700 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6701 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6702 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6705 gimplify_seq_add_stmt (pre_p, gfor);
6706 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
6709 /* Gimplify the gross structure of other OpenMP worksharing constructs.
6710 In particular, OMP_SECTIONS and OMP_SINGLE. */
6713 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6715 tree expr = *expr_p;
6717 gimple_seq body = NULL;
6719 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6720 gimplify_and_add (OMP_BODY (expr), &body);
6721 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
6723 if (TREE_CODE (expr) == OMP_SECTIONS)
6724 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6725 else if (TREE_CODE (expr) == OMP_SINGLE)
6726 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6730 gimplify_seq_add_stmt (pre_p, stmt);
6733 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
6734 stabilized the lhs of the atomic operation as *ADDR. Return true if
6735 EXPR is this stabilized form. */
6738 goa_lhs_expr_p (tree expr, tree addr)
6740 /* Also include casts to other type variants. The C front end is fond
6741 of adding these for e.g. volatile variables. This is like
6742 STRIP_TYPE_NOPS but includes the main variant lookup. */
6743 STRIP_USELESS_TYPE_CONVERSION (expr);
6745 if (TREE_CODE (expr) == INDIRECT_REF)
6747 expr = TREE_OPERAND (expr, 0);
6749 && (CONVERT_EXPR_P (expr)
6750 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6751 && TREE_CODE (expr) == TREE_CODE (addr)
6752 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
6754 expr = TREE_OPERAND (expr, 0);
6755 addr = TREE_OPERAND (addr, 0);
6759 return (TREE_CODE (addr) == ADDR_EXPR
6760 && TREE_CODE (expr) == ADDR_EXPR
6761 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
6763 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6768 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
6769 expression does not involve the lhs, evaluate it into a temporary.
6770 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
6771 or -1 if an error was encountered. */
6774 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6777 tree expr = *expr_p;
6780 if (goa_lhs_expr_p (expr, lhs_addr))
6785 if (is_gimple_val (expr))
6789 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6792 case tcc_comparison:
6793 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6796 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6799 case tcc_expression:
6800 switch (TREE_CODE (expr))
6802 case TRUTH_ANDIF_EXPR:
6803 case TRUTH_ORIF_EXPR:
6804 case TRUTH_AND_EXPR:
6806 case TRUTH_XOR_EXPR:
6807 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6809 case TRUTH_NOT_EXPR:
6810 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
6814 /* Break out any preevaluations from cp_build_modify_expr. */
6815 for (; TREE_CODE (expr) == COMPOUND_EXPR;
6816 expr = TREE_OPERAND (expr, 1))
6817 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
6819 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
6830 enum gimplify_status gs;
6831 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6832 if (gs != GS_ALL_DONE)
6839 /* Gimplify an OMP_ATOMIC statement. */
6841 static enum gimplify_status
6842 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6844 tree addr = TREE_OPERAND (*expr_p, 0);
6845 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
6846 ? NULL : TREE_OPERAND (*expr_p, 1);
6847 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6849 gimple loadstmt, storestmt;
6851 tmp_load = create_tmp_reg (type, NULL);
6852 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6855 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6859 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
6860 gimplify_seq_add_stmt (pre_p, loadstmt);
6861 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6865 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
6867 storestmt = gimple_build_omp_atomic_store (rhs);
6868 gimplify_seq_add_stmt (pre_p, storestmt);
6869 switch (TREE_CODE (*expr_p))
6871 case OMP_ATOMIC_READ:
6872 case OMP_ATOMIC_CAPTURE_OLD:
6874 gimple_omp_atomic_set_need_value (loadstmt);
6876 case OMP_ATOMIC_CAPTURE_NEW:
6878 gimple_omp_atomic_set_need_value (storestmt);
6888 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
6889 body, and adding some EH bits. */
6891 static enum gimplify_status
6892 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
6894 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
6896 gimple_seq body = NULL;
6897 struct gimplify_ctx gctx;
6900 /* Wrap the transaction body in a BIND_EXPR so we have a context
6901 where to put decls for OpenMP. */
6902 if (TREE_CODE (tbody) != BIND_EXPR)
6904 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
6905 TREE_SIDE_EFFECTS (bind) = 1;
6906 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
6907 TRANSACTION_EXPR_BODY (expr) = bind;
6910 push_gimplify_context (&gctx);
6911 temp = voidify_wrapper_expr (*expr_p, NULL);
6913 g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
6914 pop_gimplify_context (g);
6916 g = gimple_build_transaction (body, NULL);
6917 if (TRANSACTION_EXPR_OUTER (expr))
6918 subcode = GTMA_IS_OUTER;
6919 else if (TRANSACTION_EXPR_RELAXED (expr))
6920 subcode = GTMA_IS_RELAXED;
6921 gimple_transaction_set_subcode (g, subcode);
6923 gimplify_seq_add_stmt (pre_p, g);
6931 *expr_p = NULL_TREE;
6935 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
6936 expression produces a value to be used as an operand inside a GIMPLE
6937 statement, the value will be stored back in *EXPR_P. This value will
6938 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6939 an SSA_NAME. The corresponding sequence of GIMPLE statements is
6940 emitted in PRE_P and POST_P.
6942 Additionally, this process may overwrite parts of the input
6943 expression during gimplification. Ideally, it should be
6944 possible to do non-destructive gimplification.
6946 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
6947 the expression needs to evaluate to a value to be used as
6948 an operand in a GIMPLE statement, this value will be stored in
6949 *EXPR_P on exit. This happens when the caller specifies one
6950 of fb_lvalue or fb_rvalue fallback flags.
6952 PRE_P will contain the sequence of GIMPLE statements corresponding
6953 to the evaluation of EXPR and all the side-effects that must
6954 be executed before the main expression. On exit, the last
6955 statement of PRE_P is the core statement being gimplified. For
6956 instance, when gimplifying 'if (++a)' the last statement in
6957 PRE_P will be 'if (t.1)' where t.1 is the result of
6958 pre-incrementing 'a'.
6960 POST_P will contain the sequence of GIMPLE statements corresponding
6961 to the evaluation of all the side-effects that must be executed
6962 after the main expression. If this is NULL, the post
6963 side-effects are stored at the end of PRE_P.
6965 The reason why the output is split in two is to handle post
6966 side-effects explicitly. In some cases, an expression may have
6967 inner and outer post side-effects which need to be emitted in
6968 an order different from the one given by the recursive
6969 traversal. For instance, for the expression (*p--)++ the post
6970 side-effects of '--' must actually occur *after* the post
6971 side-effects of '++'. However, gimplification will first visit
6972 the inner expression, so if a separate POST sequence was not
6973 used, the resulting sequence would be:
6980 However, the post-decrement operation in line #2 must not be
6981 evaluated until after the store to *p at line #4, so the
6982 correct sequence should be:
6989 So, by specifying a separate post queue, it is possible
6990 to emit the post side-effects in the correct order.
6991 If POST_P is NULL, an internal queue will be used. Before
6992 returning to the caller, the sequence POST_P is appended to
6993 the main output sequence PRE_P.
6995 GIMPLE_TEST_F points to a function that takes a tree T and
6996 returns nonzero if T is in the GIMPLE form requested by the
6997 caller. The GIMPLE predicates are in gimple.c.
6999 FALLBACK tells the function what sort of a temporary we want if
7000 gimplification cannot produce an expression that complies with
7003 fb_none means that no temporary should be generated
7004 fb_rvalue means that an rvalue is OK to generate
7005 fb_lvalue means that an lvalue is OK to generate
7006 fb_either means that either is OK, but an lvalue is preferable.
7007 fb_mayfail means that gimplification may fail (in which case
7008 GS_ERROR will be returned)
7010 The return value is either GS_ERROR or GS_ALL_DONE, since this
7011 function iterates until EXPR is completely gimplified or an error
7014 enum gimplify_status
7015 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7016 bool (*gimple_test_f) (tree), fallback_t fallback)
7019 gimple_seq internal_pre = NULL;
7020 gimple_seq internal_post = NULL;
7023 location_t saved_location;
7024 enum gimplify_status ret;
7025 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
7027 save_expr = *expr_p;
7028 if (save_expr == NULL_TREE)
7031 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
7032 is_statement = gimple_test_f == is_gimple_stmt;
7036 /* Consistency checks. */
7037 if (gimple_test_f == is_gimple_reg)
7038 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7039 else if (gimple_test_f == is_gimple_val
7040 || gimple_test_f == is_gimple_call_addr
7041 || gimple_test_f == is_gimple_condexpr
7042 || gimple_test_f == is_gimple_mem_rhs
7043 || gimple_test_f == is_gimple_mem_rhs_or_call
7044 || gimple_test_f == is_gimple_reg_rhs
7045 || gimple_test_f == is_gimple_reg_rhs_or_call
7046 || gimple_test_f == is_gimple_asm_val
7047 || gimple_test_f == is_gimple_mem_ref_addr)
7048 gcc_assert (fallback & fb_rvalue);
7049 else if (gimple_test_f == is_gimple_min_lval
7050 || gimple_test_f == is_gimple_lvalue)
7051 gcc_assert (fallback & fb_lvalue);
7052 else if (gimple_test_f == is_gimple_addressable)
7053 gcc_assert (fallback & fb_either);
7054 else if (gimple_test_f == is_gimple_stmt)
7055 gcc_assert (fallback == fb_none);
7058 /* We should have recognized the GIMPLE_TEST_F predicate to
7059 know what kind of fallback to use in case a temporary is
7060 needed to hold the value or address of *EXPR_P. */
7064 /* We used to check the predicate here and return immediately if it
7065 succeeds. This is wrong; the design is for gimplification to be
7066 idempotent, and for the predicates to only test for valid forms, not
7067 whether they are fully simplified. */
7069 pre_p = &internal_pre;
7072 post_p = &internal_post;
7074 /* Remember the last statements added to PRE_P and POST_P. Every
7075 new statement added by the gimplification helpers needs to be
7076 annotated with location information. To centralize the
7077 responsibility, we remember the last statement that had been
7078 added to both queues before gimplifying *EXPR_P. If
7079 gimplification produces new statements in PRE_P and POST_P, those
7080 statements will be annotated with the same location information
7082 pre_last_gsi = gsi_last (*pre_p);
7083 post_last_gsi = gsi_last (*post_p);
7085 saved_location = input_location;
7086 if (save_expr != error_mark_node
7087 && EXPR_HAS_LOCATION (*expr_p))
7088 input_location = EXPR_LOCATION (*expr_p);
7090 /* Loop over the specific gimplifiers until the toplevel node
7091 remains the same. */
7094 /* Strip away as many useless type conversions as possible
7096 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
7098 /* Remember the expr. */
7099 save_expr = *expr_p;
7101 /* Die, die, die, my darling. */
7102 if (save_expr == error_mark_node
7103 || (TREE_TYPE (save_expr)
7104 && TREE_TYPE (save_expr) == error_mark_node))
7110 /* Do any language-specific gimplification. */
7111 ret = ((enum gimplify_status)
7112 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
7115 if (*expr_p == NULL_TREE)
7117 if (*expr_p != save_expr)
7120 else if (ret != GS_UNHANDLED)
7123 /* Make sure that all the cases set 'ret' appropriately. */
7125 switch (TREE_CODE (*expr_p))
7127 /* First deal with the special cases. */
7129 case POSTINCREMENT_EXPR:
7130 case POSTDECREMENT_EXPR:
7131 case PREINCREMENT_EXPR:
7132 case PREDECREMENT_EXPR:
7133 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
7134 fallback != fb_none,
7135 TREE_TYPE (*expr_p));
7139 case ARRAY_RANGE_REF:
7143 case VIEW_CONVERT_EXPR:
7144 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
7145 fallback ? fallback : fb_rvalue);
7149 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
7151 /* C99 code may assign to an array in a structure value of a
7152 conditional expression, and this has undefined behavior
7153 only on execution, so create a temporary if an lvalue is
7155 if (fallback == fb_lvalue)
7157 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7158 mark_addressable (*expr_p);
7164 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
7166 /* C99 code may assign to an array in a structure returned
7167 from a function, and this has undefined behavior only on
7168 execution, so create a temporary if an lvalue is
7170 if (fallback == fb_lvalue)
7172 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7173 mark_addressable (*expr_p);
7182 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
7185 case COMPOUND_LITERAL_EXPR:
7186 ret = gimplify_compound_literal_expr (expr_p, pre_p,
7187 gimple_test_f, fallback);
7192 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
7193 fallback != fb_none);
7196 case TRUTH_ANDIF_EXPR:
7197 case TRUTH_ORIF_EXPR:
7199 /* Preserve the original type of the expression and the
7200 source location of the outer expression. */
7201 tree org_type = TREE_TYPE (*expr_p);
7202 *expr_p = gimple_boolify (*expr_p);
7203 *expr_p = build3_loc (input_location, COND_EXPR,
7207 org_type, boolean_true_node),
7210 org_type, boolean_false_node));
7215 case TRUTH_NOT_EXPR:
7217 tree type = TREE_TYPE (*expr_p);
7218 /* The parsers are careful to generate TRUTH_NOT_EXPR
7219 only with operands that are always zero or one.
7220 We do not fold here but handle the only interesting case
7221 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
7222 *expr_p = gimple_boolify (*expr_p);
7223 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
7224 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
7225 TREE_TYPE (*expr_p),
7226 TREE_OPERAND (*expr_p, 0));
7228 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
7229 TREE_TYPE (*expr_p),
7230 TREE_OPERAND (*expr_p, 0),
7231 build_int_cst (TREE_TYPE (*expr_p), 1));
7232 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7233 *expr_p = fold_convert_loc (input_location, type, *expr_p);
7239 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7243 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
7247 if (IS_EMPTY_STMT (*expr_p))
7253 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7254 || fallback == fb_none)
7256 /* Just strip a conversion to void (or in void context) and
7258 *expr_p = TREE_OPERAND (*expr_p, 0);
7263 ret = gimplify_conversion (expr_p);
7264 if (ret == GS_ERROR)
7266 if (*expr_p != save_expr)
7270 case FIX_TRUNC_EXPR:
7271 /* unary_expr: ... | '(' cast ')' val | ... */
7272 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7273 is_gimple_val, fb_rvalue);
7274 recalculate_side_effects (*expr_p);
7279 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
7280 bool notrap = TREE_THIS_NOTRAP (*expr_p);
7281 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7283 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7284 if (*expr_p != save_expr)
7290 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7291 is_gimple_reg, fb_rvalue);
7292 if (ret == GS_ERROR)
7295 recalculate_side_effects (*expr_p);
7296 *expr_p = fold_build2_loc (input_location, MEM_REF,
7297 TREE_TYPE (*expr_p),
7298 TREE_OPERAND (*expr_p, 0),
7299 build_int_cst (saved_ptr_type, 0));
7300 TREE_THIS_VOLATILE (*expr_p) = volatilep;
7301 TREE_THIS_NOTRAP (*expr_p) = notrap;
7306 /* We arrive here through the various re-gimplifcation paths. */
7308 /* First try re-folding the whole thing. */
7309 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7310 TREE_OPERAND (*expr_p, 0),
7311 TREE_OPERAND (*expr_p, 1));
7315 recalculate_side_effects (*expr_p);
7319 /* Avoid re-gimplifying the address operand if it is already
7320 in suitable form. Re-gimplifying would mark the address
7321 operand addressable. Always gimplify when not in SSA form
7322 as we still may have to gimplify decls with value-exprs. */
7323 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7324 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7326 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7327 is_gimple_mem_ref_addr, fb_rvalue);
7328 if (ret == GS_ERROR)
7331 recalculate_side_effects (*expr_p);
7335 /* Constants need not be gimplified. */
7346 /* If we require an lvalue, such as for ADDR_EXPR, retain the
7347 CONST_DECL node. Otherwise the decl is replaceable by its
7349 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
7350 if (fallback & fb_lvalue)
7354 *expr_p = DECL_INITIAL (*expr_p);
7360 ret = gimplify_decl_expr (expr_p, pre_p);
7364 ret = gimplify_bind_expr (expr_p, pre_p);
7368 ret = gimplify_loop_expr (expr_p, pre_p);
7372 ret = gimplify_switch_expr (expr_p, pre_p);
7376 ret = gimplify_exit_expr (expr_p);
7380 /* If the target is not LABEL, then it is a computed jump
7381 and the target needs to be gimplified. */
7382 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
7384 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7385 NULL, is_gimple_val, fb_rvalue);
7386 if (ret == GS_ERROR)
7389 gimplify_seq_add_stmt (pre_p,
7390 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
7395 gimplify_seq_add_stmt (pre_p,
7396 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7397 PREDICT_EXPR_OUTCOME (*expr_p)));
7403 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7404 == current_function_decl);
7405 gimplify_seq_add_stmt (pre_p,
7406 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
7409 case CASE_LABEL_EXPR:
7410 ret = gimplify_case_label_expr (expr_p, pre_p);
7414 ret = gimplify_return_expr (*expr_p, pre_p);
7418 /* Don't reduce this in place; let gimplify_init_constructor work its
7419 magic. Buf if we're just elaborating this for side effects, just
7420 gimplify any element that has side-effects. */
7421 if (fallback == fb_none)
7423 unsigned HOST_WIDE_INT ix;
7425 tree temp = NULL_TREE;
7426 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7427 if (TREE_SIDE_EFFECTS (val))
7428 append_to_statement_list (val, &temp);
7431 ret = temp ? GS_OK : GS_ALL_DONE;
7433 /* C99 code may assign to an array in a constructed
7434 structure or union, and this has undefined behavior only
7435 on execution, so create a temporary if an lvalue is
7437 else if (fallback == fb_lvalue)
7439 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7440 mark_addressable (*expr_p);
7447 /* The following are special cases that are not handled by the
7448 original GIMPLE grammar. */
7450 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7453 ret = gimplify_save_expr (expr_p, pre_p, post_p);
7457 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7458 post_p, is_gimple_lvalue, fb_either);
7459 recalculate_side_effects (*expr_p);
7462 case TARGET_MEM_REF:
7464 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7466 if (TMR_BASE (*expr_p))
7467 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
7468 post_p, is_gimple_mem_ref_addr, fb_either);
7469 if (TMR_INDEX (*expr_p))
7470 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7471 post_p, is_gimple_val, fb_rvalue);
7472 if (TMR_INDEX2 (*expr_p))
7473 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7474 post_p, is_gimple_val, fb_rvalue);
7475 /* TMR_STEP and TMR_OFFSET are always integer constants. */
7480 case NON_LVALUE_EXPR:
7481 /* This should have been stripped above. */
7485 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7488 case TRY_FINALLY_EXPR:
7489 case TRY_CATCH_EXPR:
7491 gimple_seq eval, cleanup;
7494 /* Calls to destructors are generated automatically in FINALLY/CATCH
7495 block. They should have location as UNKNOWN_LOCATION. However,
7496 gimplify_call_expr will reset these call stmts to input_location
7497 if it finds stmt's location is unknown. To prevent resetting for
7498 destructors, we set the input_location to unknown.
7499 Note that this only affects the destructor calls in FINALLY/CATCH
7500 block, and will automatically reset to its original value by the
7501 end of gimplify_expr. */
7502 input_location = UNKNOWN_LOCATION;
7503 eval = cleanup = NULL;
7504 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
7505 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
7506 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
7507 if (gimple_seq_empty_p (cleanup))
7509 gimple_seq_add_seq (pre_p, eval);
7513 try_ = gimple_build_try (eval, cleanup,
7514 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
7515 ? GIMPLE_TRY_FINALLY
7516 : GIMPLE_TRY_CATCH);
7517 if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
7518 gimple_set_location (try_, saved_location);
7520 gimple_set_location (try_, EXPR_LOCATION (save_expr));
7521 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
7522 gimple_try_set_catch_is_cleanup (try_,
7523 TRY_CATCH_IS_CLEANUP (*expr_p));
7524 gimplify_seq_add_stmt (pre_p, try_);
7529 case CLEANUP_POINT_EXPR:
7530 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
7534 ret = gimplify_target_expr (expr_p, pre_p, post_p);
7540 gimple_seq handler = NULL;
7541 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
7542 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
7543 gimplify_seq_add_stmt (pre_p, c);
7548 case EH_FILTER_EXPR:
7551 gimple_seq failure = NULL;
7553 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
7554 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
7555 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
7556 gimplify_seq_add_stmt (pre_p, ehf);
7563 enum gimplify_status r0, r1;
7564 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
7565 post_p, is_gimple_val, fb_rvalue);
7566 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
7567 post_p, is_gimple_val, fb_rvalue);
7568 TREE_SIDE_EFFECTS (*expr_p) = 0;
7574 /* We get here when taking the address of a label. We mark
7575 the label as "forced"; meaning it can never be removed and
7576 it is a potential target for any computed goto. */
7577 FORCED_LABEL (*expr_p) = 1;
7581 case STATEMENT_LIST:
7582 ret = gimplify_statement_list (expr_p, pre_p);
7585 case WITH_SIZE_EXPR:
7587 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7588 post_p == &internal_post ? NULL : post_p,
7589 gimple_test_f, fallback);
7590 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7591 is_gimple_val, fb_rvalue);
7598 ret = gimplify_var_or_parm_decl (expr_p);
7602 /* When within an OpenMP context, notice uses of variables. */
7603 if (gimplify_omp_ctxp)
7604 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
7609 /* Allow callbacks into the gimplifier during optimization. */
7614 gimplify_omp_parallel (expr_p, pre_p);
7619 gimplify_omp_task (expr_p, pre_p);
7624 ret = gimplify_omp_for (expr_p, pre_p);
7629 gimplify_omp_workshare (expr_p, pre_p);
7638 gimple_seq body = NULL;
7641 gimplify_and_add (OMP_BODY (*expr_p), &body);
7642 switch (TREE_CODE (*expr_p))
7645 g = gimple_build_omp_section (body);
7648 g = gimple_build_omp_master (body);
7651 g = gimple_build_omp_ordered (body);
7654 g = gimple_build_omp_critical (body,
7655 OMP_CRITICAL_NAME (*expr_p));
7660 gimplify_seq_add_stmt (pre_p, g);
7666 case OMP_ATOMIC_READ:
7667 case OMP_ATOMIC_CAPTURE_OLD:
7668 case OMP_ATOMIC_CAPTURE_NEW:
7669 ret = gimplify_omp_atomic (expr_p, pre_p);
7672 case TRANSACTION_EXPR:
7673 ret = gimplify_transaction (expr_p, pre_p);
7676 case TRUTH_AND_EXPR:
7678 case TRUTH_XOR_EXPR:
7680 tree orig_type = TREE_TYPE (*expr_p);
7681 tree new_type, xop0, xop1;
7682 *expr_p = gimple_boolify (*expr_p);
7683 new_type = TREE_TYPE (*expr_p);
7684 if (!useless_type_conversion_p (orig_type, new_type))
7686 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
7691 /* Boolified binary truth expressions are semantically equivalent
7692 to bitwise binary expressions. Canonicalize them to the
7694 switch (TREE_CODE (*expr_p))
7696 case TRUTH_AND_EXPR:
7697 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
7700 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
7702 case TRUTH_XOR_EXPR:
7703 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
7708 /* Now make sure that operands have compatible type to
7709 expression's new_type. */
7710 xop0 = TREE_OPERAND (*expr_p, 0);
7711 xop1 = TREE_OPERAND (*expr_p, 1);
7712 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
7713 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
7716 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
7717 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
7720 /* Continue classified as tcc_binary. */
7727 /* Classified as tcc_expression. */
7730 case POINTER_PLUS_EXPR:
7732 enum gimplify_status r0, r1;
7733 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7734 post_p, is_gimple_val, fb_rvalue);
7735 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7736 post_p, is_gimple_val, fb_rvalue);
7737 recalculate_side_effects (*expr_p);
7739 /* Convert &X + CST to invariant &MEM[&X, CST]. Do this
7740 after gimplifying operands - this is similar to how
7741 it would be folding all gimplified stmts on creation
7742 to have them canonicalized, which is what we eventually
7743 should do anyway. */
7744 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7745 && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
7747 *expr_p = build_fold_addr_expr_with_type_loc
7749 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
7750 TREE_OPERAND (*expr_p, 0),
7751 fold_convert (ptr_type_node,
7752 TREE_OPERAND (*expr_p, 1))),
7753 TREE_TYPE (*expr_p));
7754 ret = MIN (ret, GS_OK);
7760 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
7762 case tcc_comparison:
7763 /* Handle comparison of objects of non scalar mode aggregates
7764 with a call to memcmp. It would be nice to only have to do
7765 this for variable-sized objects, but then we'd have to allow
7766 the same nest of reference nodes we allow for MODIFY_EXPR and
7769 Compare scalar mode aggregates as scalar mode values. Using
7770 memcmp for them would be very inefficient at best, and is
7771 plain wrong if bitfields are involved. */
7773 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
7775 /* Vector comparisons need no boolification. */
7776 if (TREE_CODE (type) == VECTOR_TYPE)
7778 else if (!AGGREGATE_TYPE_P (type))
7780 tree org_type = TREE_TYPE (*expr_p);
7781 *expr_p = gimple_boolify (*expr_p);
7782 if (!useless_type_conversion_p (org_type,
7783 TREE_TYPE (*expr_p)))
7785 *expr_p = fold_convert_loc (input_location,
7792 else if (TYPE_MODE (type) != BLKmode)
7793 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
7795 ret = gimplify_variable_sized_compare (expr_p);
7800 /* If *EXPR_P does not need to be special-cased, handle it
7801 according to its class. */
7803 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7804 post_p, is_gimple_val, fb_rvalue);
7810 enum gimplify_status r0, r1;
7812 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7813 post_p, is_gimple_val, fb_rvalue);
7814 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7815 post_p, is_gimple_val, fb_rvalue);
7823 enum gimplify_status r0, r1, r2;
7825 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7826 post_p, is_gimple_val, fb_rvalue);
7827 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7828 post_p, is_gimple_val, fb_rvalue);
7829 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
7830 post_p, is_gimple_val, fb_rvalue);
7832 ret = MIN (MIN (r0, r1), r2);
7836 case tcc_declaration:
7839 goto dont_recalculate;
7845 recalculate_side_effects (*expr_p);
7851 gcc_assert (*expr_p || ret != GS_OK);
7853 while (ret == GS_OK);
7855 /* If we encountered an error_mark somewhere nested inside, either
7856 stub out the statement or propagate the error back out. */
7857 if (ret == GS_ERROR)
7864 /* This was only valid as a return value from the langhook, which
7865 we handled. Make sure it doesn't escape from any other context. */
7866 gcc_assert (ret != GS_UNHANDLED);
7868 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
7870 /* We aren't looking for a value, and we don't have a valid
7871 statement. If it doesn't have side-effects, throw it away. */
7872 if (!TREE_SIDE_EFFECTS (*expr_p))
7874 else if (!TREE_THIS_VOLATILE (*expr_p))
7876 /* This is probably a _REF that contains something nested that
7877 has side effects. Recurse through the operands to find it. */
7878 enum tree_code code = TREE_CODE (*expr_p);
7885 case VIEW_CONVERT_EXPR:
7886 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7887 gimple_test_f, fallback);
7891 case ARRAY_RANGE_REF:
7892 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7893 gimple_test_f, fallback);
7894 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7895 gimple_test_f, fallback);
7899 /* Anything else with side-effects must be converted to
7900 a valid statement before we get here. */
7906 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7907 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
7909 /* Historically, the compiler has treated a bare reference
7910 to a non-BLKmode volatile lvalue as forcing a load. */
7911 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
7913 /* Normally, we do not want to create a temporary for a
7914 TREE_ADDRESSABLE type because such a type should not be
7915 copied by bitwise-assignment. However, we make an
7916 exception here, as all we are doing here is ensuring that
7917 we read the bytes that make up the type. We use
7918 create_tmp_var_raw because create_tmp_var will abort when
7919 given a TREE_ADDRESSABLE type. */
7920 tree tmp = create_tmp_var_raw (type, "vol");
7921 gimple_add_tmp_var (tmp);
7922 gimplify_assign (tmp, *expr_p, pre_p);
7926 /* We can't do anything useful with a volatile reference to
7927 an incomplete type, so just throw it away. Likewise for
7928 a BLKmode type, since any implicit inner load should
7929 already have been turned into an explicit one by the
7930 gimplification process. */
7934 /* If we are gimplifying at the statement level, we're done. Tack
7935 everything together and return. */
7936 if (fallback == fb_none || is_statement)
7938 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7939 it out for GC to reclaim it. */
7940 *expr_p = NULL_TREE;
7942 if (!gimple_seq_empty_p (internal_pre)
7943 || !gimple_seq_empty_p (internal_post))
7945 gimplify_seq_add_seq (&internal_pre, internal_post);
7946 gimplify_seq_add_seq (pre_p, internal_pre);
7949 /* The result of gimplifying *EXPR_P is going to be the last few
7950 statements in *PRE_P and *POST_P. Add location information
7951 to all the statements that were added by the gimplification
7953 if (!gimple_seq_empty_p (*pre_p))
7954 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7956 if (!gimple_seq_empty_p (*post_p))
7957 annotate_all_with_location_after (*post_p, post_last_gsi,
7963 #ifdef ENABLE_GIMPLE_CHECKING
7966 enum tree_code code = TREE_CODE (*expr_p);
7967 /* These expressions should already be in gimple IR form. */
7968 gcc_assert (code != MODIFY_EXPR
7970 && code != BIND_EXPR
7971 && code != CATCH_EXPR
7972 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
7973 && code != EH_FILTER_EXPR
7974 && code != GOTO_EXPR
7975 && code != LABEL_EXPR
7976 && code != LOOP_EXPR
7977 && code != SWITCH_EXPR
7978 && code != TRY_FINALLY_EXPR
7979 && code != OMP_CRITICAL
7981 && code != OMP_MASTER
7982 && code != OMP_ORDERED
7983 && code != OMP_PARALLEL
7984 && code != OMP_SECTIONS
7985 && code != OMP_SECTION
7986 && code != OMP_SINGLE);
7990 /* Otherwise we're gimplifying a subexpression, so the resulting
7991 value is interesting. If it's a valid operand that matches
7992 GIMPLE_TEST_F, we're done. Unless we are handling some
7993 post-effects internally; if that's the case, we need to copy into
7994 a temporary before adding the post-effects to POST_P. */
7995 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
7998 /* Otherwise, we need to create a new temporary for the gimplified
8001 /* We can't return an lvalue if we have an internal postqueue. The
8002 object the lvalue refers to would (probably) be modified by the
8003 postqueue; we need to copy the value out first, which means an
8005 if ((fallback & fb_lvalue)
8006 && gimple_seq_empty_p (internal_post)
8007 && is_gimple_addressable (*expr_p))
8009 /* An lvalue will do. Take the address of the expression, store it
8010 in a temporary, and replace the expression with an INDIRECT_REF of
8012 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
8013 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
8014 *expr_p = build_simple_mem_ref (tmp);
8016 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
8018 /* An rvalue will do. Assign the gimplified expression into a
8019 new temporary TMP and replace the original expression with
8020 TMP. First, make sure that the expression has a type so that
8021 it can be assigned into a temporary. */
8022 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
8023 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
8027 #ifdef ENABLE_GIMPLE_CHECKING
8028 if (!(fallback & fb_mayfail))
8030 fprintf (stderr, "gimplification failed:\n");
8031 print_generic_expr (stderr, *expr_p, 0);
8032 debug_tree (*expr_p);
8033 internal_error ("gimplification failed");
8036 gcc_assert (fallback & fb_mayfail);
8038 /* If this is an asm statement, and the user asked for the
8039 impossible, don't die. Fail and let gimplify_asm_expr
8045 /* Make sure the temporary matches our predicate. */
8046 gcc_assert ((*gimple_test_f) (*expr_p));
8048 if (!gimple_seq_empty_p (internal_post))
8050 annotate_all_with_location (internal_post, input_location);
8051 gimplify_seq_add_seq (pre_p, internal_post);
8055 input_location = saved_location;
8059 /* Look through TYPE for variable-sized objects and gimplify each such
8060 size that we find. Add to LIST_P any statements generated. */
8063 gimplify_type_sizes (tree type, gimple_seq *list_p)
8067 if (type == NULL || type == error_mark_node)
8070 /* We first do the main variant, then copy into any other variants. */
8071 type = TYPE_MAIN_VARIANT (type);
8073 /* Avoid infinite recursion. */
8074 if (TYPE_SIZES_GIMPLIFIED (type))
8077 TYPE_SIZES_GIMPLIFIED (type) = 1;
8079 switch (TREE_CODE (type))
8085 case FIXED_POINT_TYPE:
8086 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
8087 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
8089 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8091 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
8092 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
8097 /* These types may not have declarations, so handle them here. */
8098 gimplify_type_sizes (TREE_TYPE (type), list_p);
8099 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
8100 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
8101 with assigned stack slots, for -O1+ -g they should be tracked
8103 if (!(TYPE_NAME (type)
8104 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
8105 && DECL_IGNORED_P (TYPE_NAME (type)))
8106 && TYPE_DOMAIN (type)
8107 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
8109 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8110 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8111 DECL_IGNORED_P (t) = 0;
8112 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8113 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8114 DECL_IGNORED_P (t) = 0;
8120 case QUAL_UNION_TYPE:
8121 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8122 if (TREE_CODE (field) == FIELD_DECL)
8124 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
8125 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
8126 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8127 gimplify_type_sizes (TREE_TYPE (field), list_p);
8132 case REFERENCE_TYPE:
8133 /* We used to recurse on the pointed-to type here, which turned out to
8134 be incorrect because its definition might refer to variables not
8135 yet initialized at this point if a forward declaration is involved.
8137 It was actually useful for anonymous pointed-to types to ensure
8138 that the sizes evaluation dominates every possible later use of the
8139 values. Restricting to such types here would be safe since there
8140 is no possible forward declaration around, but would introduce an
8141 undesirable middle-end semantic to anonymity. We then defer to
8142 front-ends the responsibility of ensuring that the sizes are
8143 evaluated both early and late enough, e.g. by attaching artificial
8144 type declarations to the tree. */
8151 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
8152 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
8154 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8156 TYPE_SIZE (t) = TYPE_SIZE (type);
8157 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
8158 TYPE_SIZES_GIMPLIFIED (t) = 1;
8162 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
8163 a size or position, has had all of its SAVE_EXPRs evaluated.
8164 We add any required statements to *STMT_P. */
8167 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
8169 tree expr = *expr_p;
8171 /* We don't do anything if the value isn't there, is constant, or contains
8172 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
8173 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
8174 will want to replace it with a new variable, but that will cause problems
8175 if this type is from outside the function. It's OK to have that here. */
8176 if (is_gimple_sizepos (expr))
8179 *expr_p = unshare_expr (expr);
8181 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
8184 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
8185 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
8186 is true, also gimplify the parameters. */
8189 gimplify_body (tree fndecl, bool do_parms)
8191 location_t saved_location = input_location;
8192 gimple_seq parm_stmts, seq;
8194 struct gimplify_ctx gctx;
8195 struct cgraph_node *cgn;
8197 timevar_push (TV_TREE_GIMPLIFY);
8199 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
8201 default_rtl_profile ();
8203 gcc_assert (gimplify_ctxp == NULL);
8204 push_gimplify_context (&gctx);
8206 /* Unshare most shared trees in the body and in that of any nested functions.
8207 It would seem we don't have to do this for nested functions because
8208 they are supposed to be output and then the outer function gimplified
8209 first, but the g++ front end doesn't always do it that way. */
8210 unshare_body (fndecl);
8211 unvisit_body (fndecl);
8213 cgn = cgraph_get_node (fndecl);
8214 if (cgn && cgn->origin)
8215 nonlocal_vlas = pointer_set_create ();
8217 /* Make sure input_location isn't set to something weird. */
8218 input_location = DECL_SOURCE_LOCATION (fndecl);
8220 /* Resolve callee-copies. This has to be done before processing
8221 the body so that DECL_VALUE_EXPR gets processed correctly. */
8222 parm_stmts = do_parms ? gimplify_parameters () : NULL;
8224 /* Gimplify the function's body. */
8226 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
8227 outer_bind = gimple_seq_first_stmt (seq);
8230 outer_bind = gimple_build_nop ();
8231 gimplify_seq_add_stmt (&seq, outer_bind);
8234 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
8235 not the case, wrap everything in a GIMPLE_BIND to make it so. */
8236 if (gimple_code (outer_bind) == GIMPLE_BIND
8237 && gimple_seq_first (seq) == gimple_seq_last (seq))
8240 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
8242 DECL_SAVED_TREE (fndecl) = NULL_TREE;
8244 /* If we had callee-copies statements, insert them at the beginning
8245 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
8246 if (!gimple_seq_empty_p (parm_stmts))
8250 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
8251 gimple_bind_set_body (outer_bind, parm_stmts);
8253 for (parm = DECL_ARGUMENTS (current_function_decl);
8254 parm; parm = DECL_CHAIN (parm))
8255 if (DECL_HAS_VALUE_EXPR_P (parm))
8257 DECL_HAS_VALUE_EXPR_P (parm) = 0;
8258 DECL_IGNORED_P (parm) = 0;
8264 pointer_set_destroy (nonlocal_vlas);
8265 nonlocal_vlas = NULL;
8268 pop_gimplify_context (outer_bind);
8269 gcc_assert (gimplify_ctxp == NULL);
8271 #ifdef ENABLE_CHECKING
8273 verify_gimple_in_seq (gimple_bind_body (outer_bind));
8276 timevar_pop (TV_TREE_GIMPLIFY);
8277 input_location = saved_location;
8282 typedef char *char_p; /* For DEF_VEC_P. */
8284 /* Return whether we should exclude FNDECL from instrumentation. */
8287 flag_instrument_functions_exclude_p (tree fndecl)
8291 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
8292 if (v && v->length () > 0)
8298 name = lang_hooks.decl_printable_name (fndecl, 0);
8299 FOR_EACH_VEC_ELT (*v, i, s)
8300 if (strstr (name, s) != NULL)
8304 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
8305 if (v && v->length () > 0)
8311 name = DECL_SOURCE_FILE (fndecl);
8312 FOR_EACH_VEC_ELT (*v, i, s)
8313 if (strstr (name, s) != NULL)
8320 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
8321 node for the function we want to gimplify.
8323 Return the sequence of GIMPLE statements corresponding to the body
8327 gimplify_function_tree (tree fndecl)
8333 gcc_assert (!gimple_body (fndecl));
8335 if (DECL_STRUCT_FUNCTION (fndecl))
8336 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
8338 push_struct_function (fndecl);
8340 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
8342 /* Preliminarily mark non-addressed complex variables as eligible
8343 for promotion to gimple registers. We'll transform their uses
8345 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
8346 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
8347 && !TREE_THIS_VOLATILE (parm)
8348 && !needs_to_live_in_memory (parm))
8349 DECL_GIMPLE_REG_P (parm) = 1;
8352 ret = DECL_RESULT (fndecl);
8353 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
8354 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
8355 && !needs_to_live_in_memory (ret))
8356 DECL_GIMPLE_REG_P (ret) = 1;
8358 bind = gimplify_body (fndecl, true);
8360 /* The tree body of the function is no longer needed, replace it
8361 with the new GIMPLE body. */
8363 gimple_seq_add_stmt (&seq, bind);
8364 gimple_set_body (fndecl, seq);
8366 /* If we're instrumenting function entry/exit, then prepend the call to
8367 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
8368 catch the exit hook. */
8369 /* ??? Add some way to ignore exceptions for this TFE. */
8370 if (flag_instrument_function_entry_exit
8371 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
8372 && !flag_instrument_functions_exclude_p (fndecl))
8377 gimple_seq cleanup = NULL, body = NULL;
8381 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8382 call = gimple_build_call (x, 1, integer_zero_node);
8383 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8384 gimple_call_set_lhs (call, tmp_var);
8385 gimplify_seq_add_stmt (&cleanup, call);
8386 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
8387 call = gimple_build_call (x, 2,
8388 build_fold_addr_expr (current_function_decl),
8390 gimplify_seq_add_stmt (&cleanup, call);
8391 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
8393 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8394 call = gimple_build_call (x, 1, integer_zero_node);
8395 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8396 gimple_call_set_lhs (call, tmp_var);
8397 gimplify_seq_add_stmt (&body, call);
8398 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
8399 call = gimple_build_call (x, 2,
8400 build_fold_addr_expr (current_function_decl),
8402 gimplify_seq_add_stmt (&body, call);
8403 gimplify_seq_add_stmt (&body, tf);
8404 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
8405 /* Clear the block for BIND, since it is no longer directly inside
8406 the function, but within a try block. */
8407 gimple_bind_set_block (bind, NULL);
8409 /* Replace the current function body with the body
8410 wrapped in the try/finally TF. */
8412 gimple_seq_add_stmt (&seq, new_bind);
8413 gimple_set_body (fndecl, seq);
8416 DECL_SAVED_TREE (fndecl) = NULL_TREE;
8417 cfun->curr_properties = PROP_gimple_any;
8422 /* Some transformations like inlining may invalidate the GIMPLE form
8423 for operands. This function traverses all the operands in STMT and
8424 gimplifies anything that is not a valid gimple operand. Any new
8425 GIMPLE statements are inserted before *GSI_P. */
8428 gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
8432 gimple_seq pre = NULL;
8433 gimple post_stmt = NULL;
8434 struct gimplify_ctx gctx;
8436 push_gimplify_context (&gctx);
8437 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
8439 switch (gimple_code (stmt))
8442 gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
8443 is_gimple_val, fb_rvalue);
8444 gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
8445 is_gimple_val, fb_rvalue);
8448 gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
8449 is_gimple_val, fb_rvalue);
8451 case GIMPLE_OMP_ATOMIC_LOAD:
8452 gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
8453 is_gimple_val, fb_rvalue);
8457 size_t i, noutputs = gimple_asm_noutputs (stmt);
8458 const char *constraint, **oconstraints;
8459 bool allows_mem, allows_reg, is_inout;
8462 = (const char **) alloca ((noutputs) * sizeof (const char *));
8463 for (i = 0; i < noutputs; i++)
8465 tree op = gimple_asm_output_op (stmt, i);
8466 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
8467 oconstraints[i] = constraint;
8468 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
8469 &allows_reg, &is_inout);
8470 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8471 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
8472 fb_lvalue | fb_mayfail);
8474 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
8476 tree op = gimple_asm_input_op (stmt, i);
8477 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
8478 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
8479 oconstraints, &allows_mem, &allows_reg);
8480 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
8482 if (!allows_reg && allows_mem)
8483 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8484 is_gimple_lvalue, fb_lvalue | fb_mayfail);
8486 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8487 is_gimple_asm_val, fb_rvalue);
8492 /* NOTE: We start gimplifying operands from last to first to
8493 make sure that side-effects on the RHS of calls, assignments
8494 and ASMs are executed before the LHS. The ordering is not
8495 important for other statements. */
8496 num_ops = gimple_num_ops (stmt);
8497 for (i = num_ops; i > 0; i--)
8499 tree op = gimple_op (stmt, i - 1);
8500 if (op == NULL_TREE)
8502 if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
8503 gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
8505 && is_gimple_assign (stmt)
8507 && get_gimple_rhs_class (gimple_expr_code (stmt))
8508 == GIMPLE_SINGLE_RHS)
8509 gimplify_expr (&op, &pre, NULL,
8510 rhs_predicate_for (gimple_assign_lhs (stmt)),
8512 else if (i == 2 && is_gimple_call (stmt))
8514 if (TREE_CODE (op) == FUNCTION_DECL)
8516 gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
8519 gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
8520 gimple_set_op (stmt, i - 1, op);
8523 lhs = gimple_get_lhs (stmt);
8524 /* If the LHS changed it in a way that requires a simple RHS,
8525 create temporary. */
8526 if (lhs && !is_gimple_reg (lhs))
8528 bool need_temp = false;
8530 if (is_gimple_assign (stmt)
8532 && get_gimple_rhs_class (gimple_expr_code (stmt))
8533 == GIMPLE_SINGLE_RHS)
8534 gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
8535 rhs_predicate_for (gimple_assign_lhs (stmt)),
8537 else if (is_gimple_reg (lhs))
8539 if (is_gimple_reg_type (TREE_TYPE (lhs)))
8541 if (is_gimple_call (stmt))
8543 i = gimple_call_flags (stmt);
8544 if ((i & ECF_LOOPING_CONST_OR_PURE)
8545 || !(i & (ECF_CONST | ECF_PURE)))
8548 if (stmt_can_throw_internal (stmt))
8554 if (is_gimple_reg_type (TREE_TYPE (lhs)))
8556 else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
8558 if (is_gimple_call (stmt))
8560 tree fndecl = gimple_call_fndecl (stmt);
8562 if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
8563 && !(fndecl && DECL_RESULT (fndecl)
8564 && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
8573 tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);
8574 if (gimple_in_ssa_p (cfun))
8575 temp = make_ssa_name (temp, NULL);
8576 gimple_set_lhs (stmt, temp);
8577 post_stmt = gimple_build_assign (lhs, temp);
8578 if (TREE_CODE (lhs) == SSA_NAME)
8579 SSA_NAME_DEF_STMT (lhs) = post_stmt;
8585 if (!gimple_seq_empty_p (pre))
8586 gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
8588 gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
8590 pop_gimplify_context (NULL);
8593 /* Expand EXPR to list of gimple statements STMTS. GIMPLE_TEST_F specifies
8594 the predicate that will hold for the result. If VAR is not NULL, make the
8595 base variable of the final destination be VAR if suitable. */
8598 force_gimple_operand_1 (tree expr, gimple_seq *stmts,
8599 gimple_predicate gimple_test_f, tree var)
8601 enum gimplify_status ret;
8602 struct gimplify_ctx gctx;
8603 location_t saved_location;
8607 /* gimple_test_f might be more strict than is_gimple_val, make
8608 sure we pass both. Just checking gimple_test_f doesn't work
8609 because most gimple predicates do not work recursively. */
8610 if (is_gimple_val (expr)
8611 && (*gimple_test_f) (expr))
8614 push_gimplify_context (&gctx);
8615 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
8616 gimplify_ctxp->allow_rhs_cond_expr = true;
8617 saved_location = input_location;
8618 input_location = UNKNOWN_LOCATION;
8622 if (gimplify_ctxp->into_ssa
8623 && is_gimple_reg (var))
8624 var = make_ssa_name (var, NULL);
8625 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
8628 if (TREE_CODE (expr) != MODIFY_EXPR
8629 && TREE_TYPE (expr) == void_type_node)
8631 gimplify_and_add (expr, stmts);
8636 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
8637 gcc_assert (ret != GS_ERROR);
8640 input_location = saved_location;
8641 pop_gimplify_context (NULL);
8646 /* Expand EXPR to list of gimple statements STMTS. If SIMPLE is true,
8647 force the result to be either ssa_name or an invariant, otherwise
8648 just force it to be a rhs expression. If VAR is not NULL, make the
8649 base variable of the final destination be VAR if suitable. */
8652 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
8654 return force_gimple_operand_1 (expr, stmts,
8655 simple ? is_gimple_val : is_gimple_reg_rhs,
8659 /* Invoke force_gimple_operand_1 for EXPR with parameters GIMPLE_TEST_F
8660 and VAR. If some statements are produced, emits them at GSI.
8661 If BEFORE is true. the statements are appended before GSI, otherwise
8662 they are appended after it. M specifies the way GSI moves after
8663 insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING are the usual values). */
8666 force_gimple_operand_gsi_1 (gimple_stmt_iterator *gsi, tree expr,
8667 gimple_predicate gimple_test_f,
8668 tree var, bool before,
8669 enum gsi_iterator_update m)
8673 expr = force_gimple_operand_1 (expr, &stmts, gimple_test_f, var);
8675 if (!gimple_seq_empty_p (stmts))
8678 gsi_insert_seq_before (gsi, stmts, m);
8680 gsi_insert_seq_after (gsi, stmts, m);
8686 /* Invoke force_gimple_operand_1 for EXPR with parameter VAR.
8687 If SIMPLE is true, force the result to be either ssa_name or an invariant,
8688 otherwise just force it to be a rhs expression. If some statements are
8689 produced, emits them at GSI. If BEFORE is true, the statements are
8690 appended before GSI, otherwise they are appended after it. M specifies
8691 the way GSI moves after insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING
8692 are the usual values). */
8695 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
8696 bool simple_p, tree var, bool before,
8697 enum gsi_iterator_update m)
8699 return force_gimple_operand_gsi_1 (gsi, expr,
8701 ? is_gimple_val : is_gimple_reg_rhs,
8706 #include "gt-gimplify.h"