1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
31 #include "tree-iterator.h"
32 #include "tree-inline.h"
33 #include "diagnostic.h"
34 #include "langhooks.h"
35 #include "langhooks-def.h"
36 #include "tree-flow.h"
50 #include "pointer-set.h"
51 #include "splay-tree.h"
54 #include "tree-pass.h"
/* NOTE(review): this file is an excerpted listing -- the leading number on
   each line is the original line number, and gaps in those numbers mark
   lines dropped from this copy (braces, remaining enumerators, etc.).  */
/* Flag bits describing how a DECL is shared within an OMP region; only a
   subset of the enumerators (originals 58-66 are partly missing) is
   visible here.  */
57 enum gimplify_omp_var_data
63 GOVD_FIRSTPRIVATE = 16,
64 GOVD_LASTPRIVATE = 32,
67 GOVD_DEBUG_PRIVATE = 256,
68 GOVD_PRIVATE_OUTER_REF = 512,
/* Mask selecting the data-sharing class bits out of the flag word.  */
69 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
70 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
/* Last visible enumerator of enum omp_region_type (originals 71-78 missing).  */
79 ORT_COMBINED_PARALLEL = 3
/* Per-OMP-region gimplification state; regions nest via OUTER_CONTEXT.
   Several fields (originals 83, 85, 87, 90-91) are missing from this copy.  */
82 struct gimplify_omp_ctx
84 struct gimplify_omp_ctx *outer_context;
86 struct pointer_set_t *privatized_types;
88 enum omp_clause_default_kind default_kind;
89 enum omp_region_type region_type;
/* Current (innermost) gimplification and OMP contexts.  */
92 static struct gimplify_ctx *gimplify_ctxp;
93 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
96 /* Formal (expression) temporary table handling: Multiple occurrences of
97 the same scalar expression are evaluated into the same temporary. */
/* Hash-table entry mapping an expression to its temporary (the VAL field,
   originals 100-101, is missing from this copy).  */
99 typedef struct gimple_temp_hash_elt
102 tree temp; /* Value */
105 /* Forward declarations. */
106 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
108 /* Mark X addressable. Unlike the langhook we expect X to be in gimple
109 form and we don't do any syntax checking. */
/* NOTE(review): originals 110, 112 and 118 are missing here (the return
   type, opening brace, and the early-return body of the `if').  */
111 mark_addressable (tree x)
/* Strip component references (array/field accesses) to reach the base
   object before setting the flag.  */
113 while (handled_component_p (x))
114 x = TREE_OPERAND (x, 0);
/* Only VAR_DECL, PARM_DECL and RESULT_DECL get TREE_ADDRESSABLE set.  */
115 if (TREE_CODE (x) != VAR_DECL
116 && TREE_CODE (x) != PARM_DECL
117 && TREE_CODE (x) != RESULT_DECL)
119 TREE_ADDRESSABLE (x) = 1;
122 /* Return a hash value for a formal temporary table entry. */
/* NOTE(review): excerpted listing -- return types, braces and the early
   `return 0'/`return 1' lines are among the originals missing here.  */
125 gimple_tree_hash (const void *p)
127 tree t = ((const elt_t *) p)->val;
128 return iterative_hash_expr (t, 0);
131 /* Compare two formal temporary table entries. */
134 gimple_tree_eq (const void *p1, const void *p2)
136 tree t1 = ((const elt_t *) p1)->val;
137 tree t2 = ((const elt_t *) p2)->val;
138 enum tree_code code = TREE_CODE (t1);
/* Cheap rejects first: tree code and type must match exactly.  */
140 if (TREE_CODE (t2) != code
141 || TREE_TYPE (t1) != TREE_TYPE (t2))
144 if (!operand_equal_p (t1, t2, 0))
147 /* Only allow them to compare equal if they also hash equal; otherwise
148 results are nondeterministic, and we fail bootstrap comparison. */
149 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
154 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
155 *SEQ_P is NULL, a new sequence is allocated. This function is
156 similar to gimple_seq_add_stmt, but does not scan the operands.
157 During gimplification, we need to manipulate statement sequences
158 before the def/use vectors have been constructed. */
/* NOTE(review): excerpted listing -- the return type, braces and the
   NULL-check guard lines (originals 159-170 in part) are missing.  */
161 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
163 gimple_stmt_iterator si;
169 *seq_p = gimple_seq_alloc ();
171 si = gsi_last (*seq_p);
173 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
176 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
177 NULL, a new sequence is allocated. This function is
178 similar to gimple_seq_add_seq, but does not scan the operands.
179 During gimplification, we need to manipulate statement sequences
180 before the def/use vectors have been constructed. */
183 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
185 gimple_stmt_iterator si;
191 *dst_p = gimple_seq_alloc ();
193 si = gsi_last (*dst_p);
194 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
197 /* Set up a context for the gimplifier. */
/* NOTE(review): excerpted listing -- return types, braces and several
   body lines (e.g. originals 204-206, 222-228) are missing here.  */
200 push_gimplify_context (struct gimplify_ctx *c)
/* Caller supplies the storage; zero it and chain onto the context stack.  */
202 memset (c, '\0', sizeof (*c));
203 c->prev_context = gimplify_ctxp;
207 /* Tear down a context for the gimplifier. If BODY is non-null, then
208 put the temporaries into the outer BIND_EXPR. Otherwise, put them
211 BODY is not a sequence, but the first tuple in a sequence. */
214 pop_gimplify_context (gimple body)
216 struct gimplify_ctx *c = gimplify_ctxp;
/* The bind-expr stack must be empty when a context is popped.  */
218 gcc_assert (c && (c->bind_expr_stack == NULL
219 || VEC_empty (gimple, c->bind_expr_stack)));
220 VEC_free (gimple, heap, c->bind_expr_stack);
221 gimplify_ctxp = c->prev_context;
224 declare_vars (c->temps, body, false);
226 record_vars (c->temps);
229 htab_delete (c->temp_htab);
/* Push GIMPLE_BIND onto the current context's bind-expr stack,
   allocating the stack lazily on first use.
   NOTE(review): excerpted listing -- return types and braces are among
   the originals missing throughout this group of accessors.  */
233 gimple_push_bind_expr (gimple gimple_bind)
235 if (gimplify_ctxp->bind_expr_stack == NULL)
236 gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
237 VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
/* Pop the innermost GIMPLE_BIND from the stack.  */
241 gimple_pop_bind_expr (void)
243 VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
/* Return the innermost GIMPLE_BIND without popping it.  */
247 gimple_current_bind_expr (void)
249 return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
252 /* Return the stack GIMPLE_BINDs created during gimplification. */
255 gimple_bind_expr_stack (void)
257 return gimplify_ctxp->bind_expr_stack;
260 /* Returns true iff there is a COND_EXPR between us and the innermost
261 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
/* NOTE(review): excerpted listing -- return types, braces, the #endif for
   the checking block, and the `if (conds == 0)' guard before line 292 are
   among the originals missing here.  */
264 gimple_conditional_context (void)
266 return gimplify_ctxp->conditions > 0;
269 /* Note that we've entered a COND_EXPR. */
272 gimple_push_condition (void)
/* With checking enabled, entering the outermost conditional scope must
   find no leftover conditional cleanups.  */
274 #ifdef ENABLE_GIMPLE_CHECKING
275 if (gimplify_ctxp->conditions == 0)
276 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
278 ++(gimplify_ctxp->conditions);
281 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
282 now, add any conditional cleanups we've seen to the prequeue. */
285 gimple_pop_condition (gimple_seq *pre_p)
287 int conds = --(gimplify_ctxp->conditions);
289 gcc_assert (conds >= 0);
292 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
293 gimplify_ctxp->conditional_cleanups = NULL;
297 /* A stable comparison routine for use with splay trees and DECLs. */
/* NOTE(review): originals 298-304 (return type, braces and the local
   declarations of A and B, presumably derived from XA/XB) are missing.  */
300 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
/* Compare by DECL_UID so the order is stable across runs.  */
305 return DECL_UID (a) - DECL_UID (b);
308 /* Create a new omp construct that deals with variable remapping. */
/* NOTE(review): excerpted listing -- braces, the `else' before line 324,
   and the `return c;' are among the originals missing here.  */
310 static struct gimplify_omp_ctx *
311 new_omp_context (enum omp_region_type region_type)
313 struct gimplify_omp_ctx *c;
315 c = XCNEW (struct gimplify_omp_ctx)
316 c->outer_context = gimplify_omp_ctxp;
317 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
318 c->privatized_types = pointer_set_create ();
319 c->location = input_location;
320 c->region_type = region_type;
/* Tasks default to `unspecified' sharing; everything else to `shared'.  */
321 if (region_type != ORT_TASK)
322 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
324 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
329 /* Destroy an omp construct that deals with variable remapping. */
332 delete_omp_context (struct gimplify_omp_ctx *c)
334 splay_tree_delete (c->variables);
335 pointer_set_destroy (c->privatized_types);
339 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
340 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
342 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
/* NOTE(review): excerpted listing -- the return types, braces and several
   body lines of this helper (originals 343-363 in part) are missing.  */
345 append_to_statement_list_1 (tree t, tree *list_p)
348 tree_stmt_iterator i;
352 if (t && TREE_CODE (t) == STATEMENT_LIST)
/* Allocate the container list on first use.  */
357 *list_p = list = alloc_stmt_list ();
361 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
364 /* Add T to the end of the list container pointed to by LIST_P.
365 If T is an expression with no effects, it is ignored. */
368 append_to_statement_list (tree t, tree *list_p)
370 if (t && TREE_SIDE_EFFECTS (t))
371 append_to_statement_list_1 (t, list_p);
374 /* Similar, but the statement is always added, regardless of side effects. */
377 append_to_statement_list_force (tree t, tree *list_p)
380 append_to_statement_list_1 (t, list_p);
383 /* Both gimplify the statement T and append it to *SEQ_P. This function
384 behaves exactly as gimplify_stmt, but you don't have to pass T as a
/* NOTE(review): excerpted listing -- the rest of this comment
   (originals 385-387), return types and braces are missing here.  */
388 gimplify_and_add (tree t, gimple_seq *seq_p)
390 gimplify_stmt (&t, seq_p);
393 /* Gimplify statement T into sequence *SEQ_P, and return the first
394 tuple in the sequence of generated tuples for this statement.
395 Return NULL if gimplifying T produced no tuples. */
398 gimplify_and_return_first (tree t, gimple_seq *seq_p)
/* Remember where the sequence ended before gimplifying T, so we can tell
   which statements were added.  */
400 gimple_stmt_iterator last = gsi_last (*seq_p);
402 gimplify_and_add (t, seq_p);
404 if (!gsi_end_p (last))
407 return gsi_stmt (last);
410 return gimple_seq_first_stmt (*seq_p);
413 /* Strip off a legitimate source ending from the input string NAME of
414 length LEN. Rather than having to know the names used by all of
415 our front ends, we strip off an ending of a period followed by
416 up to five characters. (Java uses ".class".) */
/* NOTE(review): excerpted listing -- return types, braces, the loop
   variable declaration and the `break' after line 427 are missing.  */
419 remove_suffix (char *name, int len)
/* Scan back at most 7 characters looking for the '.' of a suffix.  */
423 for (i = 2; i < 8 && len > i; i++)
425 if (name[len - i] == '.')
427 name[len - i] = '\0';
433 /* Create a new temporary name with PREFIX. Returns an identifier. */
435 static GTY(()) unsigned int tmp_var_id_num;
438 create_tmp_var_name (const char *prefix)
/* Work on a copy so the caller's PREFIX is not modified.  */
444 char *preftmp = ASTRDUP (prefix);
446 remove_suffix (preftmp, strlen (preftmp));
450 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
451 return get_identifier (tmp_name);
455 /* Create a new temporary variable declaration of type TYPE.
456 Does NOT push it into the current binding. */
/* NOTE(review): excerpted listing -- the return type, braces, local
   declarations and the final `return tmp_var;' (originals 457-486 in
   part) are missing here.  */
459 create_tmp_var_raw (tree type, const char *prefix)
464 /* Make the type of the variable writable. */
465 new_type = build_type_variant (type, 0, 0);
466 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
468 tmp_var = build_decl (input_location,
469 VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
472 /* The variable was declared by the compiler. */
473 DECL_ARTIFICIAL (tmp_var) = 1;
474 /* And we don't want debug info for it. */
475 DECL_IGNORED_P (tmp_var) = 1;
477 /* Make the variable writable. */
478 TREE_READONLY (tmp_var) = 0;
/* Plain local temporary: not external, not static, and marked used so it
   is not warned about.  */
480 DECL_EXTERNAL (tmp_var) = 0;
481 TREE_STATIC (tmp_var) = 0;
482 TREE_USED (tmp_var) = 1;
487 /* Create a new temporary variable declaration of type TYPE. DOES push the
488 variable into the current binding. Further, assume that this is called
489 only from gimplification or optimization, at which point the creation of
490 certain types are bugs. */
/* NOTE(review): excerpted listing -- return types, braces and the
   `return tmp_var;'/`return tmp;' lines are among the originals missing.  */
493 create_tmp_var (tree type, const char *prefix)
497 /* We don't allow types that are addressable (meaning we can't make copies),
498 or incomplete. We also used to reject every variable size objects here,
499 but now support those for which a constant upper bound can be obtained.
500 The processing for variable sizes is performed in gimple_add_tmp_var,
501 point at which it really matters and possibly reached via paths not going
502 through this function, e.g. after direct calls to create_tmp_var_raw. */
503 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
505 tmp_var = create_tmp_var_raw (type, prefix);
506 gimple_add_tmp_var (tmp_var);
510 /* Create a new temporary variable declaration of type TYPE by calling
511 create_tmp_var and if TYPE is a vector or a complex number, mark the new
512 temporary as gimple register. */
515 create_tmp_reg (tree type, const char *prefix)
519 tmp = create_tmp_var (type, prefix);
520 if (TREE_CODE (type) == COMPLEX_TYPE
521 || TREE_CODE (type) == VECTOR_TYPE)
522 DECL_GIMPLE_REG_P (tmp) = 1;
527 /* Create a temporary with a name derived from VAL. Subroutine of
528 lookup_tmp_var; nobody else should call this function. */
/* NOTE(review): excerpted listing -- return types, braces, the `else'
   branch structure, local declarations (ret, elt, slot, elt_p) and the
   final `return ret;' are among the originals missing here.  */
531 create_tmp_from_val (tree val)
533 return create_tmp_var (TREE_TYPE (val), get_name (val));
536 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
537 an existing expression temporary. */
540 lookup_tmp_var (tree val, bool is_formal)
544 /* If not optimizing, never really reuse a temporary. local-alloc
545 won't allocate any variable that is used in more than one basic
546 block, which means it will go into memory, causing much extra
547 work in reload and final and poorer code generation, outweighing
548 the extra memory allocation here. */
549 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
550 ret = create_tmp_from_val (val);
/* Otherwise look VAL up in the formal-temporary hash table, creating the
   table lazily on first use.  */
557 if (gimplify_ctxp->temp_htab == NULL)
558 gimplify_ctxp->temp_htab
559 = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
560 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
/* Empty slot: record a fresh temporary for this expression.  */
563 elt_p = XNEW (elt_t);
565 elt_p->temp = ret = create_tmp_from_val (val);
566 *slot = (void *) elt_p;
/* Occupied slot: reuse the previously created temporary.  */
570 elt_p = (elt_t *) *slot;
579 /* Return true if T is a CALL_EXPR or an expression that can be
580 assigned to a temporary. Note that this predicate should only be
581 used during gimplification. See the rationale for this in
582 gimplify_modify_expr. */
/* NOTE(review): excerpted listing -- return types and braces are among
   the originals missing in this pair of predicates.  */
585 is_gimple_reg_rhs_or_call (tree t)
587 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
588 || TREE_CODE (t) == CALL_EXPR);
591 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
592 this predicate should only be used during gimplification. See the
593 rationale for this in gimplify_modify_expr. */
596 is_gimple_mem_rhs_or_call (tree t)
598 /* If we're dealing with a renamable type, either source or dest must be
599 a renamed variable. */
600 if (is_gimple_reg_type (TREE_TYPE (t)))
601 return is_gimple_val (t);
603 return (is_gimple_val (t) || is_gimple_lvalue (t)
604 || TREE_CODE (t) == CALL_EXPR);
607 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
/* NOTE(review): excerpted listing -- the IS_FORMAL parameter line, local
   declarations, the `else' before line 632, braces and the final
   `return t;' are among the originals missing here.  */
610 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
615 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
616 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
617 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
620 t = lookup_tmp_var (val, is_formal);
/* Complex/vector temporaries are eligible for gimple registers.  */
623 && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
624 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
625 DECL_GIMPLE_REG_P (t) = 1;
627 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
/* Prefer VAL's own location; fall back to the current input location.  */
629 if (EXPR_HAS_LOCATION (val))
630 SET_EXPR_LOCATION (mod, EXPR_LOCATION (val));
632 SET_EXPR_LOCATION (mod, input_location);
634 /* gimplify_modify_expr might want to reduce this further. */
635 gimplify_and_add (mod, pre_p);
638 /* If we're gimplifying into ssa, gimplify_modify_expr will have
639 given our temporary an SSA name. Find and return it. */
640 if (gimplify_ctxp->into_ssa)
642 gimple last = gimple_seq_last_stmt (*pre_p);
643 t = gimple_get_lhs (last);
649 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
650 in gimplify_expr. Only use this function if:
652 1) The value of the unfactored expression represented by VAL will not
653 change between the initialization and use of the temporary, and
654 2) The temporary will not be otherwise modified.
656 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
657 and #2 means it is inappropriate for && temps.
659 For other cases, use get_initialized_tmp_var instead. */
/* NOTE(review): excerpted listing -- return types and braces are among
   the originals missing in these two thin wrappers.  */
662 get_formal_tmp_var (tree val, gimple_seq *pre_p)
664 return internal_get_tmp_var (val, pre_p, NULL, true);
667 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
668 are as in gimplify_expr. */
671 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
673 return internal_get_tmp_var (val, pre_p, post_p, false);
676 /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
677 true, generate debug info for them; otherwise don't. */
/* NOTE(review): excerpted listing -- the return type, braces, local
   declarations (temps, block, last) and the guard/else structure around
   lines 693-710 are among the originals missing here.  */
680 declare_vars (tree vars, gimple scope, bool debug_info)
687 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
689 temps = nreverse (last);
691 block = gimple_bind_block (scope);
692 gcc_assert (!block || TREE_CODE (block) == BLOCK);
/* Without a BLOCK (or without debug info), just splice TEMPS onto the
   front of the bind's variable chain.  */
693 if (!block || !debug_info)
695 TREE_CHAIN (last) = gimple_bind_vars (scope);
696 gimple_bind_set_vars (scope, temps);
700 /* We need to attach the nodes both to the BIND_EXPR and to its
701 associated BLOCK for debugging purposes. The key point here
702 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
703 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
704 if (BLOCK_VARS (block))
705 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
708 gimple_bind_set_vars (scope,
709 chainon (gimple_bind_vars (scope), temps));
710 BLOCK_VARS (block) = temps;
716 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
717 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
718 no such upper bound can be obtained. */
/* NOTE(review): excerpted listing -- the return type, braces and the
   `DECL_SIZE_UNIT (var)' / `DECL_SIZE (var)' left-hand sides of the two
   assignments (originals 734 and 736) are missing here.  */
721 force_constant_size (tree var)
723 /* The only attempt we make is by querying the maximum size of objects
724 of the variable's type. */
726 HOST_WIDE_INT max_size;
728 gcc_assert (TREE_CODE (var) == VAR_DECL);
730 max_size = max_int_size_in_bytes (TREE_TYPE (var));
/* max_int_size_in_bytes returning a negative value means no constant
   upper bound exists; that is a hard error here.  */
732 gcc_assert (max_size >= 0);
735 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
737 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
/* Register temporary TMP with the current function: set its context,
   chain it into the gimplify context's temps (or, outside gimplification,
   into the function body), and record it in any enclosing OMP region.
   NOTE(review): excerpted listing -- the return type, braces, the end of
   the comment at original 746-747, and the else-branches around originals
   753-774 are missing here.  */
741 gimple_add_tmp_var (tree tmp)
743 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
745 /* Later processing assumes that the object size is constant, which might
746 not be true at this point. Force the use of a constant upper bound in
747 this case. */
748 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
749 force_constant_size (tmp);
751 DECL_CONTEXT (tmp) = current_function_decl;
752 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
/* During gimplification, chain TMP onto the context's temporaries.  */
756 TREE_CHAIN (tmp) = gimplify_ctxp->temps;
757 gimplify_ctxp->temps = tmp;
759 /* Mark temporaries local within the nearest enclosing parallel. */
760 if (gimplify_omp_ctxp)
762 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
/* Skip worksharing regions; they inherit from the enclosing parallel.  */
763 while (ctx && ctx->region_type == ORT_WORKSHARE)
764 ctx = ctx->outer_context;
766 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
775 /* This case is for nested functions. We need to expose the locals
776 they create. */
777 body_seq = gimple_body (current_function_decl);
778 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
782 /* Determines whether to assign a location to the statement GS. */
/* NOTE(review): excerpted listing -- return types, braces and the
   `return false;'/`return true;' lines of this predicate group are
   among the originals missing here.  */
785 should_carry_location_p (gimple gs)
787 /* Don't emit a line note for a label. We particularly don't want to
788 emit one for the break label, since it doesn't actually correspond
789 to the beginning of the loop/switch. */
790 if (gimple_code (gs) == GIMPLE_LABEL)
797 /* Return true if a location should not be emitted for this statement
798 by annotate_one_with_location. */
801 gimple_do_not_emit_location_p (gimple g)
803 return gimple_plf (g, GF_PLF_1);
806 /* Mark statement G so a location will not be emitted by
807 annotate_one_with_location. */
810 gimple_set_do_not_emit_location (gimple g)
812 /* The PLF flags are initialized to 0 when a new tuple is created,
813 so no need to initialize it anywhere. */
814 gimple_set_plf (g, GF_PLF_1, true);
817 /* Set the location for gimple statement GS to LOCATION. */
820 annotate_one_with_location (gimple gs, location_t location)
/* Only annotate statements that have no location yet and are not
   explicitly opted out.  */
822 if (!gimple_has_location (gs)
823 && !gimple_do_not_emit_location_p (gs)
824 && should_carry_location_p (gs))
825 gimple_set_location (gs, location);
829 /* Set LOCATION for all the statements after iterator GSI in sequence
830 SEQ. If GSI is pointing to the end of the sequence, start with the
831 first statement in SEQ. */
/* NOTE(review): excerpted listing -- return types, braces, the LOCATION
   parameter line of the first function, the gsi_end_p guard before line
   838 and the early return after 854 are among the originals missing.  */
834 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
838 gsi = gsi_start (seq);
842 for (; !gsi_end_p (gsi); gsi_next (&gsi))
843 annotate_one_with_location (gsi_stmt (gsi), location);
847 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
850 annotate_all_with_location (gimple_seq stmt_p, location_t location)
852 gimple_stmt_iterator i;
854 if (gimple_seq_empty_p (stmt_p))
857 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
859 gimple gs = gsi_stmt (i);
860 annotate_one_with_location (gs, location);
865 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
866 These nodes model computations that should only be done once. If we
867 were to unshare something like SAVE_EXPR(i++), the gimplification
868 process would create wrong code. */
/* NOTE(review): excerpted listing -- the return type, braces, the BLOCK
   condition closing the `if' (originals 882-885) and the `return NULL;'
   lines are missing here.  */
871 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
873 enum tree_code code = TREE_CODE (*tp);
874 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
875 if (TREE_CODE_CLASS (code) == tcc_type
876 || TREE_CODE_CLASS (code) == tcc_declaration
877 || TREE_CODE_CLASS (code) == tcc_constant
878 || code == SAVE_EXPR || code == TARGET_EXPR
879 /* We can't do anything sensible with a BLOCK used as an expression,
880 but we also can't just die when we see it because of non-expression
881 uses. So just avert our eyes and cross our fingers. Silly Java. */
/* BIND_EXPRs must never reach this unsharing path.  */
886 gcc_assert (code != BIND_EXPR);
887 copy_tree_r (tp, walk_subtrees, data);
893 /* Callback for walk_tree to unshare most of the shared trees rooted at
894 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
895 then *TP is deep copied by calling copy_tree_r.
897 This unshares the same trees as copy_tree_r with the exception of
898 SAVE_EXPR nodes. These nodes model computations that should only be
899 done once. If we were to unshare something like SAVE_EXPR(i++), the
900 gimplification process would create wrong code. */
/* NOTE(review): excerpted listing -- braces, the local `tree t = *tp;',
   the `*walk_subtrees = 0;' lines and `return NULL;' are among the
   originals missing from both callbacks below.  */
903 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
904 void *data ATTRIBUTE_UNUSED)
907 enum tree_code code = TREE_CODE (t);
909 /* Skip types, decls, and constants. But we do want to look at their
910 types and the bounds of types. Mark them as visited so we properly
911 unmark their subtrees on the unmark pass. If we've already seen them,
912 don't look down further. */
913 if (TREE_CODE_CLASS (code) == tcc_type
914 || TREE_CODE_CLASS (code) == tcc_declaration
915 || TREE_CODE_CLASS (code) == tcc_constant)
917 if (TREE_VISITED (t))
920 TREE_VISITED (t) = 1;
923 /* If this node has been visited already, unshare it and don't look
924 any deeper. */
925 else if (TREE_VISITED (t))
927 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
931 /* Otherwise, mark the tree as visited and keep looking. */
933 TREE_VISITED (t) = 1;
/* Companion callback: clear the TREE_VISITED marks set above.  */
939 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
940 void *data ATTRIBUTE_UNUSED)
942 if (TREE_VISITED (*tp))
943 TREE_VISITED (*tp) = 0;
950 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
951 bodies of any nested functions if we are unsharing the entire body of
952 FNDECL. */
/* NOTE(review): excerpted listing -- return types, braces and the final
   `return expr;' of unshare_expr are among the originals missing here.  */
955 unshare_body (tree *body_p, tree fndecl)
957 struct cgraph_node *cgn = cgraph_node (fndecl);
959 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
/* Recurse into nested functions only when unsharing the whole body.  */
960 if (body_p == &DECL_SAVED_TREE (fndecl))
961 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
962 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
965 /* Likewise, but mark all trees as not visited. */
968 unvisit_body (tree *body_p, tree fndecl)
970 struct cgraph_node *cgn = cgraph_node (fndecl);
972 walk_tree (body_p, unmark_visited_r, NULL, NULL);
973 if (body_p == &DECL_SAVED_TREE (fndecl))
974 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
975 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
978 /* Unconditionally make an unshared copy of EXPR. This is used when using
979 stored expressions which span multiple functions, such as BINFO_VTABLE,
980 as the normal unsharing process can't tell that they're shared. */
983 unshare_expr (tree expr)
985 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
989 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
990 contain statements and have a value. Assign its value to a temporary
991 and give it void_type_node. Returns the temporary, or NULL_TREE if
992 WRAPPER was already void. */
/* NOTE(review): excerpted listing -- the return type, braces, several
   case labels and `break's in the switch, the `goto out'/default paths,
   and the final `return temp;' are among the originals missing here.  */
995 voidify_wrapper_expr (tree wrapper, tree temp)
997 tree type = TREE_TYPE (wrapper);
/* Nothing to do when the wrapper already has void type.  */
998 if (type && !VOID_TYPE_P (type))
1002 /* Set p to point to the body of the wrapper. Loop until we find
1003 something that isn't a wrapper. */
1004 for (p = &wrapper; p && *p; )
1006 switch (TREE_CODE (*p))
/* BIND_EXPR case: void the wrapper and descend into its body.  */
1009 TREE_SIDE_EFFECTS (*p) = 1;
1010 TREE_TYPE (*p) = void_type_node;
1011 /* For a BIND_EXPR, the body is operand 1. */
1012 p = &BIND_EXPR_BODY (*p);
1015 case CLEANUP_POINT_EXPR:
1016 case TRY_FINALLY_EXPR:
1017 case TRY_CATCH_EXPR:
1018 TREE_SIDE_EFFECTS (*p) = 1;
1019 TREE_TYPE (*p) = void_type_node;
1020 p = &TREE_OPERAND (*p, 0);
1023 case STATEMENT_LIST:
/* Descend to the last statement of the list, if any.  */
1025 tree_stmt_iterator i = tsi_last (*p);
1026 TREE_SIDE_EFFECTS (*p) = 1;
1027 TREE_TYPE (*p) = void_type_node;
1028 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1033 /* Advance to the last statement. Set all container types to void. */
1034 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1036 TREE_SIDE_EFFECTS (*p) = 1;
1037 TREE_TYPE (*p) = void_type_node;
/* No value-producing statement found: nothing to capture.  */
1047 if (p == NULL || IS_EMPTY_STMT (*p))
1051 /* The wrapper is on the RHS of an assignment that we're pushing
1052 down. */
1053 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1054 || TREE_CODE (temp) == MODIFY_EXPR);
1055 TREE_OPERAND (temp, 1) = *p;
/* Otherwise capture the value into a fresh "retval" temporary.  */
1060 temp = create_tmp_var (type, "retval");
1061 *p = build2 (INIT_EXPR, type, temp, *p);
1070 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1071 a temporary through which they communicate. */
/* NOTE(review): excerpted listing -- the return type, braces, the local
   `tree tmp_var;' and the trailing arguments of the restore call
   (originals 1083-1085) are missing here.  */
1074 build_stack_save_restore (gimple *save, gimple *restore)
/* The save call's result (the saved stack pointer) is held in a
   temporary that the restore call consumes.  */
1078 *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
1079 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1080 gimple_call_set_lhs (*save, tmp_var);
1082 *restore = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1086 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
/* NOTE(review): excerpted listing -- the opening brace, local
   declarations (t, gimple_bind, body), several closing braces, the
   condition head at original 1126, and the tail of the function
   (originals 1170-1176, 1178-1180, including the return of GS_ALL_DONE)
   are missing here.  */
1088 static enum gimplify_status
1089 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1091 tree bind_expr = *expr_p;
1092 bool old_save_stack = gimplify_ctxp->save_stack;
/* Voidify first: a BIND_EXPR with a value gets its value captured in a
   temporary by voidify_wrapper_expr.  */
1097 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1099 /* Mark variables seen in this bind expr. */
1100 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1102 if (TREE_CODE (t) == VAR_DECL)
1104 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1106 /* Mark variable as local. */
1107 if (ctx && !is_global_var (t)
1108 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1109 || splay_tree_lookup (ctx->variables,
1110 (splay_tree_key) t) == NULL))
1111 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1113 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1115 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1116 cfun->has_local_explicit_reg_vars = true;
1119 /* Preliminarily mark non-addressed complex variables as eligible
1120 for promotion to gimple registers. We'll transform their uses
1121 as we find them.
1122 We exclude complex types if not optimizing because they can be
1123 subject to partial stores in GNU C by means of the __real__ and
1124 __imag__ operators and we cannot promote them to total stores
1125 (see gimplify_modify_expr_complex_part). */
1127 && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1128 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1129 && !TREE_THIS_VOLATILE (t)
1130 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1131 && !needs_to_live_in_memory (t))
1132 DECL_GIMPLE_REG_P (t) = 1;
1135 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1136 BIND_EXPR_BLOCK (bind_expr));
1137 gimple_push_bind_expr (gimple_bind);
/* Track whether the body needs stack save/restore; restore the caller's
   flag afterwards (line 1166).  */
1139 gimplify_ctxp->save_stack = false;
1141 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1143 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1144 gimple_bind_set_body (gimple_bind, body);
1146 if (gimplify_ctxp->save_stack)
1148 gimple stack_save, stack_restore, gs;
1149 gimple_seq cleanup, new_body;
1151 /* Save stack on entry and restore it on exit. Add a try_finally
1152 block to achieve this. Note that mudflap depends on the
1153 format of the emitted code: see mx_register_decls(). */
1154 build_stack_save_restore (&stack_save, &stack_restore);
1156 cleanup = new_body = NULL;
1157 gimplify_seq_add_stmt (&cleanup, stack_restore);
1158 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1159 GIMPLE_TRY_FINALLY);
1161 gimplify_seq_add_stmt (&new_body, stack_save);
1162 gimplify_seq_add_stmt (&new_body, gs);
1163 gimple_bind_set_body (gimple_bind, new_body);
1166 gimplify_ctxp->save_stack = old_save_stack;
1167 gimple_pop_bind_expr ();
1169 gimplify_seq_add_stmt (pre_p, gimple_bind);
/* The BIND_EXPR has been consumed; clear the caller's expression.  */
1177 *expr_p = NULL_TREE;
1181 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1182 GIMPLE value, it is assigned to a new temporary and the statement is
1183 re-written to return the temporary.
1185 PRE_P points to the sequence where side effects that must happen before
1186 STMT should be stored. */
/* NOTE(review): excerpted listing -- the opening brace, the `gimple ret;'
   local, the head of the condition before original 1199, several
   braces/else lines, the condition head before 1232, and the tail
   returning GS_ALL_DONE (originals 1259-1263 region) are missing here.  */
1188 static enum gimplify_status
1189 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1192 tree ret_expr = TREE_OPERAND (stmt, 0);
1193 tree result_decl, result;
1195 if (ret_expr == error_mark_node)
/* Trivial returns (no operand, bare RESULT_DECL, or error) are emitted
   directly as a GIMPLE_RETURN.  */
1199 || TREE_CODE (ret_expr) == RESULT_DECL
1200 || ret_expr == error_mark_node)
1202 gimple ret = gimple_build_return (ret_expr);
1203 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1204 gimplify_seq_add_stmt (pre_p, ret);
1208 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1209 result_decl = NULL_TREE;
1212 result_decl = TREE_OPERAND (ret_expr, 0);
1214 /* See through a return by reference. */
1215 if (TREE_CODE (result_decl) == INDIRECT_REF)
1216 result_decl = TREE_OPERAND (result_decl, 0);
1218 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1219 || TREE_CODE (ret_expr) == INIT_EXPR)
1220 && TREE_CODE (result_decl) == RESULT_DECL);
1223 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1224 Recall that aggregate_value_p is FALSE for any aggregate type that is
1225 returned in registers. If we're returning values in registers, then
1226 we don't want to extend the lifetime of the RESULT_DECL, particularly
1227 across another call. In addition, for those aggregates for which
1228 hard_function_value generates a PARALLEL, we'll die during normal
1229 expansion of structure assignments; there's special code in expand_return
1230 to handle this case that does not exist in expand_expr. */
1232 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1233 result = result_decl;
/* Reuse a single return temporary for the whole function.  */
1234 else if (gimplify_ctxp->return_temp)
1235 result = gimplify_ctxp->return_temp;
1238 result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1240 /* ??? With complex control flow (usually involving abnormal edges),
1241 we can wind up warning about an uninitialized value for this. Due
1242 to how this variable is constructed and initialized, this is never
1243 true. Give up and never warn. */
1244 TREE_NO_WARNING (result) = 1;
1246 gimplify_ctxp->return_temp = result;
1249 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1250 Then gimplify the whole thing. */
1251 if (result != result_decl)
1252 TREE_OPERAND (ret_expr, 0) = result;
1254 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1256 ret = gimple_build_return (result);
1257 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1258 gimplify_seq_add_stmt (pre_p, ret);
/* Lower variable-length-array DECL: gimplify its size expressions,
   rewrite all uses via a pointer temporary (DECL_VALUE_EXPR), and emit
   an alloca call to allocate the storage.
   NOTE(review): excerpted listing -- the `static void' line, braces and
   a SET_EXPR_LOCATION around original 1290 are missing here.  */
1264 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1266 /* This is a variable-sized decl. Simplify its size and mark it
1267 for deferred expansion. Note that mudflap depends on the format
1268 of the emitted code: see mx_register_decls(). */
1269 tree t, addr, ptr_type;
1271 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1272 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1274 /* All occurrences of this decl in final gimplified code will be
1275 replaced by indirection. Setting DECL_VALUE_EXPR does two
1276 things: First, it lets the rest of the gimplifier know what
1277 replacement to use. Second, it lets the debug info know
1278 where to find the value. */
1279 ptr_type = build_pointer_type (TREE_TYPE (decl));
1280 addr = create_tmp_var (ptr_type, get_name (decl));
/* Keep the pointer visible in debug info (temps are normally ignored).  */
1281 DECL_IGNORED_P (addr) = 0;
1282 t = build_fold_indirect_ref (addr);
1283 SET_DECL_VALUE_EXPR (decl, t);
1284 DECL_HAS_VALUE_EXPR_P (decl) = 1;
/* addr = (ptr_type) __builtin_alloca (DECL_SIZE_UNIT (decl));  */
1286 t = built_in_decls[BUILT_IN_ALLOCA];
1287 t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
1288 t = fold_convert (ptr_type, t);
1289 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1291 gimplify_and_add (t, seq_p);
1293 /* Indicate that we need to restore the stack level when the
1294 enclosing BIND_EXPR is exited. */
1295 gimplify_ctxp->save_stack = true;
1299 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1300 and initialization explicit. */
/* Returns an enum gimplify_status; emitted statements go onto SEQ_P.
   NOTE(review): extract is lossy -- braces and some return statements
   (e.g. after the error_mark_node check) appear to have been dropped;
   verify against upstream gimplify.c.  */
1302 static enum gimplify_status
1303 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1305 tree stmt = *stmt_p;
1306 tree decl = DECL_EXPR_DECL (stmt);
/* The DECL_EXPR itself is consumed here; replace it with nothing.  */
1308 *stmt_p = NULL_TREE;
1310 if (TREE_TYPE (decl) == error_mark_node)
/* Gimplify the type's size expressions once per type.  */
1313 if ((TREE_CODE (decl) == TYPE_DECL
1314 || TREE_CODE (decl) == VAR_DECL)
1315 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1316 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1318 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1320 tree init = DECL_INITIAL (decl);
/* Variable-sized decls -- or large ones under generic stack checking --
   get dynamic (alloca-style) allocation via gimplify_vla_decl.  */
1322 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1323 || (!TREE_STATIC (decl)
1324 && flag_stack_check == GENERIC_STACK_CHECK
1325 && compare_tree_int (DECL_SIZE_UNIT (decl),
1326 STACK_CHECK_MAX_VAR_SIZE) > 0))
1327 gimplify_vla_decl (decl, seq_p);
1329 if (init && init != error_mark_node)
1331 if (!TREE_STATIC (decl))
/* Turn the automatic-variable initializer into an explicit INIT_EXPR
   statement; the decl no longer carries DECL_INITIAL.  */
1333 DECL_INITIAL (decl) = NULL_TREE;
1334 init = build2 (INIT_EXPR, void_type_node, decl, init);
1335 gimplify_and_add (init, seq_p);
1339 /* We must still examine initializers for static variables
1340 as they may contain a label address. */
1341 walk_tree (&init, force_labels_r, NULL, NULL);
1344 /* Some front ends do not explicitly declare all anonymous
1345 artificial variables. We compensate here by declaring the
1346 variables, though it would be better if the front ends would
1347 explicitly declare them. */
1348 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1349 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1350 gimple_add_tmp_var (decl);
1356 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1357 and replacing the LOOP_EXPR with goto, but if the loop contains an
1358 EXIT_EXPR, we need to append a label for it to jump to. */
/* The exit label is communicated from gimplify_exit_expr through
   gimplify_ctxp->exit_label; the previous value is saved and restored so
   nested loops each get their own label.  NOTE(review): lossy extract --
   braces/return lines appear dropped; verify against upstream.  */
1360 static enum gimplify_status
1361 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1363 tree saved_label = gimplify_ctxp->exit_label;
1364 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1366 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
/* Clear the exit label so we can tell whether the body created one.  */
1368 gimplify_ctxp->exit_label = NULL_TREE;
1370 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
/* The back-edge: jump unconditionally to the loop start.  */
1372 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1374 if (gimplify_ctxp->exit_label)
1375 gimplify_seq_add_stmt (pre_p, gimple_build_label (gimplify_ctxp->exit_label));
1377 gimplify_ctxp->exit_label = saved_label;
1383 /* Gimplifies a statement list onto a sequence. These may be created either
1384 by an enlightened front-end, or by shortcut_cond_expr. */
/* NOTE(review): the tail of this function (loop close, temp handling,
   return) appears to have been dropped by the extraction; only the
   per-statement gimplification loop is visible here.  */
1386 static enum gimplify_status
1387 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
/* voidify_wrapper_expr yields a temporary if the list's value is used.  */
1389 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1391 tree_stmt_iterator i = tsi_start (*expr_p);
1393 while (!tsi_end_p (i))
1395 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1408 /* Compare two case labels. Because the front end should already have
1409 made sure that case ranges do not overlap, it is enough to only compare
1410 the CASE_LOW values of each case label. */
/* qsort comparator: P1/P2 point at CASE_LABEL_EXPR trees.  A label with
   no CASE_LOW is the 'default' case and sorts first.  NOTE(review): the
   return statements for the two default-case branches appear to have been
   dropped by the extraction.  */
1413 compare_case_labels (const void *p1, const void *p2)
1415 const_tree const case1 = *(const_tree const*)p1;
1416 const_tree const case2 = *(const_tree const*)p2;
1418 /* The 'default' case label always goes first. */
1419 if (!CASE_LOW (case1))
1421 else if (!CASE_LOW (case2))
1424 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1428 /* Sort the case labels in LABEL_VEC in place in ascending order. */
/* Thin qsort wrapper over compare_case_labels; the default label (if any)
   ends up first in the vector.  */
1431 sort_case_labels (VEC(tree,heap)* label_vec)
1433 size_t len = VEC_length (tree, label_vec);
1434 qsort (VEC_address (tree, label_vec), len, sizeof (tree),
1435 compare_case_labels);
1439 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
/* Lowers a GENERIC SWITCH_EXPR into a GIMPLE_SWITCH statement followed by
   the gimplified body.  Case labels are collected through
   gimplify_ctxp->case_labels while the body is gimplified, cleaned up
   (empty ranges removed), sorted, and a default label is synthesized if
   the source had none.  NOTE(review): this extract is heavily lossy --
   many statements (returns, brace lines, parts of conditions) appear to
   have been dropped between the numbered lines; verify every control-flow
   edge against upstream gimplify.c before changing anything here.  */
1442 static enum gimplify_status
1443 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1445 tree switch_expr = *expr_p;
1446 gimple_seq switch_body_seq = NULL;
1447 enum gimplify_status ret;
/* The controlling expression must become a gimple value first.  */
1449 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1451 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1454 if (SWITCH_BODY (switch_expr))
1456 VEC (tree,heap) *labels;
1457 VEC (tree,heap) *saved_labels;
1458 tree default_case = NULL_TREE;
1460 gimple gimple_switch;
1462 /* If someone can be bothered to fill in the labels, they can
1463 be bothered to null out the body too. */
1464 gcc_assert (!SWITCH_LABELS (switch_expr));
1466 /* save old labels, get new ones from body, then restore the old
1467 labels. Save all the things from the switch body to append after. */
1468 saved_labels = gimplify_ctxp->case_labels;
1469 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1471 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1472 labels = gimplify_ctxp->case_labels;
1473 gimplify_ctxp->case_labels = saved_labels;
/* Scan the collected labels, dropping useless ones.  */
1476 while (i < VEC_length (tree, labels))
1478 tree elt = VEC_index (tree, labels, i);
1479 tree low = CASE_LOW (elt);
1480 bool remove_element = FALSE;
1484 /* Discard empty ranges. */
1485 tree high = CASE_HIGH (elt);
1486 if (high && tree_int_cst_lt (high, low))
1487 remove_element = TRUE;
1491 /* The default case must be the last label in the list. */
1492 gcc_assert (!default_case);
1494 remove_element = TRUE;
1498 VEC_ordered_remove (tree, labels, i);
1504 if (!VEC_empty (tree, labels))
1505 sort_case_labels (labels);
1509 tree type = TREE_TYPE (switch_expr);
1511 /* If the switch has no default label, add one, so that we jump
1512 around the switch body. If the labels already cover the whole
1513 range of type, add the default label pointing to one of the
1515 if (type == void_type_node)
1516 type = TREE_TYPE (SWITCH_COND (switch_expr));
1518 && INTEGRAL_TYPE_P (type)
1519 && TYPE_MIN_VALUE (type)
1520 && TYPE_MAX_VALUE (type)
1521 && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
1522 TYPE_MIN_VALUE (type)))
1524 tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
1526 high = CASE_LOW (VEC_index (tree, labels, len - 1));
/* Check whether the sorted labels cover the type's whole range
   contiguously; if so, the default can reuse an existing label.  */
1527 if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type)))
1529 for (i = 1; i < len; i++)
1531 high = CASE_LOW (VEC_index (tree, labels, i));
1532 low = CASE_HIGH (VEC_index (tree, labels, i - 1));
1534 low = CASE_LOW (VEC_index (tree, labels, i - 1));
/* Double-word gap test: adjacent labels must satisfy low + 1 == high
   across the (LOW, HIGH) two-word integer-constant representation.  */
1535 if ((TREE_INT_CST_LOW (low) + 1
1536 != TREE_INT_CST_LOW (high))
1537 || (TREE_INT_CST_HIGH (low)
1538 + (TREE_INT_CST_LOW (high) == 0)
1539 != TREE_INT_CST_HIGH (high)))
1543 default_case = build3 (CASE_LABEL_EXPR, void_type_node,
1544 NULL_TREE, NULL_TREE,
1545 CASE_LABEL (VEC_index (tree,
/* Otherwise synthesize a fresh artificial default label and append it
   to the end of the switch body.  */
1555 = build3 (CASE_LABEL_EXPR, void_type_node,
1556 NULL_TREE, NULL_TREE,
1557 create_artificial_label (UNKNOWN_LOCATION));
1558 new_default = gimple_build_label (CASE_LABEL (default_case));
1559 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1563 gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
1564 default_case, labels);
1565 gimplify_seq_add_stmt (pre_p, gimple_switch);
1566 gimplify_seq_add_seq (pre_p, switch_body_seq);
1567 VEC_free(tree, heap, labels);
1570 gcc_assert (SWITCH_LABELS (switch_expr));
/* Gimplify a CASE_LABEL_EXPR *EXPR_P: emit the GIMPLE label onto PRE_P and
   record the case in the innermost gimplify context that is collecting
   case labels (found by walking prev_context).  NOTE(review): lossy
   extract -- the loop's break and the final return appear dropped.  */
1576 static enum gimplify_status
1577 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1579 struct gimplify_ctx *ctxp;
1580 gimple gimple_label;
1582 /* Invalid OpenMP programs can play Duff's Device type games with
1583 #pragma omp parallel. At least in the C front end, we don't
1584 detect such invalid branches until after gimplification. */
1585 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1586 if (ctxp->case_labels)
1589 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1590 VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
1591 gimplify_seq_add_stmt (pre_p, gimple_label);
1596 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
/* ... creating the label lazily if *LABEL_P is still NULL_TREE.  A NULL
   LABEL_P means "nowhere to jump" and (per the comment below) falls
   through.  NOTE(review): the early-return and the store of the freshly
   created label into *LABEL_P appear dropped by the extraction.  */
1600 build_and_jump (tree *label_p)
1602 if (label_p == NULL)
1603 /* If there's nowhere to jump, just fall through. */
1606 if (*label_p == NULL_TREE)
1608 tree label = create_artificial_label (UNKNOWN_LOCATION);
1612 return build1 (GOTO_EXPR, void_type_node, *label_p);
1615 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1616 This also involves building a label to jump to and communicating it to
1617 gimplify_loop_expr through gimplify_ctxp->exit_label. */
/* Rewrites *EXPR_P as "if (cond) goto exit_label;"; the label is created
   on demand by build_and_jump.  NOTE(review): the trailing store of EXPR
   into *EXPR_P and the return appear dropped by the extraction.  */
1619 static enum gimplify_status
1620 gimplify_exit_expr (tree *expr_p)
1622 tree cond = TREE_OPERAND (*expr_p, 0);
1625 expr = build_and_jump (&gimplify_ctxp->exit_label);
1626 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1632 /* A helper function to be called via walk_tree. Mark all labels under *TP
1633 as being forced. To be called for DECL_INITIAL of static variables. */
/* walk_tree callback: setting FORCED_LABEL keeps label decls alive even if
   otherwise unreferenced (their address may be stored in static data).  */
1636 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1640 if (TREE_CODE (*tp) == LABEL_DECL)
1641 FORCED_LABEL (*tp) = 1;
1646 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1647 different from its canonical type, wrap the whole thing inside a
1648 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1651 The canonical type of a COMPONENT_REF is the type of the field being
1652 referenced--unless the field is a bit-field which can be read directly
1653 in a smaller mode, in which case the canonical type is the
1654 sign-appropriate type corresponding to that mode. */
/* NOTE(review): lossy extract -- some lines (e.g. the type_quals
   declaration) appear dropped between the numbered lines.  */
1657 canonicalize_component_ref (tree *expr_p)
1659 tree expr = *expr_p;
1662 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
/* For integral types, get_unwidened picks the narrowest type the
   bit-field can be read in; otherwise use the field's declared type.  */
1664 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1665 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1667 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1669 /* One could argue that all the stuff below is not necessary for
1670 the non-bitfield case and declare it a FE error if type
1671 adjustment would be needed. */
1672 if (TREE_TYPE (expr) != type)
1674 #ifdef ENABLE_TYPES_CHECKING
1675 tree old_type = TREE_TYPE (expr);
1679 /* We need to preserve qualifiers and propagate them from
1681 type_quals = TYPE_QUALS (type)
1682 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1683 if (TYPE_QUALS (type) != type_quals)
1684 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1686 /* Set the type of the COMPONENT_REF to the underlying type. */
1687 TREE_TYPE (expr) = type;
1689 #ifdef ENABLE_TYPES_CHECKING
1690 /* It is now a FE error, if the conversion from the canonical
1691 type to the original expression type is not useless. */
1692 gcc_assert (useless_type_conversion_p (old_type, type));
1697 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1698 to foo, embed that change in the ADDR_EXPR by converting
1703 where L is the lower bound. For simplicity, only do this for constant
1705 The constraint is that the type of &array[L] is trivially convertible
/* i.e. rewrite (foo *)&array  as  &array[L].  Bails out (early returns,
   apparently dropped by the extraction) unless: the outer type is a
   pointer, the operand is an ADDR_EXPR to an array, the element-pointer
   type is trivially convertible, and the bound/size are constants.  */
1709 canonicalize_addr_expr (tree *expr_p)
1711 tree expr = *expr_p;
1712 tree addr_expr = TREE_OPERAND (expr, 0);
1713 tree datype, ddatype, pddatype;
1715 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1716 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1717 || TREE_CODE (addr_expr) != ADDR_EXPR)
1720 /* The addr_expr type should be a pointer to an array. */
1721 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1722 if (TREE_CODE (datype) != ARRAY_TYPE)
1725 /* The pointer to element type shall be trivially convertible to
1726 the expression pointer type. */
1727 ddatype = TREE_TYPE (datype);
1728 pddatype = build_pointer_type (ddatype);
1729 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1733 /* The lower bound and element sizes must be constant. */
1734 if (!TYPE_SIZE_UNIT (ddatype)
1735 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1736 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1737 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1740 /* All checks succeeded. Build a new node to merge the cast. */
1741 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1742 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1743 NULL_TREE, NULL_TREE)
1744 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1746 /* We can have stripped a required restrict qualifier above. */
1747 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1748 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1751 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1752 underneath as appropriate. */
/* Strips redundant nested conversions, folds pointer casts into address
   expressions where possible, canonicalizes COMPONENT_REF/ADDR_EXPR
   operands, and finally replaces any remaining conversion to a
   non-register type with a VIEW_CONVERT_EXPR.  NOTE(review): lossy
   extract -- the declaration of TEM and the final return appear to have
   been dropped between the numbered lines.  */
1754 static enum gimplify_status
1755 gimplify_conversion (tree *expr_p)
1758 location_t loc = EXPR_LOCATION (*expr_p);
1759 gcc_assert (CONVERT_EXPR_P (*expr_p));
1761 /* Then strip away all but the outermost conversion. */
1762 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1764 /* And remove the outermost conversion if it's useless. */
1765 if (tree_ssa_useless_type_conversion (*expr_p))
1766 *expr_p = TREE_OPERAND (*expr_p, 0);
1768 /* Attempt to avoid NOP_EXPR by producing reference to a subtype.
1769 For example this fold (subclass *)&A into &A->subclass avoiding
1770 a need for statement. */
1771 if (CONVERT_EXPR_P (*expr_p)
1772 && POINTER_TYPE_P (TREE_TYPE (*expr_p))
1773 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
1774 && (tem = maybe_fold_offset_to_address
1775 (EXPR_LOCATION (*expr_p), TREE_OPERAND (*expr_p, 0),
1776 integer_zero_node, TREE_TYPE (*expr_p))) != NULL_TREE)
1779 /* If we still have a conversion at the toplevel,
1780 then canonicalize some constructs. */
1781 if (CONVERT_EXPR_P (*expr_p))
1783 tree sub = TREE_OPERAND (*expr_p, 0);
1785 /* If a NOP conversion is changing the type of a COMPONENT_REF
1786 expression, then canonicalize its type now in order to expose more
1787 redundant conversions. */
1788 if (TREE_CODE (sub) == COMPONENT_REF)
1789 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1791 /* If a NOP conversion is changing a pointer to array of foo
1792 to a pointer to foo, embed that change in the ADDR_EXPR. */
1793 else if (TREE_CODE (sub) == ADDR_EXPR)
1794 canonicalize_addr_expr (expr_p);
1797 /* If we have a conversion to a non-register type force the
1798 use of a VIEW_CONVERT_EXPR instead. */
1799 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1800 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1801 TREE_OPERAND (*expr_p, 0));
1806 /* Nonlocal VLAs seen in the current function. */
1807 static struct pointer_set_t *nonlocal_vlas;
1809 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1810 DECL_VALUE_EXPR, and it's worth re-examining things. */
/* NOTE(review): lossy extract -- several return statements and brace
   lines appear dropped between the numbered lines; verify against
   upstream gimplify.c.  */
1812 static enum gimplify_status
1813 gimplify_var_or_parm_decl (tree *expr_p)
1815 tree decl = *expr_p;
1817 /* ??? If this is a local variable, and it has not been seen in any
1818 outer BIND_EXPR, then it's probably the result of a duplicate
1819 declaration, for which we've already issued an error. It would
1820 be really nice if the front end wouldn't leak these at all.
1821 Currently the only known culprit is C++ destructors, as seen
1822 in g++.old-deja/g++.jason/binding.C. */
1823 if (TREE_CODE (decl) == VAR_DECL
1824 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1825 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1826 && decl_function_context (decl) == current_function_decl)
1828 gcc_assert (errorcount || sorrycount);
1832 /* When within an OpenMP context, notice uses of variables. */
1833 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1836 /* If the decl is an alias for another expression, substitute it now. */
1837 if (DECL_HAS_VALUE_EXPR_P (decl))
1839 tree value_expr = DECL_VALUE_EXPR (decl);
1841 /* For referenced nonlocal VLAs add a decl for debugging purposes
1842 to the current function. */
1843 if (TREE_CODE (decl) == VAR_DECL
1844 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1845 && nonlocal_vlas != NULL
1846 && TREE_CODE (value_expr) == INDIRECT_REF
1847 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1848 && decl_function_context (decl) != current_function_decl)
/* Only do this when not inside an OpenMP workshare region; skip over
   workshare contexts to find a more restrictive enclosing one.  */
1850 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1851 while (ctx && ctx->region_type == ORT_WORKSHARE)
1852 ctx = ctx->outer_context;
/* pointer_set_insert also dedupes: each nonlocal VLA gets one copy.  */
1853 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1855 tree copy = copy_node (decl), block;
1857 lang_hooks.dup_lang_specific_decl (copy);
1858 SET_DECL_RTL (copy, NULL_RTX);
1859 TREE_USED (copy) = 1;
/* Chain the debug copy into the outermost BLOCK of this function.  */
1860 block = DECL_INITIAL (current_function_decl);
1861 TREE_CHAIN (copy) = BLOCK_VARS (block);
1862 BLOCK_VARS (block) = copy;
1863 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1864 DECL_HAS_VALUE_EXPR_P (copy) = 1;
/* Substitute the value expression for the decl itself.  */
1868 *expr_p = unshare_expr (value_expr);
1876 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1880 : min_lval '[' val ']'
1882 | compound_lval '[' val ']'
1883 | compound_lval '.' ID
1885 This is not part of the original SIMPLE definition, which separates
1886 array and member references, but it seems reasonable to handle them
1887 together. Also, this way we don't run into problems with union
1888 aliasing; gcc requires that for accesses through a union to alias, the
1889 union reference must be explicit, which was not always the case when we
1890 were splitting up array and member refs.
1892 PRE_P points to the sequence where side effects that must happen before
1893 *EXPR_P should be stored.
1895 POST_P points to the sequence where side effects that must happen after
1896 *EXPR_P should be stored. */
/* NOTE(review): lossy extract -- declarations (P, I), brace lines, break
   statements and the final return appear dropped between numbered lines;
   the three-step structure documented below is still visible.  */
1898 static enum gimplify_status
1899 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1900 fallback_t fallback)
1903 VEC(tree,heap) *stack;
1904 enum gimplify_status ret = GS_OK, tret;
1906 location_t loc = EXPR_LOCATION (*expr_p);
1908 /* Create a stack of the subexpressions so later we can walk them in
1909 order from inner to outer. */
1910 stack = VEC_alloc (tree, heap, 10);
1912 /* We can handle anything that get_inner_reference can deal with. */
1913 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1916 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1917 if (TREE_CODE (*p) == INDIRECT_REF)
1918 *p = fold_indirect_ref_loc (loc, *p);
1920 if (handled_component_p (*p))
1922 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1923 additional COMPONENT_REFs. */
1924 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1925 && gimplify_var_or_parm_decl (p) == GS_OK)
1930 VEC_safe_push (tree, heap, stack, *p);
1933 gcc_assert (VEC_length (tree, stack));
1935 /* Now STACK is a stack of pointers to all the refs we've walked through
1936 and P points to the innermost expression.
1938 Java requires that we elaborated nodes in source order. That
1939 means we must gimplify the inner expression followed by each of
1940 the indices, in order. But we can't gimplify the inner
1941 expression until we deal with any variable bounds, sizes, or
1942 positions in order to deal with PLACEHOLDER_EXPRs.
1944 So we do this in three steps. First we deal with the annotations
1945 for any variables in the components, then we gimplify the base,
1946 then we gimplify any indices, from left to right. */
1947 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1949 tree t = VEC_index (tree, stack, i);
1951 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1953 /* Gimplify the low bound and element type size and put them into
1954 the ARRAY_REF. If these values are set, they have already been
1956 if (TREE_OPERAND (t, 2) == NULL_TREE)
1958 tree low = unshare_expr (array_ref_low_bound (t));
1959 if (!is_gimple_min_invariant (low))
1961 TREE_OPERAND (t, 2) = low;
1962 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1963 post_p, is_gimple_reg,
1965 ret = MIN (ret, tret);
/* Same treatment for the element size (operand 3).  */
1969 if (!TREE_OPERAND (t, 3))
1971 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1972 tree elmt_size = unshare_expr (array_ref_element_size (t));
1973 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1975 /* Divide the element size by the alignment of the element
1977 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
1979 if (!is_gimple_min_invariant (elmt_size))
1981 TREE_OPERAND (t, 3) = elmt_size;
1982 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
1983 post_p, is_gimple_reg,
1985 ret = MIN (ret, tret);
1989 else if (TREE_CODE (t) == COMPONENT_REF)
1991 /* Set the field offset into T and gimplify it. */
1992 if (!TREE_OPERAND (t, 2))
1994 tree offset = unshare_expr (component_ref_field_offset (t));
1995 tree field = TREE_OPERAND (t, 1);
1997 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1999 /* Divide the offset by its alignment. */
2000 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2002 if (!is_gimple_min_invariant (offset))
2004 TREE_OPERAND (t, 2) = offset;
2005 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2006 post_p, is_gimple_reg,
2008 ret = MIN (ret, tret);
2014 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2015 so as to match the min_lval predicate. Failure to do so may result
2016 in the creation of large aggregate temporaries. */
2017 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2018 fallback | fb_lvalue);
2019 ret = MIN (ret, tret);
2021 /* And finally, the indices and operands to BIT_FIELD_REF. During this
2022 loop we also remove any useless conversions. */
2023 for (; VEC_length (tree, stack) > 0; )
2025 tree t = VEC_pop (tree, stack);
2027 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2029 /* Gimplify the dimension. */
2030 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2032 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2033 is_gimple_val, fb_rvalue);
2034 ret = MIN (ret, tret);
2037 else if (TREE_CODE (t) == BIT_FIELD_REF)
2039 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2040 is_gimple_val, fb_rvalue);
2041 ret = MIN (ret, tret);
2042 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2043 is_gimple_val, fb_rvalue);
2044 ret = MIN (ret, tret);
2047 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2049 /* The innermost expression P may have originally had
2050 TREE_SIDE_EFFECTS set which would have caused all the outer
2051 expressions in *EXPR_P leading to P to also have had
2052 TREE_SIDE_EFFECTS set. */
2053 recalculate_side_effects (t);
2056 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2057 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2059 canonicalize_component_ref (expr_p);
2060 ret = MIN (ret, GS_OK);
2063 VEC_free (tree, heap, stack);
2068 /* Gimplify the self modifying expression pointed to by EXPR_P
2071 PRE_P points to the list where side effects that must happen before
2072 *EXPR_P should be stored.
2074 POST_P points to the list where side effects that must happen after
2075 *EXPR_P should be stored.
2077 WANT_VALUE is nonzero iff we want to use the value of this expression
2078 in another expression. */
/* Handles the four ++/-- forms by rewriting them as an explicit
   assignment (lvalue = lhs OP rhs); postfix forms route the store through
   a local post queue so the original value is the result.  NOTE(review):
   lossy extract -- the WANT_VALUE parameter line, POSTFIX declaration,
   several returns and brace lines appear dropped; verify upstream.  */
2080 static enum gimplify_status
2081 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2084 enum tree_code code;
2085 tree lhs, lvalue, rhs, t1;
2086 gimple_seq post = NULL, *orig_post_p = post_p;
2088 enum tree_code arith_code;
2089 enum gimplify_status ret;
2090 location_t loc = EXPR_LOCATION (*expr_p);
2092 code = TREE_CODE (*expr_p);
2094 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2095 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2097 /* Prefix or postfix? */
2098 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2099 /* Faster to treat as prefix if result is not used. */
2100 postfix = want_value;
2104 /* For postfix, make sure the inner expression's post side effects
2105 are executed after side effects from this expression. */
2109 /* Add or subtract? */
2110 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2111 arith_code = PLUS_EXPR;
2113 arith_code = MINUS_EXPR;
2115 /* Gimplify the LHS into a GIMPLE lvalue. */
2116 lvalue = TREE_OPERAND (*expr_p, 0);
2117 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2118 if (ret == GS_ERROR)
2121 /* Extract the operands to the arithmetic operation. */
2123 rhs = TREE_OPERAND (*expr_p, 1);
2125 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2126 that as the result value and in the postqueue operation. We also
2127 make sure to make lvalue a minimal lval, see
2128 gcc.c-torture/execute/20040313-1.c for an example where this matters. */
2131 if (!is_gimple_min_lval (lvalue))
/* Reduce a complex lvalue to *&lvalue so it can be evaluated twice
   (once for the value, once for the store) without re-running side
   effects.  */
2133 mark_addressable (lvalue);
2134 lvalue = build_fold_addr_expr_loc (input_location, lvalue);
2135 gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
2136 lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
2138 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2139 if (ret == GS_ERROR)
2143 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2144 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
/* POINTER_PLUS_EXPR takes a sizetype offset; a decrement becomes an
   addition of the negated offset.  */
2146 rhs = fold_convert_loc (loc, sizetype, rhs);
2147 if (arith_code == MINUS_EXPR)
2148 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2149 arith_code = POINTER_PLUS_EXPR;
2152 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
/* Postfix path: the store goes on the caller's post queue, after the
   inner expression's own post side effects.  */
2156 gimplify_assign (lvalue, t1, orig_post_p);
2157 gimplify_seq_add_seq (orig_post_p, post);
/* Prefix path: the whole expression becomes the assignment itself.  */
2163 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2169 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
/* No-op for already-wrapped expressions, error types, and types whose
   size is unknown or a compile-time constant.  NOTE(review): lossy
   extract -- the SIZE declaration and early returns appear dropped.  */
2172 maybe_with_size_expr (tree *expr_p)
2174 tree expr = *expr_p;
2175 tree type = TREE_TYPE (expr);
2178 /* If we've already wrapped this or the type is error_mark_node, we can't do
2180 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2181 || type == error_mark_node)
2184 /* If the size isn't known or is a constant, we have nothing to do. */
2185 size = TYPE_SIZE_UNIT (type);
2186 if (!size || TREE_CODE (size) == INTEGER_CST)
2189 /* Otherwise, make a WITH_SIZE_EXPR. */
2190 size = unshare_expr (size);
/* Resolve PLACEHOLDER_EXPRs in the size against EXPR itself.  */
2191 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2192 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2196 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2197 Store any side-effects in PRE_P. CALL_LOCATION is the location of
/* ... the enclosing call; it is copied onto the argument so diagnostics
   and debug info point at the call.  Returns the gimplify_status of the
   final gimplify_expr.  */
2200 static enum gimplify_status
2201 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
/* TEST/FB select the gimple predicate and fallback used below: register
   types become rvalues, aggregates may stay lvalues.  */
2203 bool (*test) (tree);
2206 /* In general, we allow lvalues for function arguments to avoid
2207 extra overhead of copying large aggregates out of even larger
2208 aggregates into temporaries only to copy the temporaries to
2209 the argument list. Make optimizers happy by pulling out to
2210 temporaries those types that fit in registers. */
2211 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2212 test = is_gimple_val, fb = fb_rvalue;
2214 test = is_gimple_lvalue, fb = fb_either;
2216 /* If this is a variable sized type, we must remember the size. */
2217 maybe_with_size_expr (arg_p);
2219 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2220 /* Make sure arguments have the same location as the function call
2222 protected_set_expr_location (*arg_p, call_location);
2224 /* There is a sequence point before a function call. Side effects in
2225 the argument list must occur before the actual call. So, when
2226 gimplifying arguments, force gimplify_expr to use an internal
2227 post queue which is then appended to the end of PRE_P. */
2228 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2232 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2233 WANT_VALUE is true if the result of the call is desired. */
2235 static enum gimplify_status
2236 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2238 tree fndecl, parms, p;
2239 enum gimplify_status ret;
2242 bool builtin_va_start_p = FALSE;
2243 location_t loc = EXPR_LOCATION (*expr_p);
2245 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2247 /* For reliable diagnostics during inlining, it is necessary that
2248 every call_expr be annotated with file and line. */
2249 if (! EXPR_HAS_LOCATION (*expr_p))
2250 SET_EXPR_LOCATION (*expr_p, input_location);
2252 /* This may be a call to a builtin function.
2254 Builtin function calls may be transformed into different
2255 (and more efficient) builtin function calls under certain
2256 circumstances. Unfortunately, gimplification can muck things
2257 up enough that the builtin expanders are not aware that certain
2258 transformations are still valid.
2260 So we attempt transformation/gimplification of the call before
2261 we gimplify the CALL_EXPR. At this time we do not manage to
2262 transform all calls in the same manner as the expanders do, but
2263 we do transform most of them. */
2264 fndecl = get_callee_fndecl (*expr_p);
2265 if (fndecl && DECL_BUILT_IN (fndecl))
2267 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2269 if (new_tree && new_tree != *expr_p)
2271 /* There was a transformation of this call which computes the
2272 same value, but in a more efficient way. Return and try
2278 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2279 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
2281 builtin_va_start_p = TRUE;
2282 if (call_expr_nargs (*expr_p) < 2)
2284 error ("too few arguments to function %<va_start%>");
2285 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2289 if (fold_builtin_next_arg (*expr_p, true))
2291 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2297 /* There is a sequence point before the call, so any side effects in
2298 the calling expression must occur before the actual call. Force
2299 gimplify_expr to use an internal post queue. */
2300 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2301 is_gimple_call_addr, fb_rvalue);
2303 nargs = call_expr_nargs (*expr_p);
2305 /* Get argument types for verification. */
2306 fndecl = get_callee_fndecl (*expr_p);
2309 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2310 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2311 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2313 if (fndecl && DECL_ARGUMENTS (fndecl))
2314 p = DECL_ARGUMENTS (fndecl);
2319 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2322 /* If the last argument is __builtin_va_arg_pack () and it is not
2323 passed as a named argument, decrease the number of CALL_EXPR
2324 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2327 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2329 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2330 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2333 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2334 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2335 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2337 tree call = *expr_p;
2340 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2341 CALL_EXPR_FN (call),
2342 nargs, CALL_EXPR_ARGP (call));
2344 /* Copy all CALL_EXPR flags, location and block, except
2345 CALL_EXPR_VA_ARG_PACK flag. */
2346 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2347 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2348 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2349 = CALL_EXPR_RETURN_SLOT_OPT (call);
2350 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2351 CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call);
2352 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2353 TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
2355 /* Set CALL_EXPR_VA_ARG_PACK. */
2356 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2360 /* Finally, gimplify the function arguments. */
2363 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2364 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2365 PUSH_ARGS_REVERSED ? i-- : i++)
2367 enum gimplify_status t;
2369 /* Avoid gimplifying the second argument to va_start, which needs to
2370 be the plain PARM_DECL. */
2371 if ((i != 1) || !builtin_va_start_p)
2373 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2374 EXPR_LOCATION (*expr_p));
2382 /* Verify the function result. */
2383 if (want_value && fndecl
2384 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl))))
2386 error_at (loc, "using result of function returning %<void%>");
2390 /* Try this again in case gimplification exposed something. */
2391 if (ret != GS_ERROR)
2393 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2395 if (new_tree && new_tree != *expr_p)
2397 /* There was a transformation of this call which computes the
2398 same value, but in a more efficient way. Return and try
2406 *expr_p = error_mark_node;
2410 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2411 decl. This allows us to eliminate redundant or useless
2412 calls to "const" functions. */
2413 if (TREE_CODE (*expr_p) == CALL_EXPR)
2415 int flags = call_expr_flags (*expr_p);
2416 if (flags & (ECF_CONST | ECF_PURE)
2417 /* An infinite loop is considered a side effect. */
2418 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2419 TREE_SIDE_EFFECTS (*expr_p) = 0;
2422 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2423 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2424 form and delegate the creation of a GIMPLE_CALL to
2425 gimplify_modify_expr. This is always possible because when
2426 WANT_VALUE is true, the caller wants the result of this call into
2427 a temporary, which means that we will emit an INIT_EXPR in
2428 internal_get_tmp_var which will then be handled by
2429 gimplify_modify_expr. */
2432 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2433 have to do is replicate it as a GIMPLE_CALL tuple. */
2434 call = gimple_build_call_from_tree (*expr_p);
2435 gimplify_seq_add_stmt (pre_p, call);
2436 *expr_p = NULL_TREE;
2442 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2443 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2445 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2446 condition is true or false, respectively. If null, we should generate
2447 our own to skip over the evaluation of this specific expression.
2449 LOCUS is the source location of the COND_EXPR.
2451 This function is the tree equivalent of do_jump.
2453 shortcut_cond_r should only be called by shortcut_cond_expr. */
/* Recursive worker for shortcut_cond_expr.  Lowers the boolean predicate
   PRED into a statement list of simple COND_EXPRs and GOTO_EXPRs.  When
   TRUE_LABEL_P / FALSE_LABEL_P is NULL, control should fall through on
   that outcome, so a local label is created on demand.
   NOTE(review): this extraction elides some interior lines (braces,
   returns); the code below is kept byte-identical to the original.  */
2456 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2459 tree local_label = NULL_TREE;
2460 tree t, expr = NULL;
2462 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2463 retain the shortcut semantics. Just insert the gotos here;
2464 shortcut_cond_expr will append the real blocks later. */
2465 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2467 location_t new_locus;
2469 /* Turn if (a && b) into
2471 if (a); else goto no;
2472 if (b) goto yes; else goto no;
2475 if (false_label_p == NULL)
2476 false_label_p = &local_label;
2478 /* Keep the original source location on the first 'if'. */
2479 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2480 append_to_statement_list (t, &expr);
2482 /* Set the source location of the && on the second 'if'. */
2483 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2484 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2486 append_to_statement_list (t, &expr);
2488 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2490 location_t new_locus;
2492 /* Turn if (a || b) into
2495 if (b) goto yes; else goto no;
2498 if (true_label_p == NULL)
2499 true_label_p = &local_label;
2501 /* Keep the original source location on the first 'if'. */
2502 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2503 append_to_statement_list (t, &expr);
2505 /* Set the source location of the || on the second 'if'. */
2506 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2507 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2509 append_to_statement_list (t, &expr);
2511 else if (TREE_CODE (pred) == COND_EXPR)
2513 location_t new_locus;
2515 /* As long as we're messing with gotos, turn if (a ? b : c) into
2517 if (b) goto yes; else goto no;
2519 if (c) goto yes; else goto no; */
2521 /* Keep the original source location on the first 'if'. Set the source
2522 location of the ? on the second 'if'. */
2523 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2524 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2525 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2526 false_label_p, locus),
2527 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2528 false_label_p, new_locus));
/* Base case: PRED is a simple predicate; emit one conditional jump.  */
2532 expr = build3 (COND_EXPR, void_type_node, pred,
2533 build_and_jump (true_label_p),
2534 build_and_jump (false_label_p));
2535 SET_EXPR_LOCATION (expr, locus);
/* If a local fall-through label was created above, emit it here.  */
2540 t = build1 (LABEL_EXPR, void_type_node, local_label);
2541 append_to_statement_list (t, &expr);
2547 /* Given a conditional expression EXPR with short-circuit boolean
2548 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2549 predicate appart into the equivalent sequence of conditionals. */
/* Expand the short-circuit predicate of COND_EXPR EXPR into an explicit
   sequence of conditionals, gotos, and labels, reusing existing GOTO
   destinations in the arms where possible to avoid jumps-to-jumps.
   Returns the rewritten statement list.
   NOTE(review): interior lines (braces, returns) are elided in this
   extraction; code lines are kept byte-identical.  */
2552 shortcut_cond_expr (tree expr)
2554 tree pred = TREE_OPERAND (expr, 0);
2555 tree then_ = TREE_OPERAND (expr, 1);
2556 tree else_ = TREE_OPERAND (expr, 2);
2557 tree true_label, false_label, end_label, t;
2559 tree *false_label_p;
2560 bool emit_end, emit_false, jump_over_else;
2561 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2562 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2564 /* First do simple transformations. */
2567 /* If there is no 'else', turn
2570 if (a) if (b) then c. */
2571 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2573 /* Keep the original source location on the first 'if'. */
2574 location_t locus = EXPR_HAS_LOCATION (expr)
2575 ? EXPR_LOCATION (expr) : input_location;
2576 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2577 /* Set the source location of the && on the second 'if'. */
2578 if (EXPR_HAS_LOCATION (pred))
2579 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2580 then_ = shortcut_cond_expr (expr);
2581 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2582 pred = TREE_OPERAND (pred, 0);
2583 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2584 SET_EXPR_LOCATION (expr, locus);
2590 /* If there is no 'then', turn
2593 if (a); else if (b); else d. */
2594 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2596 /* Keep the original source location on the first 'if'. */
2597 location_t locus = EXPR_HAS_LOCATION (expr)
2598 ? EXPR_LOCATION (expr) : input_location;
2599 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2600 /* Set the source location of the || on the second 'if'. */
2601 if (EXPR_HAS_LOCATION (pred))
2602 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2603 else_ = shortcut_cond_expr (expr);
2604 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2605 pred = TREE_OPERAND (pred, 0);
2606 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2607 SET_EXPR_LOCATION (expr, locus);
2611 /* If we're done, great. */
2612 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2613 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2616 /* Otherwise we need to mess with gotos. Change
2619 if (a); else goto no;
2622 and recursively gimplify the condition. */
2624 true_label = false_label = end_label = NULL_TREE;
2626 /* If our arms just jump somewhere, hijack those labels so we don't
2627 generate jumps to jumps. */
2630 && TREE_CODE (then_) == GOTO_EXPR
2631 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL
2633 true_label = GOTO_DESTINATION (then_);
2639 && TREE_CODE (else_) == GOTO_EXPR
2640 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL
2642 false_label = GOTO_DESTINATION (else_);
2647 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2649 true_label_p = &true_label;
2651 true_label_p = NULL;
2653 /* The 'else' branch also needs a label if it contains interesting code. */
2654 if (false_label || else_se)
2655 false_label_p = &false_label;
2657 false_label_p = NULL;
2659 /* If there was nothing else in our arms, just forward the label(s). */
2660 if (!then_se && !else_se)
2661 return shortcut_cond_r (pred, true_label_p, false_label_p,
2662 EXPR_HAS_LOCATION (expr)
2663 ? EXPR_LOCATION (expr) : input_location);
2665 /* If our last subexpression already has a terminal label, reuse it. */
2667 t = expr_last (else_);
2669 t = expr_last (then_);
2672 if (t && TREE_CODE (t) == LABEL_EXPR)
2673 end_label = LABEL_EXPR_LABEL (t);
2675 /* If we don't care about jumping to the 'else' branch, jump to the end
2676 if the condition is false. */
2678 false_label_p = &end_label;
2680 /* We only want to emit these labels if we aren't hijacking them. */
2681 emit_end = (end_label == NULL_TREE);
2682 emit_false = (false_label == NULL_TREE);
2684 /* We only emit the jump over the else clause if we have to--if the
2685 then clause may fall through. Otherwise we can wind up with a
2686 useless jump and a useless label at the end of gimplified code,
2687 which will cause us to think that this conditional as a whole
2688 falls through even if it doesn't. If we then inline a function
2689 which ends with such a condition, that can cause us to issue an
2690 inappropriate warning about control reaching the end of a
2691 non-void function. */
2692 jump_over_else = block_may_fallthru (then_);
2694 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2695 EXPR_HAS_LOCATION (expr)
2696 ? EXPR_LOCATION (expr) : input_location);
2699 append_to_statement_list (pred, &expr);
2701 append_to_statement_list (then_, &expr);
/* Emit the jump over the else arm, with the location of the last
   statement of the then arm.  */
2706 tree last = expr_last (expr);
2707 t = build_and_jump (&end_label);
2708 if (EXPR_HAS_LOCATION (last))
2709 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2710 append_to_statement_list (t, &expr);
2714 t = build1 (LABEL_EXPR, void_type_node, false_label);
2715 append_to_statement_list (t, &expr);
2717 append_to_statement_list (else_, &expr);
2719 if (emit_end && end_label)
2721 t = build1 (LABEL_EXPR, void_type_node, end_label);
2722 append_to_statement_list (t, &expr);
2728 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
/* Coerce EXPR, used in a boolean context, to have BOOLEAN_TYPE,
   recursing into the operands of truth expressions and into the
   truth-valued first argument of __builtin_expect.  Returns the
   (possibly converted) expression.  */
2731 gimple_boolify (tree expr)
2733 tree type = TREE_TYPE (expr);
2734 location_t loc = EXPR_LOCATION (expr);
/* Special-case EXPR of the form `call != 0' so that the call operand
   of a __builtin_expect test can be boolified too.  */
2736 if (TREE_CODE (expr) == NE_EXPR
2737 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2738 && integer_zerop (TREE_OPERAND (expr, 1)))
2740 tree call = TREE_OPERAND (expr, 0);
2741 tree fn = get_callee_fndecl (call);
2743 /* For __builtin_expect ((long) (x), y) recurse into x as well
2744 if x is truth_value_p. */
2746 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2747 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2748 && call_expr_nargs (call) == 2)
2750 tree arg = CALL_EXPR_ARG (call, 0);
/* Strip the (long) cast the front end wraps around the argument.  */
2753 if (TREE_CODE (arg) == NOP_EXPR
2754 && TREE_TYPE (arg) == TREE_TYPE (call))
2755 arg = TREE_OPERAND (arg, 0);
2756 if (truth_value_p (TREE_CODE (arg)))
2758 arg = gimple_boolify (arg);
2759 CALL_EXPR_ARG (call, 0)
2760 = fold_convert_loc (loc, TREE_TYPE (call), arg);
/* Already boolean; nothing to do.  */
2766 if (TREE_CODE (type) == BOOLEAN_TYPE)
2769 switch (TREE_CODE (expr))
2771 case TRUTH_AND_EXPR:
2773 case TRUTH_XOR_EXPR:
2774 case TRUTH_ANDIF_EXPR:
2775 case TRUTH_ORIF_EXPR:
2776 /* Also boolify the arguments of truth exprs. */
2777 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2780 case TRUTH_NOT_EXPR:
2781 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2784 case EQ_EXPR: case NE_EXPR:
2785 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2786 /* These expressions always produce boolean results. */
2787 TREE_TYPE (expr) = boolean_type_node;
2791 /* Other expressions that get here must have boolean values, but
2792 might need to be converted to the appropriate mode. */
2793 return fold_convert_loc (loc, boolean_type_node, expr);
2797 /* Given a conditional expression *EXPR_P without side effects, gimplify
2798 its operands. New statements are inserted to PRE_P. */
2800 static enum gimplify_status
/* Gimplify the side-effect-free COND_EXPR *EXPR_P in place: boolify the
   condition, demote ANDIF/ORIF to their non-short-circuit forms (their
   gimplification would otherwise recreate a pure COND_EXPR, looping
   forever), and gimplify all three operands.  Side effects go to PRE_P.
   Returns the worst gimplify_status encountered.  */
2801 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2803 tree expr = *expr_p, cond;
2804 enum gimplify_status ret, tret;
2805 enum tree_code code;
2807 cond = gimple_boolify (COND_EXPR_COND (expr));
2809 /* We need to handle && and || specially, as their gimplification
2810 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
2811 code = TREE_CODE (cond);
2812 if (code == TRUTH_ANDIF_EXPR)
2813 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2814 else if (code == TRUTH_ORIF_EXPR)
2815 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2816 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2817 COND_EXPR_COND (*expr_p) = cond;
2819 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2820 is_gimple_val, fb_rvalue);
2821 ret = MIN (ret, tret);
2822 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2823 is_gimple_val, fb_rvalue);
/* GS_ERROR is the minimum of the enum, so MIN propagates failure.  */
2825 return MIN (ret, tret);
2828 /* Returns true if evaluating EXPR could trap.
2829 EXPR is GENERIC, while tree_could_trap_p can be called
/* Return whether evaluating the GENERIC expression EXPR could trap,
   by checking EXPR itself with tree_could_trap_p and then recursing
   over all of its operands.  Bare GIMPLE values cannot trap.  */
2833 generic_expr_could_trap_p (tree expr)
2837 if (!expr || is_gimple_val (expr))
2840 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2843 n = TREE_OPERAND_LENGTH (expr);
2844 for (i = 0; i < n; i++)
2845 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2851 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2860 The second form is used when *EXPR_P is of type void.
2862 PRE_P points to the list where side effects that must happen before
2863 *EXPR_P should be stored. */
2865 static enum gimplify_status
/* Gimplify COND_EXPR *EXPR_P.  A valued COND_EXPR is rewritten to store
   each arm into a temporary (or a pointer to it when an lvalue/
   addressable result is needed); a void COND_EXPR is lowered to a
   GIMPLE_COND plus labels, hijacking existing GOTO destinations in the
   arms when safe.  PRE_P receives emitted statements; FALLBACK says what
   kind of result the caller accepts.
   NOTE(review): interior lines (braces, returns, some expressions) are
   elided in this extraction; code lines below are byte-identical.  */
2866 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
2868 tree expr = *expr_p;
2869 tree type = TREE_TYPE (expr);
2870 location_t loc = EXPR_LOCATION (expr);
2871 tree tmp, arm1, arm2;
2872 enum gimplify_status ret;
2873 tree label_true, label_false, label_cont;
2874 bool have_then_clause_p, have_else_clause_p;
2876 enum tree_code pred_code;
2877 gimple_seq seq = NULL;
2879 /* If this COND_EXPR has a value, copy the values into a temporary within
2881 if (!VOID_TYPE_P (type))
2883 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
2886 /* If either an rvalue is ok or we do not require an lvalue, create the
2887 temporary. But we cannot do that if the type is addressable. */
2888 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
2889 && !TREE_ADDRESSABLE (type))
2891 if (gimplify_ctxp->allow_rhs_cond_expr
2892 /* If either branch has side effects or could trap, it can't be
2893 evaluated unconditionally. */
2894 && !TREE_SIDE_EFFECTS (then_)
2895 && !generic_expr_could_trap_p (then_)
2896 && !TREE_SIDE_EFFECTS (else_)
2897 && !generic_expr_could_trap_p (else_))
2898 return gimplify_pure_cond_expr (expr_p, pre_p);
2900 tmp = create_tmp_var (type, "iftmp");
2904 /* Otherwise, only create and copy references to the values. */
2907 type = build_pointer_type (type);
2909 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2910 then_ = build_fold_addr_expr_loc (loc, then_);
2912 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2913 else_ = build_fold_addr_expr_loc (loc, else_);
2916 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
2918 tmp = create_tmp_var (type, "iftmp");
2919 result = build_fold_indirect_ref_loc (loc, tmp);
2922 /* Build the new then clause, `tmp = then_;'. But don't build the
2923 assignment if the value is void; in C++ it can be if it's a throw. */
2924 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2925 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
2927 /* Similarly, build the new else clause, `tmp = else_;'. */
2928 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2929 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
2931 TREE_TYPE (expr) = void_type_node;
2932 recalculate_side_effects (expr);
2934 /* Move the COND_EXPR to the prequeue. */
2935 gimplify_stmt (&expr, pre_p);
2941 /* Make sure the condition has BOOLEAN_TYPE. */
2942 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2944 /* Break apart && and || conditions. */
2945 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2946 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2948 expr = shortcut_cond_expr (expr);
2950 if (expr != *expr_p)
2954 /* We can't rely on gimplify_expr to re-gimplify the expanded
2955 form properly, as cleanups might cause the target labels to be
2956 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2957 set up a conditional context. */
2958 gimple_push_condition ();
2959 gimplify_stmt (expr_p, &seq);
2960 gimple_pop_condition (pre_p);
2961 gimple_seq_add_seq (pre_p, seq);
2967 /* Now do the normal gimplification. */
2969 /* Gimplify condition. */
2970 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
2972 if (ret == GS_ERROR)
2974 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
2976 gimple_push_condition ();
/* Reuse the then arm's GOTO destination as the true label when the arm
   is a bare goto to a local label (with matching location at -O0).  */
2978 have_then_clause_p = have_else_clause_p = false;
2979 if (TREE_OPERAND (expr, 1) != NULL
2980 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
2981 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
2982 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
2983 == current_function_decl)
2984 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2985 have different locations, otherwise we end up with incorrect
2986 location information on the branches. */
2988 || !EXPR_HAS_LOCATION (expr)
2989 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
2990 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
2992 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
2993 have_then_clause_p = true;
2996 label_true = create_artificial_label (UNKNOWN_LOCATION);
/* Likewise for the else arm and the false label.  */
2997 if (TREE_OPERAND (expr, 2) != NULL
2998 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
2999 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3000 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3001 == current_function_decl)
3002 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3003 have different locations, otherwise we end up with incorrect
3004 location information on the branches. */
3006 || !EXPR_HAS_LOCATION (expr)
3007 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3008 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3010 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3011 have_else_clause_p = true;
3014 label_false = create_artificial_label (UNKNOWN_LOCATION);
3016 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3019 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3022 gimplify_seq_add_stmt (&seq, gimple_cond);
3023 label_cont = NULL_TREE;
3024 if (!have_then_clause_p)
3026 /* For if (...) {} else { code; } put label_true after
3028 if (TREE_OPERAND (expr, 1) == NULL_TREE
3029 && !have_else_clause_p
3030 && TREE_OPERAND (expr, 2) != NULL_TREE)
3031 label_cont = label_true;
3034 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3035 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3036 /* For if (...) { code; } else {} or
3037 if (...) { code; } else goto label; or
3038 if (...) { code; return; } else { ... }
3039 label_cont isn't needed. */
3040 if (!have_else_clause_p
3041 && TREE_OPERAND (expr, 2) != NULL_TREE
3042 && gimple_seq_may_fallthru (seq))
3045 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3047 g = gimple_build_goto (label_cont);
3049 /* GIMPLE_COND's are very low level; they have embedded
3050 gotos. This particular embedded goto should not be marked
3051 with the location of the original COND_EXPR, as it would
3052 correspond to the COND_EXPR's condition, not the ELSE or the
3053 THEN arms. To avoid marking it with the wrong location, flag
3054 it as "no location". */
3055 gimple_set_do_not_emit_location (g);
3057 gimplify_seq_add_stmt (&seq, g);
3061 if (!have_else_clause_p)
3063 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3064 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3067 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3069 gimple_pop_condition (pre_p);
3070 gimple_seq_add_seq (pre_p, seq);
3072 if (ret == GS_ERROR)
3074 else if (have_then_clause_p || have_else_clause_p)
3078 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3079 expr = TREE_OPERAND (expr, 0);
3080 gimplify_stmt (&expr, pre_p);
3087 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3088 to be marked addressable.
3090 We cannot rely on such an expression being directly markable if a temporary
3091 has been created by the gimplification. In this case, we create another
3092 temporary and initialize it with a copy, which will become a store after we
3093 mark it addressable. This can happen if the front-end passed us something
3094 that it could not mark addressable yet, like a Fortran pass-by-reference
3095 parameter (int) floatvar. */
/* Make *EXPR_P safely markable addressable: strip component refs down
   to the base object and, if that base is a GIMPLE register (i.e. a
   gimplification temporary that cannot be addressed), replace it with
   an initialized temporary via SEQ_P.  */
3098 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3100 while (handled_component_p (*expr_p))
3101 expr_p = &TREE_OPERAND (*expr_p, 0);
3102 if (is_gimple_reg (*expr_p))
3103 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3106 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3107 a call to __builtin_memcpy. */
3109 static enum gimplify_status
/* Replace the MODIFY_EXPR *EXPR_P with a call to __builtin_memcpy of
   SIZE bytes, emitting the call into SEQ_P.  If WANT_VALUE, the call's
   return pointer is captured in a temporary and *EXPR_P becomes an
   INDIRECT_REF of it; otherwise *EXPR_P is consumed.  */
3110 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3113 tree t, to, to_ptr, from, from_ptr;
3115 location_t loc = EXPR_LOCATION (*expr_p);
3117 to = TREE_OPERAND (*expr_p, 0);
3118 from = TREE_OPERAND (*expr_p, 1);
3120 /* Mark the RHS addressable. Beware that it may not be possible to do so
3121 directly if a temporary has been created by the gimplification. */
3122 prepare_gimple_addressable (&from, seq_p);
3124 mark_addressable (from);
3125 from_ptr = build_fold_addr_expr_loc (loc, from);
3126 gimplify_arg (&from_ptr, seq_p, loc);
3128 mark_addressable (to);
3129 to_ptr = build_fold_addr_expr_loc (loc, to);
3130 gimplify_arg (&to_ptr, seq_p, loc);
3132 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
3134 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3138 /* tmp = memcpy() */
3139 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3140 gimple_call_set_lhs (gs, t);
3141 gimplify_seq_add_stmt (seq_p, gs);
3143 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
/* No value wanted: just emit the bare memcpy call.  */
3147 gimplify_seq_add_stmt (seq_p, gs);
3152 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3153 a call to __builtin_memset. In this case we know that the RHS is
3154 a CONSTRUCTOR with an empty element list. */
3156 static enum gimplify_status
/* Replace the MODIFY_EXPR *EXPR_P, whose RHS is an empty CONSTRUCTOR,
   with a call to __builtin_memset (..., 0, SIZE) emitted into SEQ_P.
   If WANT_VALUE, *EXPR_P becomes an INDIRECT_REF of the call's return
   pointer held in a temporary.  */
3157 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3160 tree t, from, to, to_ptr;
3162 location_t loc = EXPR_LOCATION (*expr_p);
3164 /* Assert our assumptions, to abort instead of producing wrong code
3165 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3166 not be immediately exposed. */
3167 from = TREE_OPERAND (*expr_p, 1);
3168 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3169 from = TREE_OPERAND (from, 0);
3171 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3172 && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3175 to = TREE_OPERAND (*expr_p, 0);
3177 to_ptr = build_fold_addr_expr_loc (loc, to);
3178 gimplify_arg (&to_ptr, seq_p, loc);
3179 t = implicit_built_in_decls[BUILT_IN_MEMSET];
3181 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3185 /* tmp = memset() */
3186 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3187 gimple_call_set_lhs (gs, t);
3188 gimplify_seq_add_stmt (seq_p, gs);
3190 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
/* No value wanted: just emit the bare memset call.  */
3194 gimplify_seq_add_stmt (seq_p, gs);
3199 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3200 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3201 assignment. Returns non-null if we detect a potential overlap. */
/* Bundle of lhs information threaded through the ctor pre-evaluation
   walk (gimplify_init_ctor_preeval / _1) to detect rhs/lhs overlap.
   NOTE(review): the lhs_base_decl field declaration appears elided in
   this extraction; see the comment that documents it.  */
3203 struct gimplify_init_ctor_preeval_data
3205 /* The base decl of the lhs object. May be NULL, in which case we
3206 have to assume the lhs is indirect. */
3209 /* The alias set of the lhs object. */
3210 alias_set_type lhs_alias_set;
/* walk_tree callback: return non-null (overlap detected) when the tree
   *TP may alias the lhs described by XDATA, judging by base-decl
   identity, addressability, and alias-set conflicts.  */
3214 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3216 struct gimplify_init_ctor_preeval_data *data
3217 = (struct gimplify_init_ctor_preeval_data *) xdata;
3220 /* If we find the base object, obviously we have overlap. */
3221 if (data->lhs_base_decl == t)
3224 /* If the constructor component is indirect, determine if we have a
3225 potential overlap with the lhs. The only bits of information we
3226 have to go on at this point are addressability and alias sets. */
3227 if (TREE_CODE (t) == INDIRECT_REF
3228 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3229 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3232 /* If the constructor component is a call, determine if it can hide a
3233 potential overlap with the lhs through an INDIRECT_REF like above. */
3234 if (TREE_CODE (t) == CALL_EXPR)
3236 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3238 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3239 if (POINTER_TYPE_P (TREE_VALUE (type))
3240 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3241 && alias_sets_conflict_p (data->lhs_alias_set,
3243 (TREE_TYPE (TREE_VALUE (type)))))
/* Types and decls contain no references to the lhs; prune the walk.  */
3247 if (IS_TYPE_OR_DECL_P (t))
3252 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3253 force values that overlap with the lhs (as described by *DATA)
3254 into temporaries. */
/* Pre-evaluate the constructor element *EXPR_P, forcing values that may
   overlap the lhs (described by *DATA) into temporaries so the eventual
   element-wise stores cannot clobber their own inputs.  Side effects go
   to PRE_P / POST_P.  */
3257 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3258 struct gimplify_init_ctor_preeval_data *data)
3260 enum gimplify_status one;
3262 /* If the value is constant, then there's nothing to pre-evaluate. */
3263 if (TREE_CONSTANT (*expr_p))
3265 /* Ensure it does not have side effects, it might contain a reference to
3266 the object we're initializing. */
3267 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3271 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3272 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3275 /* Recurse for nested constructors. */
3276 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3278 unsigned HOST_WIDE_INT ix;
3279 constructor_elt *ce;
3280 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
3282 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
3283 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3288 /* If this is a variable sized type, we must remember the size. */
3289 maybe_with_size_expr (expr_p);
3291 /* Gimplify the constructor element to something appropriate for the rhs
3292 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3293 the gimplifier will consider this a store to memory. Doing this
3294 gimplification now means that we won't have to deal with complicated
3295 language-specific trees, nor trees like SAVE_EXPR that can induce
3296 exponential search behavior. */
3297 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3298 if (one == GS_ERROR)
3304 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3305 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3306 always be true for all scalars, since is_gimple_mem_rhs insists on a
3307 temporary variable for them. */
3308 if (DECL_P (*expr_p))
3311 /* If this is of variable size, we have no choice but to assume it doesn't
3312 overlap since we can't make a temporary for it. */
3313 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3316 /* Otherwise, we must search for overlap ... */
3317 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3320 /* ... and if found, force the value into a temporary. */
3321 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3324 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3325 a RANGE_EXPR in a CONSTRUCTOR for an array.
3329 object[var] = value;
3336 We increment var _after_ the loop exit check because we might otherwise
3337 fail if upper == TYPE_MAX_VALUE (type for upper).
3339 Note that we never have to deal with SAVE_EXPRs here, because this has
3340 already been taken care of for us, in gimplify_init_ctor_preeval(). */
/* Forward declaration: gimplify_init_ctor_eval is mutually recursive
   with gimplify_init_ctor_eval_range below.  */
3342 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
3343 gimple_seq *, bool);
/* Emit into PRE_P a loop assigning VALUE to OBJECT[var] for var in
   [LOWER, UPPER] (a RANGE_EXPR index of an array CONSTRUCTOR).
   ARRAY_ELT_TYPE is the element type; CLEARED says the whole object
   was already zeroed.  The index is incremented after the exit test so
   UPPER == TYPE_MAX_VALUE does not wrap.  */
3346 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3347 tree value, tree array_elt_type,
3348 gimple_seq *pre_p, bool cleared)
3350 tree loop_entry_label, loop_exit_label, fall_thru_label;
3351 tree var, var_type, cref, tmp;
3353 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3354 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3355 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3357 /* Create and initialize the index variable. */
3358 var_type = TREE_TYPE (upper);
3359 var = create_tmp_var (var_type, NULL);
3360 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3362 /* Add the loop entry label. */
3363 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3365 /* Build the reference. */
3366 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3367 var, NULL_TREE, NULL_TREE);
3369 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3370 the store. Otherwise just assign value to the reference. */
3372 if (TREE_CODE (value) == CONSTRUCTOR)
3373 /* NB we might have to call ourself recursively through
3374 gimplify_init_ctor_eval if the value is a constructor. */
3375 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3378 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3380 /* We exit the loop when the index var is equal to the upper bound. */
3381 gimplify_seq_add_stmt (pre_p,
3382 gimple_build_cond (EQ_EXPR, var, upper,
3383 loop_exit_label, fall_thru_label));
3385 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3387 /* Otherwise, increment the index var... */
3388 tmp = build2 (PLUS_EXPR, var_type, var,
3389 fold_convert (var_type, integer_one_node));
3390 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3392 /* ...and jump back to the loop entry. */
3393 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3395 /* Add the loop exit label. */
3396 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3399 /* Return true if FDECL is accessing a field that is zero sized. */
/* Return whether FDECL is a FIELD_DECL with a known, zero size.  */
3402 zero_sized_field_decl (const_tree fdecl)
3404 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3405 && integer_zerop (DECL_SIZE (fdecl)))
3410 /* Return true if TYPE is zero sized. */
/* Return whether TYPE is an aggregate with a known, zero size.  */
3413 zero_sized_type (const_tree type)
3415 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3416 && integer_zerop (TYPE_SIZE (type)))
3421 /* A subroutine of gimplify_init_constructor. Generate individual
3422 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3423 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3424 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
/* Emit into PRE_P one INIT_EXPR per element of ELTS, storing each value
   into the corresponding ARRAY_REF/COMPONENT_REF of OBJECT.  CLEARED
   means OBJECT was already zeroed, so zero initializers are skipped.
   RANGE_EXPR indices are handled by emitting a loop.  */
3428 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
3429 gimple_seq *pre_p, bool cleared)
3431 tree array_elt_type = NULL;
3432 unsigned HOST_WIDE_INT ix;
3433 tree purpose, value;
3435 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3436 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3438 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3442 /* NULL values are created above for gimplification errors. */
3446 if (cleared && initializer_zerop (value))
3449 /* ??? Here's to hoping the front end fills in all of the indices,
3450 so we don't have to figure out what's missing ourselves. */
3451 gcc_assert (purpose);
3453 /* Skip zero-sized fields, unless value has side-effects. This can
3454 happen with calls to functions returning a zero-sized type, which
3455 we shouldn't discard. As a number of downstream passes don't
3456 expect sets of zero-sized fields, we rely on the gimplification of
3457 the MODIFY_EXPR we make below to drop the assignment statement. */
3458 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3461 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3463 if (TREE_CODE (purpose) == RANGE_EXPR)
3465 tree lower = TREE_OPERAND (purpose, 0);
3466 tree upper = TREE_OPERAND (purpose, 1);
3468 /* If the lower bound is equal to upper, just treat it as if
3469 upper was the index. */
3470 if (simple_cst_equal (lower, upper))
3474 gimplify_init_ctor_eval_range (object, lower, upper, value,
3475 array_elt_type, pre_p, cleared)
3482 /* Do not use bitsizetype for ARRAY_REF indices. */
3483 if (TYPE_DOMAIN (TREE_TYPE (object)))
3484 purpose = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3486 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3487 purpose, NULL_TREE, NULL_TREE);
3491 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3492 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3493 unshare_expr (object), purpose, NULL_TREE);
/* Nested non-vector constructors recurse; everything else becomes a
   direct INIT_EXPR of the element reference.  */
3496 if (TREE_CODE (value) == CONSTRUCTOR
3497 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3498 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3502 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3503 gimplify_and_add (init, pre_p);
3510 /* Returns the appropriate RHS predicate for this LHS. */
3513 rhs_predicate_for (tree lhs)
3515 if (is_gimple_reg (lhs))
3516 return is_gimple_reg_rhs_or_call;
3518 return is_gimple_mem_rhs_or_call;
3521 /* Gimplify a C99 compound literal expression. This just means adding
3522 the DECL_EXPR before the current statement and using its anonymous
3525 static enum gimplify_status
3526 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p)
3528 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3529 tree decl = DECL_EXPR_DECL (decl_s);
3530 /* Mark the decl as addressable if the compound literal
3531 expression is addressable now, otherwise it is marked too late
3532 after we gimplify the initialization expression. */
3533 if (TREE_ADDRESSABLE (*expr_p))
3534 TREE_ADDRESSABLE (decl) = 1;
3536 /* Preliminarily mark non-addressed complex variables as eligible
3537 for promotion to gimple registers. We'll transform their uses
3539 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3540 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3541 && !TREE_THIS_VOLATILE (decl)
3542 && !needs_to_live_in_memory (decl))
3543 DECL_GIMPLE_REG_P (decl) = 1;
3545 /* This decl isn't mentioned in the enclosing block, so add it to the
3546 list of temps. FIXME it seems a bit of a kludge to say that
3547 anonymous artificial vars aren't pushed, but everything else is. */
3548 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3549 gimple_add_tmp_var (decl);
3551 gimplify_and_add (decl_s, pre_p);
3556 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3557 return a new CONSTRUCTOR if something changed. */
3560 optimize_compound_literals_in_ctor (tree orig_ctor)
3562 tree ctor = orig_ctor;
3563 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
3564 unsigned int idx, num = VEC_length (constructor_elt, elts);
3566 for (idx = 0; idx < num; idx++)
3568 tree value = VEC_index (constructor_elt, elts, idx)->value;
3569 tree newval = value;
3570 if (TREE_CODE (value) == CONSTRUCTOR)
3571 newval = optimize_compound_literals_in_ctor (value);
3572 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3574 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3575 tree decl = DECL_EXPR_DECL (decl_s);
3576 tree init = DECL_INITIAL (decl);
3578 if (!TREE_ADDRESSABLE (value)
3579 && !TREE_ADDRESSABLE (decl)
3581 newval = optimize_compound_literals_in_ctor (init);
3583 if (newval == value)
3586 if (ctor == orig_ctor)
3588 ctor = copy_node (orig_ctor);
3589 CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
3590 elts = CONSTRUCTOR_ELTS (ctor);
3592 VEC_index (constructor_elt, elts, idx)->value = newval;
/* NOTE(review): the leading "NNNN" tokens are line numbers baked in by an
   extraction tool; the gaps in that numbering mean braces, `else` arms,
   `break`s and several statements are missing from this copy.  Restore
   the function from upstream gimplify.c before compiling.  */
3599 /* A subroutine of gimplify_modify_expr. Break out elements of a
3600 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3602 Note that we still need to clear any elements that don't have explicit
3603 initializers, so if not all elements are initialized we keep the
3604 original MODIFY_EXPR, we just remove all of the constructor elements.
3606 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3607 GS_ERROR if we would have to create a temporary when gimplifying
3608 this constructor. Otherwise, return GS_OK.
3610 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3612 static enum gimplify_status
3613 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3614 bool want_value, bool notify_temp_creation)
3616 tree object, ctor, type;
3617 enum gimplify_status ret;
3618 VEC(constructor_elt,gc) *elts;
3620 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
/* Gimplify the LHS into lvalue form first (unless only probing).  */
3622 if (!notify_temp_creation)
3624 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3625 is_gimple_lvalue, fb_lvalue);
3626 if (ret == GS_ERROR)
/* Fold compound literals embedded in the ctor before dispatching on
   the constructed type's class.  */
3630 object = TREE_OPERAND (*expr_p, 0);
3631 ctor = TREE_OPERAND (*expr_p, 1) =
3632 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3633 type = TREE_TYPE (ctor);
3634 elts = CONSTRUCTOR_ELTS (ctor);
3637 switch (TREE_CODE (type))
3641 case QUAL_UNION_TYPE:
3644 struct gimplify_init_ctor_preeval_data preeval_data;
3645 HOST_WIDE_INT num_type_elements, num_ctor_elements;
3646 HOST_WIDE_INT num_nonzero_elements;
3647 bool cleared, valid_const_initializer;
3649 /* Aggregate types must lower constructors to initialization of
3650 individual elements. The exception is that a CONSTRUCTOR node
3651 with no elements indicates zero-initialization of the whole. */
3652 if (VEC_empty (constructor_elt, elts))
3654 if (notify_temp_creation)
3659 /* Fetch information about the constructor to direct later processing.
3660 We might want to make static versions of it in various cases, and
3661 can only do so if it known to be a valid constant initializer. */
3662 valid_const_initializer
3663 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3664 &num_ctor_elements, &cleared);
3666 /* If a const aggregate variable is being initialized, then it
3667 should never be a lose to promote the variable to be static. */
3668 if (valid_const_initializer
3669 && num_nonzero_elements > 1
3670 && TREE_READONLY (object)
3671 && TREE_CODE (object) == VAR_DECL
3672 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3674 if (notify_temp_creation)
/* Promote: make the variable static with the ctor as its
   DECL_INITIAL and drop the runtime assignment entirely.  */
3676 DECL_INITIAL (object) = ctor;
3677 TREE_STATIC (object) = 1;
3678 if (!DECL_NAME (object))
3679 DECL_NAME (object) = create_tmp_var_name ("C");
3680 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3682 /* ??? C++ doesn't automatically append a .<number> to the
3683 assembler name, and even when it does, it looks a FE private
3684 data structures to figure out what that number should be,
3685 which are not set for this variable. I suppose this is
3686 important for local statics for inline functions, which aren't
3687 "local" in the object file sense. So in order to get a unique
3688 TU-local symbol, we must invoke the lhd version now. */
3689 lhd_set_decl_assembler_name (object);
3691 *expr_p = NULL_TREE;
3695 /* If there are "lots" of initialized elements, even discounting
3696 those that are not address constants (and thus *must* be
3697 computed at runtime), then partition the constructor into
3698 constant and non-constant parts. Block copy the constant
3699 parts in, then generate code for the non-constant parts. */
3700 /* TODO. There's code in cp/typeck.c to do this. */
3702 num_type_elements = count_type_elements (type, true);
3704 /* If count_type_elements could not determine number of type elements
3705 for a constant-sized object, assume clearing is needed.
3706 Don't do this for variable-sized objects, as store_constructor
3707 will ignore the clearing of variable-sized objects. */
3708 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3710 /* If there are "lots" of zeros, then block clear the object first. */
3711 else if (num_type_elements - num_nonzero_elements
3712 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3713 && num_nonzero_elements < num_type_elements/4)
3715 /* ??? This bit ought not be needed. For any element not present
3716 in the initializer, we should simply set them to zero. Except
3717 we'd need to *find* the elements that are not present, and that
3718 requires trickery to avoid quadratic compile-time behavior in
3719 large cases or excessive memory use in small cases. */
3720 else if (num_ctor_elements < num_type_elements)
3723 /* If there are "lots" of initialized elements, and all of them
3724 are valid address constants, then the entire initializer can
3725 be dropped to memory, and then memcpy'd out. Don't do this
3726 for sparse arrays, though, as it's more efficient to follow
3727 the standard CONSTRUCTOR behavior of memset followed by
3728 individual element initialization. Also don't do this for small
3729 all-zero initializers (which aren't big enough to merit
3730 clearing), and don't try to make bitwise copies of
3731 TREE_ADDRESSABLE types. */
3732 if (valid_const_initializer
3733 && !(cleared || num_nonzero_elements == 0)
3734 && !TREE_ADDRESSABLE (type))
3736 HOST_WIDE_INT size = int_size_in_bytes (type);
3739 /* ??? We can still get unbounded array types, at least
3740 from the C++ front end. This seems wrong, but attempt
3741 to work around it for now. */
3744 size = int_size_in_bytes (TREE_TYPE (object));
3746 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3749 /* Find the maximum alignment we can assume for the object. */
3750 /* ??? Make use of DECL_OFFSET_ALIGN. */
3751 if (DECL_P (object))
3752 align = DECL_ALIGN (object);
3754 align = TYPE_ALIGN (type);
/* Only spill to an object-file constant when the copy is too big
   to expand inline.  */
3757 && num_nonzero_elements > 1
3758 && !can_move_by_pieces (size, align))
3760 if (notify_temp_creation)
3763 walk_tree (&ctor, force_labels_r, NULL, NULL);
3764 TREE_OPERAND (*expr_p, 1) = tree_output_constant_def (ctor);
3766 /* This is no longer an assignment of a CONSTRUCTOR, but
3767 we still may have processing to do on the LHS. So
3768 pretend we didn't do anything here to let that happen. */
3769 return GS_UNHANDLED;
3773 /* If the target is volatile and we have non-zero elements
3774 initialize the target from a temporary. */
3775 if (TREE_THIS_VOLATILE (object)
3776 && !TREE_ADDRESSABLE (type)
3777 && num_nonzero_elements > 0)
3779 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
3780 TREE_OPERAND (*expr_p, 0) = temp;
3781 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3783 build2 (MODIFY_EXPR, void_type_node,
3788 if (notify_temp_creation)
3791 /* If there are nonzero elements and if needed, pre-evaluate to capture
3792 elements overlapping with the lhs into temporaries. We must do this
3793 before clearing to fetch the values before they are zeroed-out. */
3794 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
3796 preeval_data.lhs_base_decl = get_base_address (object);
3797 if (!DECL_P (preeval_data.lhs_base_decl))
3798 preeval_data.lhs_base_decl = NULL;
3799 preeval_data.lhs_alias_set = get_alias_set (object);
3801 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3802 pre_p, post_p, &preeval_data);
3807 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3808 Note that we still have to gimplify, in order to handle the
3809 case of variable sized types. Avoid shared tree structures. */
3810 CONSTRUCTOR_ELTS (ctor) = NULL;
3811 TREE_SIDE_EFFECTS (ctor) = 0;
3812 object = unshare_expr (object);
3813 gimplify_stmt (expr_p, pre_p);
3816 /* If we have not block cleared the object, or if there are nonzero
3817 elements in the constructor, add assignments to the individual
3818 scalar fields of the object. */
3819 if (!cleared || num_nonzero_elements > 0)
3820 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3822 *expr_p = NULL_TREE;
/* COMPLEX_TYPE case: rebuild the value from its two parts.  */
3830 if (notify_temp_creation)
3833 /* Extract the real and imaginary parts out of the ctor. */
3834 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3835 r = VEC_index (constructor_elt, elts, 0)->value;
3836 i = VEC_index (constructor_elt, elts, 1)->value;
3837 if (r == NULL || i == NULL)
3839 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3846 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3847 represent creation of a complex value. */
3848 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3850 ctor = build_complex (type, r, i);
3851 TREE_OPERAND (*expr_p, 1) = ctor;
3855 ctor = build2 (COMPLEX_EXPR, type, r, i);
3856 TREE_OPERAND (*expr_p, 1) = ctor;
3857 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3860 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
/* VECTOR_TYPE case.  */
3868 unsigned HOST_WIDE_INT ix;
3869 constructor_elt *ce;
3871 if (notify_temp_creation)
3874 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3875 if (TREE_CONSTANT (ctor))
3877 bool constant_p = true;
3880 /* Even when ctor is constant, it might contain non-*_CST
3881 elements, such as addresses or trapping values like
3882 1.0/0.0 - 1.0/0.0. Such expressions don't belong
3883 in VECTOR_CST nodes. */
3884 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3885 if (!CONSTANT_CLASS_P (value))
3893 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3897 /* Don't reduce an initializer constant even if we can't
3898 make a VECTOR_CST. It won't do anything for us, and it'll
3899 prevent us from representing it as a single constant. */
3900 if (initializer_constant_valid_p (ctor, type))
3903 TREE_CONSTANT (ctor) = 0;
3906 /* Vector types use CONSTRUCTOR all the way through gimple
3907 compilation as a general initializer. */
3908 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3910 enum gimplify_status tret;
3911 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
3913 if (tret == GS_ERROR)
3916 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
3917 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3922 /* So how did we get a CONSTRUCTOR for a scalar type? */
3926 if (ret == GS_ERROR)
3928 else if (want_value)
3935 /* If we have gimplified both sides of the initializer but have
3936 not emitted an assignment, do so now. */
3939 tree lhs = TREE_OPERAND (*expr_p, 0);
3940 tree rhs = TREE_OPERAND (*expr_p, 1);
3941 gimple init = gimple_build_assign (lhs, rhs);
3942 gimplify_seq_add_stmt (pre_p, init);
/* NOTE(review): the leading "NNNN" tokens are line numbers baked in by an
   extraction tool; the numbering gaps show that the return type line,
   braces, STRIP_NOPS and several locals are missing from this copy —
   restore from upstream gimplify.c before compiling.  */
3950 /* Given a pointer value OP0, return a simplified version of an
3951 indirection through OP0, or NULL_TREE if no simplification is
3952 possible. Note that the resulting type may be different from
3953 the type pointed to in the sense that it is still compatible
3954 from the langhooks point of view. */
3957 gimple_fold_indirect_ref (tree t)
/* TYPE is the type of the object *T would designate.  */
3959 tree type = TREE_TYPE (TREE_TYPE (t));
3964 subtype = TREE_TYPE (sub);
3965 if (!POINTER_TYPE_P (subtype))
/* Case 1: dereferencing an ADDR_EXPR — fold *&op into a direct
   reference to OP (possibly reinterpreted).  */
3968 if (TREE_CODE (sub) == ADDR_EXPR)
3970 tree op = TREE_OPERAND (sub, 0);
3971 tree optype = TREE_TYPE (op);
3973 if (useless_type_conversion_p (type, optype))
3976 /* *(foo *)&fooarray => fooarray[0] */
3977 if (TREE_CODE (optype) == ARRAY_TYPE
3978 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
3979 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3981 tree type_domain = TYPE_DOMAIN (optype);
3982 tree min_val = size_zero_node;
3983 if (type_domain && TYPE_MIN_VALUE (type_domain))
3984 min_val = TYPE_MIN_VALUE (type_domain);
3985 if (TREE_CODE (min_val) == INTEGER_CST)
3986 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3988 /* *(foo *)&complexfoo => __real__ complexfoo */
3989 else if (TREE_CODE (optype) == COMPLEX_TYPE
3990 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3991 return fold_build1 (REALPART_EXPR, type, op);
3992 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
3993 else if (TREE_CODE (optype) == VECTOR_TYPE
3994 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3996 tree part_width = TYPE_SIZE (type);
3997 tree index = bitsize_int (0);
3998 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
/* Case 2: dereferencing &object + constant offset.  */
4002 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
4003 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4004 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4006 tree op00 = TREE_OPERAND (sub, 0);
4007 tree op01 = TREE_OPERAND (sub, 1);
4011 op00type = TREE_TYPE (op00);
4012 if (TREE_CODE (op00) == ADDR_EXPR
4013 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
4014 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (op00type))))
/* Convert the byte offset into a bit index, and only fold when
   the index stays within the vector.  */
4016 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
4017 tree part_width = TYPE_SIZE (type);
4018 unsigned HOST_WIDE_INT part_widthi
4019 = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
4020 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
4021 tree index = bitsize_int (indexi);
4022 if (offset / part_widthi
4023 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
4024 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
4029 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
4030 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4031 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4033 tree op00 = TREE_OPERAND (sub, 0);
4034 tree op01 = TREE_OPERAND (sub, 1);
4038 op00type = TREE_TYPE (op00);
4039 if (TREE_CODE (op00) == ADDR_EXPR
4040 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
4041 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (op00type))))
/* The offset must equal exactly one component size to name the
   imaginary part.  */
4043 tree size = TYPE_SIZE_UNIT (type);
4044 if (tree_int_cst_equal (size, op01))
4045 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
/* Case 3: pointer-to-array dereferenced as its element type.  */
4049 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
4050 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
4051 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
4052 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
4055 tree min_val = size_zero_node;
4057 sub = gimple_fold_indirect_ref (sub);
4059 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
4060 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
4061 if (type_domain && TYPE_MIN_VALUE (type_domain))
4062 min_val = TYPE_MIN_VALUE (type_domain);
4063 if (TREE_CODE (min_val) == INTEGER_CST)
4064 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
4070 /* Given a pointer value OP0, return a simplified version of an
4071 indirection through OP0, or NULL_TREE if no simplification is
4072 possible. This may only be applied to a rhs of an expression.
4073 Note that the resulting type may be different from the type pointed
4074 to in the sense that it is still compatible from the langhooks
4078 gimple_fold_indirect_ref_rhs (tree t)
4080 return gimple_fold_indirect_ref (t);
/* NOTE(review): the leading "NNNN" tokens are line numbers baked in by an
   extraction tool; the gaps in the numbering show that case labels,
   braces, `break`s and the function epilogue (default case, return) are
   missing from this copy — restore from upstream gimplify.c before
   compiling.  */
4083 /* Subroutine of gimplify_modify_expr to do simplifications of
4084 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4085 something changes. */
4087 static enum gimplify_status
4088 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4089 gimple_seq *pre_p, gimple_seq *post_p,
4092 enum gimplify_status ret = GS_OK;
/* Keep re-dispatching on the (possibly rewritten) RHS until no case
   applies; GS_UNHANDLED terminates the loop.  */
4094 while (ret != GS_UNHANDLED)
4095 switch (TREE_CODE (*from_p))
4098 /* If we're assigning from a read-only variable initialized with
4099 a constructor, do the direct assignment from the constructor,
4100 but only if neither source nor target are volatile since this
4101 latter assignment might end up being done on a per-field basis. */
4102 if (DECL_INITIAL (*from_p)
4103 && TREE_READONLY (*from_p)
4104 && !TREE_THIS_VOLATILE (*from_p)
4105 && !TREE_THIS_VOLATILE (*to_p)
4106 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4108 tree old_from = *from_p;
4110 /* Move the constructor into the RHS. */
4111 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4113 /* Let's see if gimplify_init_constructor will need to put
4114 it in memory. If so, revert the change. */
4115 ret = gimplify_init_constructor (expr_p, NULL, NULL, false, true);
4116 if (ret == GS_ERROR)
4131 /* If we have code like
4135 where the type of "x" is a (possibly cv-qualified variant
4136 of "A"), treat the entire expression as identical to "x".
4137 This kind of code arises in C++ when an object is bound
4138 to a const reference, and if "x" is a TARGET_EXPR we want
4139 to take advantage of the optimization below. */
4140 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4153 /* If we are initializing something from a TARGET_EXPR, strip the
4154 TARGET_EXPR and initialize it directly, if possible. This can't
4155 be done if the initializer is void, since that implies that the
4156 temporary is set in some non-trivial way.
4158 ??? What about code that pulls out the temp and uses it
4159 elsewhere? I think that such code never uses the TARGET_EXPR as
4160 an initializer. If I'm wrong, we'll die because the temp won't
4161 have any RTL. In that case, I guess we'll need to replace
4162 references somehow. */
4163 tree init = TARGET_EXPR_INITIAL (*from_p);
4166 && !VOID_TYPE_P (TREE_TYPE (init)))
4177 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4179 gimplify_compound_expr (from_p, pre_p, true);
4184 /* If we're initializing from a CONSTRUCTOR, break this into
4185 individual MODIFY_EXPRs. */
4186 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
/* COND_EXPR on the RHS.  */
4190 /* If we're assigning to a non-register type, push the assignment
4191 down into the branches. This is mandatory for ADDRESSABLE types,
4192 since we cannot generate temporaries for such, but it saves a
4193 copy in other cases as well. */
4194 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4196 /* This code should mirror the code in gimplify_cond_expr. */
4197 enum tree_code code = TREE_CODE (*expr_p);
4198 tree cond = *from_p;
4199 tree result = *to_p;
4201 ret = gimplify_expr (&result, pre_p, post_p,
4202 is_gimple_lvalue, fb_lvalue);
4203 if (ret != GS_ERROR)
/* Wrap each non-void arm in an assignment to RESULT.  */
4206 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4207 TREE_OPERAND (cond, 1)
4208 = build2 (code, void_type_node, result,
4209 TREE_OPERAND (cond, 1));
4210 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4211 TREE_OPERAND (cond, 2)
4212 = build2 (code, void_type_node, unshare_expr (result),
4213 TREE_OPERAND (cond, 2));
4215 TREE_TYPE (cond) = void_type_node;
4216 recalculate_side_effects (cond);
4220 gimplify_and_add (cond, pre_p);
4221 *expr_p = unshare_expr (result);
/* CALL_EXPR on the RHS.  */
4232 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4233 return slot so that we don't generate a temporary. */
4234 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4235 && aggregate_value_p (*from_p, *from_p))
4239 if (!(rhs_predicate_for (*to_p))(*from_p))
4240 /* If we need a temporary, *to_p isn't accurate. */
4242 else if (TREE_CODE (*to_p) == RESULT_DECL
4243 && DECL_NAME (*to_p) == NULL_TREE
4244 && needs_to_live_in_memory (*to_p))
4245 /* It's OK to use the return slot directly unless it's an NRV. */
4247 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4248 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4249 /* Don't force regs into memory. */
4251 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4252 /* It's OK to use the target directly if it's being
4255 else if (!is_gimple_non_addressable (*to_p))
4256 /* Don't use the original target if it's already addressable;
4257 if its address escapes, and the called function uses the
4258 NRV optimization, a conforming program could see *to_p
4259 change before the called function returns; see c++/19317.
4260 When optimizing, the return_slot pass marks more functions
4261 as safe after we have escape info. */
/* All checks passed: let the call write straight into *TO_P.  */
4268 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4269 mark_addressable (*to_p);
4276 case WITH_SIZE_EXPR:
4277 /* Likewise for calls that return an aggregate of non-constant size,
4278 since we would not be able to generate a temporary at all. */
4279 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4281 *from_p = TREE_OPERAND (*from_p, 0);
4288 /* If we're initializing from a container, push the initialization
4290 case CLEANUP_POINT_EXPR:
4292 case STATEMENT_LIST:
4294 tree wrap = *from_p;
4297 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4299 if (ret != GS_ERROR)
4302 t = voidify_wrapper_expr (wrap, *expr_p);
4303 gcc_assert (t == *expr_p);
4307 gimplify_and_add (wrap, pre_p);
4308 *expr_p = unshare_expr (*to_p);
4315 case COMPOUND_LITERAL_EXPR:
4317 tree complit = TREE_OPERAND (*expr_p, 1);
4318 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4319 tree decl = DECL_EXPR_DECL (decl_s);
4320 tree init = DECL_INITIAL (decl);
4322 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4323 into struct T x = { 0, 1, 2 } if the address of the
4324 compound literal has never been taken. */
4325 if (!TREE_ADDRESSABLE (complit)
4326 && !TREE_ADDRESSABLE (decl)
4329 *expr_p = copy_node (*expr_p);
4330 TREE_OPERAND (*expr_p, 1) = init;
4344 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4345 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4346 DECL_GIMPLE_REG_P set.
4348 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4349 other, unmodified part of the complex object just before the total store.
4350 As a consequence, if the object is still uninitialized, an undefined value
4351 will be loaded into a register, which may result in a spurious exception
4352 if the register is floating-point and the value happens to be a signaling
4353 NaN for example. Then the fully-fledged complex operations lowering pass
4354 followed by a DCE pass are necessary in order to fix things up. */
4356 static enum gimplify_status
4357 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4360 enum tree_code code, ocode;
4361 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4363 lhs = TREE_OPERAND (*expr_p, 0);
4364 rhs = TREE_OPERAND (*expr_p, 1);
4365 code = TREE_CODE (lhs);
4366 lhs = TREE_OPERAND (lhs, 0);
4368 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4369 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4370 other = get_formal_tmp_var (other, pre_p);
4372 realpart = code == REALPART_EXPR ? rhs : other;
4373 imagpart = code == REALPART_EXPR ? other : rhs;
4375 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4376 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4378 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4380 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4381 *expr_p = (want_value) ? rhs : NULL_TREE;
/* NOTE(review): the leading "NNNN" tokens are line numbers baked in by an
   extraction tool; the gaps in that numbering show the parameter line
   with WANT_VALUE, braces, `return`s and the function epilogue are
   missing from this copy — restore from upstream gimplify.c before
   compiling.  */
4387 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4393 PRE_P points to the list where side effects that must happen before
4394 *EXPR_P should be stored.
4396 POST_P points to the list where side effects that must happen after
4397 *EXPR_P should be stored.
4399 WANT_VALUE is nonzero iff we want to use the value of this expression
4400 in another expression. */
4402 static enum gimplify_status
4403 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4406 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4407 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4408 enum gimplify_status ret = GS_UNHANDLED;
4410 location_t loc = EXPR_LOCATION (*expr_p);
4412 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4413 || TREE_CODE (*expr_p) == INIT_EXPR);
4415 /* Insert pointer conversions required by the middle-end that are not
4416 required by the frontend. This fixes middle-end type checking for
4417 for example gcc.dg/redecl-6.c. */
4418 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4420 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4421 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4422 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4425 /* See if any simplifications can be done based on what the RHS is. */
4426 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4428 if (ret != GS_UNHANDLED)
4431 /* For zero sized types only gimplify the left hand side and right hand
4432 side as statements and throw away the assignment. Do this after
4433 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4435 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4437 gimplify_stmt (from_p, pre_p);
4438 gimplify_stmt (to_p, pre_p);
4439 *expr_p = NULL_TREE;
4443 /* If the value being copied is of variable width, compute the length
4444 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4445 before gimplifying any of the operands so that we can resolve any
4446 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4447 the size of the expression to be copied, not of the destination, so
4448 that is what we must do here. */
4449 maybe_with_size_expr (from_p);
/* Gimplify the LHS first, then the RHS against the predicate the
   (gimplified) LHS demands.  */
4451 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4452 if (ret == GS_ERROR)
4455 /* As a special case, we have to temporarily allow for assignments
4456 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4457 a toplevel statement, when gimplifying the GENERIC expression
4458 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4459 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4461 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4462 prevent gimplify_expr from trying to create a new temporary for
4463 foo's LHS, we tell it that it should only gimplify until it
4464 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4465 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4466 and all we need to do here is set 'a' to be its LHS. */
4467 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4469 if (ret == GS_ERROR)
4472 /* Now see if the above changed *from_p to something we handle specially. */
4473 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4475 if (ret != GS_UNHANDLED)
4478 /* If we've got a variable sized assignment between two lvalues (i.e. does
4479 not involve a call), then we can make things a bit more straightforward
4480 by converting the assignment to memcpy or memset. */
4481 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4483 tree from = TREE_OPERAND (*from_p, 0);
4484 tree size = TREE_OPERAND (*from_p, 1);
4486 if (TREE_CODE (from) == CONSTRUCTOR)
4487 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4489 if (is_gimple_addressable (from))
4492 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4497 /* Transform partial stores to non-addressable complex variables into
4498 total stores. This allows us to use real instead of virtual operands
4499 for these variables, which improves optimization. */
4500 if ((TREE_CODE (*to_p) == REALPART_EXPR
4501 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4502 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4503 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4505 /* Try to alleviate the effects of the gimplification creating artificial
4506 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4507 if (!gimplify_ctxp->into_ssa
4509 && DECL_IGNORED_P (*from_p)
4511 && !DECL_IGNORED_P (*to_p))
4513 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4515 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4516 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4517 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
/* Build the final GIMPLE statement for the assignment.  */
4520 if (TREE_CODE (*from_p) == CALL_EXPR)
4522 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4523 instead of a GIMPLE_ASSIGN. */
4524 assign = gimple_build_call_from_tree (*from_p);
4525 if (!gimple_call_noreturn_p (assign))
4526 gimple_call_set_lhs (assign, *to_p);
4530 assign = gimple_build_assign (*to_p, *from_p);
4531 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4534 gimplify_seq_add_stmt (pre_p, assign);
4536 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4538 /* If we've somehow already got an SSA_NAME on the LHS, then
4539 we've probably modified it twice. Not good. */
4540 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
4541 *to_p = make_ssa_name (*to_p, assign);
4542 gimple_set_lhs (assign, *to_p);
4547 *expr_p = unshare_expr (*to_p);
4556 /* Gimplify a comparison between two variable-sized objects. Do this
4557 with a call to BUILT_IN_MEMCMP. */
4559 static enum gimplify_status
4560 gimplify_variable_sized_compare (tree *expr_p)
4562 tree op0 = TREE_OPERAND (*expr_p, 0);
4563 tree op1 = TREE_OPERAND (*expr_p, 1);
4564 tree t, arg, dest, src;
4565 location_t loc = EXPR_LOCATION (*expr_p);
/* ARG is the byte count to compare: OP0's type size, unshared and with
   any PLACEHOLDER_EXPRs substituted from OP0 itself.  */
4567 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4568 arg = unshare_expr (arg);
4569 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
/* Take the addresses of the two objects and build the memcmp call.  */
4570 src = build_fold_addr_expr_loc (loc, op1);
4571 dest = build_fold_addr_expr_loc (loc, op0);
4572 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
4573 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
/* Rewrite *EXPR_P as '<memcmp result> <original comparison code> 0',
   keeping the original result type.  */
4575 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4580 /* Gimplify a comparison between two aggregate objects of integral scalar
4581 mode as a comparison between the bitwise equivalent scalar values. */
4583 static enum gimplify_status
4584 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4586 location_t loc = EXPR_LOCATION (*expr_p);
4587 tree op0 = TREE_OPERAND (*expr_p, 0);
4588 tree op1 = TREE_OPERAND (*expr_p, 1);
4590 tree type = TREE_TYPE (op0);
/* Ask the frontend for an unsigned integral type with the same machine
   mode as the aggregate operands.  */
4591 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
/* Reinterpret both operands bitwise as that scalar type ...  */
4593 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4594 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
/* ... and compare the scalar values with the original comparison code.  */
4597 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4602 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
4603 points to the expression to gimplify.
4605 Expressions of the form 'a && b' are gimplified to:
4607 a && b ? true : false
4609 LOCUS is the source location to be put on the generated COND_EXPR.
4610 gimplify_cond_expr will do the rest. */
4612 static enum gimplify_status
4613 gimplify_boolean_expr (tree *expr_p, location_t locus)
4615 /* Preserve the original type of the expression. */
4616 tree type = TREE_TYPE (*expr_p);
/* Rewrite 'a && b' (or 'a || b') as 'a && b ? true : false', converting
   the arms to the original type; gimplify_cond_expr then lowers the
   short-circuit evaluation.  */
4618 *expr_p = build3 (COND_EXPR, type, *expr_p,
4619 fold_convert_loc (locus, type, boolean_true_node),
4620 fold_convert_loc (locus, type, boolean_false_node));
/* Stamp the generated COND_EXPR with the caller-supplied location.  */
4622 SET_EXPR_LOCATION (*expr_p, locus);
4627 /* Gimplifies an expression sequence. This function gimplifies each
4628 expression and re-writes the original expression with the last
4629 expression of the sequence in GIMPLE form.
4631 PRE_P points to the list where the side effects for all the
4632 expressions in the sequence will be emitted.
4634 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4636 static enum gimplify_status
4637 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
/* Walk down the chain of COMPOUND_EXPRs, gimplifying each left-hand
   operand for side effects only.  The loop iterates over TREE_OPERAND 1
   instead of recursing on it, to avoid deep recursion on long chains;
   recursion is only used for a COMPOUND_EXPR nested in operand 0.  */
4643 tree *sub_p = &TREE_OPERAND (t, 0);
4645 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4646 gimplify_compound_expr (sub_p, pre_p, false);
4648 gimplify_stmt (sub_p, pre_p);
4650 t = TREE_OPERAND (t, 1);
4652 while (TREE_CODE (t) == COMPOUND_EXPR);
/* When the overall value is unused, the final expression is likewise
   gimplified purely as a statement.  */
4659 gimplify_stmt (expr_p, pre_p);
4665 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4666 gimplify. After gimplification, EXPR_P will point to a new temporary
4667 that holds the original value of the SAVE_EXPR node.
4669 PRE_P points to the list where side effects that must happen before
4670 *EXPR_P should be stored. */
4672 static enum gimplify_status
4673 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4675 enum gimplify_status ret = GS_ALL_DONE;
4678 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4679 val = TREE_OPERAND (*expr_p, 0);
4681 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4682 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4684 /* The operand may be a void-valued expression such as SAVE_EXPRs
4685 generated by the Java frontend for class initialization. It is
4686 being executed only for its side-effects. */
4687 if (TREE_TYPE (val) == void_type_node)
4689 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4690 is_gimple_stmt, fb_none);
/* Otherwise evaluate the operand into a fresh temporary so later
   occurrences of this SAVE_EXPR reuse the cached value.  */
4694 val = get_initialized_tmp_var (val, pre_p, post_p);
/* Record the resolved value and mark the node so it is not evaluated
   again.  */
4696 TREE_OPERAND (*expr_p, 0) = val;
4697 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4705 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
4712 PRE_P points to the list where side effects that must happen before
4713 *EXPR_P should be stored.
4715 POST_P points to the list where side effects that must happen after
4716 *EXPR_P should be stored. */
4718 static enum gimplify_status
4719 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4721 tree expr = *expr_p;
4722 tree op0 = TREE_OPERAND (expr, 0);
4723 enum gimplify_status ret;
4724 location_t loc = EXPR_LOCATION (*expr_p);
/* Dispatch on the operand of the ADDR_EXPR.  */
4726 switch (TREE_CODE (op0))
4729 case MISALIGNED_INDIRECT_REF:
4731 /* Check if we are dealing with an expression of the form '&*ptr'.
4732 While the front end folds away '&*ptr' into 'ptr', these
4733 expressions may be generated internally by the compiler (e.g.,
4734 builtins like __builtin_va_end). */
4735 /* Caution: the silent array decomposition semantics we allow for
4736 ADDR_EXPR means we can't always discard the pair. */
4737 /* Gimplification of the ADDR_EXPR operand may drop
4738 cv-qualification conversions, so make sure we add them if
4741 tree op00 = TREE_OPERAND (op0, 0);
4742 tree t_expr = TREE_TYPE (expr);
4743 tree t_op00 = TREE_TYPE (op00);
/* If '&*ptr' and 'ptr' differ (e.g. in qualifiers), reinstate the
   conversion so the folded result keeps the ADDR_EXPR's type.  */
4745 if (!useless_type_conversion_p (t_expr, t_op00))
4746 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
4752 case VIEW_CONVERT_EXPR:
4753 /* Take the address of our operand and then convert it to the type of
4756 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
4757 all clear. The impact of this transformation is even less clear. */
4759 /* If the operand is a useless conversion, look through it. Doing so
4760 guarantees that the ADDR_EXPR and its operand will remain of the
4762 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4763 op0 = TREE_OPERAND (op0, 0);
/* Rewrite '&VIEW_CONVERT_EXPR<T>(x)' as '(T *) &x'.  */
4765 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4766 build_fold_addr_expr_loc (loc,
4767 TREE_OPERAND (op0, 0)));
/* Default case: a genuine address-taken operand.  */
4772 /* We use fb_either here because the C frontend sometimes takes
4773 the address of a call that returns a struct; see
4774 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
4775 the implied temporary explicit. */
4777 /* Make the operand addressable. */
4778 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4779 is_gimple_addressable, fb_either);
4780 if (ret == GS_ERROR)
4783 /* Then mark it. Beware that it may not be possible to do so directly
4784 if a temporary has been created by the gimplification. */
4785 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
4787 op0 = TREE_OPERAND (expr, 0);
4789 /* For various reasons, the gimplification of the expression
4790 may have made a new INDIRECT_REF. */
4791 if (TREE_CODE (op0) == INDIRECT_REF)
4792 goto do_indirect_ref;
4794 mark_addressable (TREE_OPERAND (expr, 0));
4796 /* The FEs may end up building ADDR_EXPRs early on a decl with
4797 an incomplete type. Re-build ADDR_EXPRs in canonical form
4799 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
4800 *expr_p = build_fold_addr_expr (op0);
4802 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
4803 recompute_tree_invariant_for_addr_expr (*expr_p);
4805 /* If we re-built the ADDR_EXPR add a conversion to the original type
4807 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
4808 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
4816 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
4817 value; output operands should be a gimple lvalue. */
4819 static enum gimplify_status
4820 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4824 const char **oconstraints;
4827 const char *constraint;
4828 bool allows_mem, allows_reg, is_inout;
4829 enum gimplify_status ret, tret;
/* Operand vectors collected for the eventual GIMPLE_ASM statement.  */
4831 VEC(tree, gc) *inputs;
4832 VEC(tree, gc) *outputs;
4833 VEC(tree, gc) *clobbers;
4834 VEC(tree, gc) *labels;
4838 noutputs = list_length (ASM_OUTPUTS (expr));
/* Stack buffer of output constraint strings, consulted later when
   parsing input constraints that refer to outputs by number.  */
4839 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4841 inputs = outputs = clobbers = labels = NULL;
/* First pass: gimplify all output operands.  */
4844 link_next = NULL_TREE;
4845 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
4848 size_t constraint_len;
4850 link_next = TREE_CHAIN (link);
4854 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4855 constraint_len = strlen (constraint);
4856 if (constraint_len == 0)
4859 ok = parse_output_constraint (&constraint, i, 0, 0,
4860 &allows_mem, &allows_reg, &is_inout);
/* An output that must live in memory has to be addressable.  */
4867 if (!allows_reg && allows_mem)
4868 mark_addressable (TREE_VALUE (link));
4870 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4871 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4872 fb_lvalue | fb_mayfail);
4873 if (tret == GS_ERROR)
4875 error ("invalid lvalue in asm output %d", i)
4879 VEC_safe_push (tree, gc, outputs, link);
4880 TREE_CHAIN (link) = NULL_TREE;
4884 /* An input/output operand. To give the optimizers more
4885 flexibility, split it into separate input and output
4890 /* Turn the in/out constraint into an output constraint. */
4891 char *p = xstrdup (constraint);
4893 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4895 /* And add a matching input constraint. */
4898 sprintf (buf, "%d", i);
4900 /* If there are multiple alternatives in the constraint,
4901 handle each of them individually. Those that allow register
4902 will be replaced with operand number, the others will stay
4904 if (strchr (p, ',') != NULL)
/* First scan: compute the buffer size needed for the rewritten
   multi-alternative input constraint.  */
4906 size_t len = 0, buflen = strlen (buf);
4907 char *beg, *end, *str, *dst;
4911 end = strchr (beg, ',');
4913 end = strchr (beg, '\0');
4914 if ((size_t) (end - beg) < buflen)
4917 len += end - beg + 1;
/* Second scan: emit each alternative, substituting the operand
   number for alternatives that allow a register.  */
4924 str = (char *) alloca (len);
4925 for (beg = p + 1, dst = str;;)
4928 bool mem_p, reg_p, inout_p;
4930 end = strchr (beg, ',');
4935 parse_output_constraint (&tem, i, 0, 0,
/* NOTE(review): "®_p" below looks like mojibake for "&reg_p" —
   confirm against the upstream gimplify.c before relying on it.  */
4936 &mem_p, ®_p, &inout_p);
4941 memcpy (dst, buf, buflen);
4950 memcpy (dst, beg, len);
4959 input = build_string (dst - str, str);
4962 input = build_string (strlen (buf), buf);
4965 input = build_string (constraint_len - 1, constraint + 1);
/* Chain the synthesized matching input onto the asm's input list so
   the following input pass will gimplify it too.  */
4969 input = build_tree_list (build_tree_list (NULL_TREE, input),
4970 unshare_expr (TREE_VALUE (link)));
4971 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
/* Second pass: gimplify all input operands.  */
4975 link_next = NULL_TREE;
4976 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
4978 link_next = TREE_CHAIN (link);
4979 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4980 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4981 oconstraints, &allows_mem, &allows_reg);
4983 /* If we can't make copies, we can only accept memory. */
4984 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
4990 error ("impossible constraint in %<asm%>");
4991 error ("non-memory input %d must stay in memory", i);
4996 /* If the operand is a memory input, it should be an lvalue. */
4997 if (!allows_reg && allows_mem)
4999 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5000 is_gimple_lvalue, fb_lvalue | fb_mayfail);
5001 mark_addressable (TREE_VALUE (link));
5002 if (tret == GS_ERROR)
5004 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5005 input_location = EXPR_LOCATION (TREE_VALUE (link));
5006 error ("memory input %d is not directly addressable", i);
/* Register (or general) inputs only need to be valid asm rvalues.  */
5012 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5013 is_gimple_asm_val, fb_rvalue);
5014 if (tret == GS_ERROR)
5018 TREE_CHAIN (link) = NULL_TREE;
5019 VEC_safe_push (tree, gc, inputs, link);
/* Clobbers and goto-labels need no gimplification; just collect them.  */
5022 for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
5023 VEC_safe_push (tree, gc, clobbers, link);
5025 for (link = ASM_LABELS (expr); link; ++i, link = TREE_CHAIN (link))
5026 VEC_safe_push (tree, gc, labels, link);
5028 /* Do not add ASMs with errors to the gimple IL stream. */
5029 if (ret != GS_ERROR)
5031 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5032 inputs, outputs, clobbers, labels);
5034 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5035 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5037 gimplify_seq_add_stmt (pre_p, stmt);
5043 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5044 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5045 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5046 return to this function.
5048 FIXME should we complexify the prequeue handling instead? Or use flags
5049 for all the cleanups and let the optimizer tighten them up? The current
5050 code seems pretty fragile; it will break on a cleanup within any
5051 non-conditional nesting. But any such nesting would be broken, anyway;
5052 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5053 and continues out of it. We can do that at the RTL level, though, so
5054 having an optimizer to tighten up try/finally regions would be a Good
5057 static enum gimplify_status
5058 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5060 gimple_stmt_iterator iter;
5061 gimple_seq body_sequence = NULL;
5063 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5065 /* We only care about the number of conditions between the innermost
5066 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5067 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5068 int old_conds = gimplify_ctxp->conditions;
5069 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5070 gimplify_ctxp->conditions = 0;
5071 gimplify_ctxp->conditional_cleanups = NULL;
/* Gimplify the body into a private sequence so the WCE markers it
   produces can be rewritten below.  */
5073 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5075 gimplify_ctxp->conditions = old_conds;
5076 gimplify_ctxp->conditional_cleanups = old_cleanups;
/* Convert each GIMPLE_WITH_CLEANUP_EXPR into a GIMPLE_TRY wrapping the
   rest of the sequence.  */
5078 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5080 gimple wce = gsi_stmt (iter);
5082 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
/* A cleanup marker at the very end protects nothing; just inline
   the cleanup statements in its place.  */
5084 if (gsi_one_before_end_p (iter))
5086 /* Note that gsi_insert_seq_before and gsi_remove do not
5087 scan operands, unlike some other sequence mutators. */
5088 gsi_insert_seq_before_without_update (&iter,
5089 gimple_wce_cleanup (wce),
5091 gsi_remove (&iter, true);
/* Otherwise wrap the remaining statements in a try/finally (or
   try/catch for EH-only cleanups).  */
5098 enum gimple_try_flags kind;
5100 if (gimple_wce_cleanup_eh_only (wce))
5101 kind = GIMPLE_TRY_CATCH;
5103 kind = GIMPLE_TRY_FINALLY;
5104 seq = gsi_split_seq_after (iter);
5106 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5107 /* Do not use gsi_replace here, as it may scan operands.
5108 We want to do a simple structural modification only. */
5109 *gsi_stmt_ptr (&iter) = gtry;
/* Continue scanning inside the newly created try body, which may
   contain further cleanup markers.  */
5110 iter = gsi_start (seq);
5117 gimplify_seq_add_seq (pre_p, body_sequence);
5130 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5131 is the cleanup action required. EH_ONLY is true if the cleanup should
5132 only be executed if an exception is thrown, not on normal exit. */
5135 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5138 gimple_seq cleanup_stmts = NULL;
5140 /* Errors can result in improperly nested cleanups. Which results in
5141 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5142 if (errorcount || sorrycount)
5145 if (gimple_conditional_context ())
5147 /* If we're in a conditional context, this is more complex. We only
5148 want to run the cleanup if we actually ran the initialization that
5149 necessitates it, but we want to run it after the end of the
5150 conditional context. So we wrap the try/finally around the
5151 condition and use a flag to determine whether or not to actually
5152 run the destructor. Thus
5156 becomes (approximately)
5160 if (test) { A::A(temp); flag = 1; val = f(temp); }
5163 if (flag) A::~A(temp);
/* FLAG records whether the guarded initialization actually ran.  */
5167 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5168 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5169 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
/* Guard the cleanup itself with the flag.  */
5171 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5172 gimplify_stmt (&cleanup, &cleanup_stmts);
5173 wce = gimple_build_wce (cleanup_stmts);
/* 'flag = false' and the cleanup marker go to the enclosing
   unconditional context; 'flag = true' is emitted at the current
   (conditional) point.  */
5175 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5176 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5177 gimplify_seq_add_stmt (pre_p, ftrue);
5179 /* Because of this manipulation, and the EH edges that jump
5180 threading cannot redirect, the temporary (VAR) will appear
5181 to be used uninitialized. Don't warn. */
5182 TREE_NO_WARNING (var) = 1;
/* Unconditional context: emit a plain cleanup marker directly.  */
5186 gimplify_stmt (&cleanup, &cleanup_stmts);
5187 wce = gimple_build_wce (cleanup_stmts);
5188 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5189 gimplify_seq_add_stmt (pre_p, wce);
5193 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5195 static enum gimplify_status
5196 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5198 tree targ = *expr_p;
5199 tree temp = TARGET_EXPR_SLOT (targ);
5200 tree init = TARGET_EXPR_INITIAL (targ);
5201 enum gimplify_status ret;
5205 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5206 to the temps list. Handle also variable length TARGET_EXPRs. */
5207 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5209 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5210 gimplify_type_sizes (TREE_TYPE (temp), pre_p)
5211 gimplify_vla_decl (temp, pre_p);
5214 gimple_add_tmp_var (temp);
5216 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5217 expression is supposed to initialize the slot. */
5218 if (VOID_TYPE_P (TREE_TYPE (init)))
5219 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
/* Otherwise build an explicit 'temp = init' and gimplify that.  */
5222 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5224 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5226 ggc_free (init_expr);
5228 if (ret == GS_ERROR)
5230 /* PR c++/28266 Make sure this is expanded only once. */
5231 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5235 gimplify_and_add (init, pre_p);
5237 /* If needed, push the cleanup for the temp. */
5238 if (TARGET_EXPR_CLEANUP (targ))
5239 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5240 CLEANUP_EH_ONLY (targ), pre_p);
5242 /* Only expand this once. */
5243 TREE_OPERAND (targ, 3) = init;
5244 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5247 /* We should have expanded this before. */
5248 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5254 /* Gimplification of expression trees. */
5256 /* Gimplify an expression which appears at statement context. The
5257 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5258 NULL, a new sequence is allocated.
5260 Return true if we actually added a statement to the queue. */
5263 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5265 gimple_seq_node last;
/* Allocate a sequence on demand so callers may pass *SEQ_P == NULL.  */
5268 *seq_p = gimple_seq_alloc ();
/* Remember the current tail; comparing it afterwards tells us whether
   gimplification appended anything.  */
5270 last = gimple_seq_last (*seq_p);
5271 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5272 return last != gimple_seq_last (*seq_p);
5276 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5277 to CTX. If entries already exist, force them to be some flavor of private.
5278 If there is no enclosing parallel, do nothing. */
5281 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
/* Non-decls (and a missing decl) have no data-sharing entry.  */
5285 if (decl == NULL || !DECL_P (decl))
5290 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
/* An existing SHARED entry is demoted to FIRSTPRIVATE, keeping only
   the SEEN bit.  */
5293 if (n->value & GOVD_SHARED)
5294 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
/* No entry: add FIRSTPRIVATE in any non-workshare region.  */
5298 else if (ctx->region_type != ORT_WORKSHARE)
5299 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
/* Walk outward through the enclosing OpenMP contexts.  */
5301 ctx = ctx->outer_context;
5306 /* Similarly for each of the type sizes of TYPE. */
5309 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5311 if (type == NULL || type == error_mark_node)
5313 type = TYPE_MAIN_VARIANT (type);
/* pointer_set_insert returns nonzero if TYPE was already present;
   this both deduplicates and breaks cycles in recursive types.  */
5315 if (pointer_set_insert (ctx->privatized_types, type))
5318 switch (TREE_CODE (type))
5324 case FIXED_POINT_TYPE:
/* Scalar-like types: firstprivatize their bound expressions.  */
5325 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5326 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
/* Array-like case: recurse into element type and index domain.  */
5330 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5331 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5336 case QUAL_UNION_TYPE:
/* Record/union case: handle each field's offset and type.  */
5339 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
5340 if (TREE_CODE (field) == FIELD_DECL)
5342 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5343 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5349 case REFERENCE_TYPE:
5350 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
/* Common tail: the type's own size expressions, plus any language-
   specific sizes via the langhook.  */
5357 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5358 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5359 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5362 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5365 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5368 unsigned int nflags;
5371 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5374 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5375 there are constructors involved somewhere. */
5376 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5377 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5380 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5383 /* We shouldn't be re-adding the decl with the same data
5385 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5386 /* The only combination of data sharing classes we should see is
5387 FIRSTPRIVATE and LASTPRIVATE. */
5388 nflags = n->value | flags;
5389 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5390 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
5395 /* When adding a variable-sized variable, we have to handle all sorts
5396 of additional bits of data: the pointer replacement variable, and
5397 the parameters of the type. */
5398 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5400 /* Add the pointer replacement variable as PRIVATE if the variable
5401 replacement is private, else FIRSTPRIVATE since we'll need the
5402 address of the original variable either for SHARED, or for the
5403 copy into or out of the context. */
5404 if (!(flags & GOVD_LOCAL))
5406 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5407 nflags |= flags & GOVD_SEEN;
/* The VLA decl's DECL_VALUE_EXPR is '*ptr'; extract and record the
   underlying pointer replacement decl.  */
5408 t = DECL_VALUE_EXPR (decl);
5409 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5410 t = TREE_OPERAND (t, 0);
5411 gcc_assert (DECL_P (t));
5412 omp_add_variable (ctx, t, nflags);
5415 /* Add all of the variable and type parameters (which should have
5416 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5417 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5418 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5419 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5421 /* The variable-sized variable itself is never SHARED, only some form
5422 of PRIVATE. The sharing would take place via the pointer variable
5423 which we remapped above. */
5424 if (flags & GOVD_SHARED)
5425 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5426 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5428 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5429 alloca statement we generate for the variable, so make sure it
5430 is available. This isn't automatically needed for the SHARED
5431 case, since we won't be allocating local storage then.
5432 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5433 in this case omp_notice_variable will be called later
5434 on when it is gimplified. */
5435 else if (! (flags & GOVD_LOCAL))
5436 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5438 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5440 gcc_assert ((flags & GOVD_LOCAL) == 0);
5441 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5443 /* Similar to the direct variable sized case above, we'll need the
5444 size of references being privatized. */
5445 if ((flags & GOVD_SHARED) == 0)
5447 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5448 if (TREE_CODE (t) != INTEGER_CST)
5449 omp_notice_variable (ctx, t, true);
/* Finally record (or update) DECL's data-sharing flags in this
   context's splay tree.  */
5453 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5456 /* Record the fact that DECL was used within the OpenMP context CTX.
5457 IN_CODE is true when real code uses DECL, and false when we should
5458 merely emit default(none) errors. Return true if DECL is going to
5459 be remapped and thus DECL shouldn't be gimplified into its
5460 DECL_VALUE_EXPR (if any). */
5463 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5466 unsigned flags = in_code ? GOVD_SEEN : 0;
5467 bool ret = false, shared;
5469 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5472 /* Threadprivate variables are predetermined. */
5473 if (is_global_var (decl))
5475 if (DECL_THREAD_LOCAL_P (decl))
/* A global whose DECL_VALUE_EXPR is based on a thread-local decl
   counts as threadprivate too (emutls case).  */
5478 if (DECL_HAS_VALUE_EXPR_P (decl))
5480 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5482 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5487 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
/* No entry yet: determine the variable's implicit data-sharing class
   from the region's default clause and predetermined sharing.  */
5490 enum omp_clause_default_kind default_kind, kind;
5491 struct gimplify_omp_ctx *octx;
5493 if (ctx->region_type == ORT_WORKSHARE)
5496 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5497 remapped firstprivate instead of shared. To some extent this is
5498 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5499 default_kind = ctx->default_kind;
5500 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5501 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5502 default_kind = kind;
5504 switch (default_kind)
5506 case OMP_CLAUSE_DEFAULT_NONE:
5507 error ("%qE not specified in enclosing parallel",
5509 error_at (ctx->location, "enclosing parallel");
5511 case OMP_CLAUSE_DEFAULT_SHARED:
5512 flags |= GOVD_SHARED;
5514 case OMP_CLAUSE_DEFAULT_PRIVATE:
5515 flags |= GOVD_PRIVATE;
5517 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5518 flags |= GOVD_FIRSTPRIVATE;
5520 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5521 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5522 gcc_assert (ctx->region_type == ORT_TASK);
5523 if (ctx->outer_context)
5524 omp_notice_variable (ctx->outer_context, decl, in_code);
/* Scan enclosing contexts: a non-shared entry there makes the
   task's implicit sharing FIRSTPRIVATE.  */
5525 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5529 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5530 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5532 flags |= GOVD_FIRSTPRIVATE;
5535 if ((octx->region_type & ORT_PARALLEL) != 0)
5538 if (flags & GOVD_FIRSTPRIVATE)
5541 && (TREE_CODE (decl) == PARM_DECL
5542 || (!is_global_var (decl)
5543 && DECL_CONTEXT (decl) == current_function_decl)))
5545 flags |= GOVD_FIRSTPRIVATE;
5548 flags |= GOVD_SHARED;
/* A PRIVATE default may still need the outer reference for the
   langhook-driven construction/destruction.  */
5554 if ((flags & GOVD_PRIVATE)
5555 && lang_hooks.decls.omp_private_outer_ref (decl))
5556 flags |= GOVD_PRIVATE_OUTER_REF;
5558 omp_add_variable (ctx, decl, flags);
5560 shared = (flags & GOVD_SHARED) != 0;
5561 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
/* Existing entry: first use in real code of a variable-sized decl also
   marks its pointer replacement variable as SEEN.  */
5565 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5566 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5568 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5571 tree t = DECL_VALUE_EXPR (decl);
5572 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5573 t = TREE_OPERAND (t, 0);
5574 gcc_assert (DECL_P (t));
5575 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5576 n2->value |= GOVD_SEEN;
5579 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5580 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5582 /* If nothing changed, there's nothing left to do. */
5583 if ((n->value & flags) == flags)
5589 /* If the variable is private in the current context, then we don't
5590 need to propagate anything to an outer context. */
5591 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5593 if (ctx->outer_context
5594 && omp_notice_variable (ctx->outer_context, decl, in_code))
5599 /* Verify that DECL is private within CTX. If there's specific information
5600 to the contrary in the innermost scope, generate an error. */
5603 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
5607 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
/* A SHARED iteration variable is an error; in the innermost context we
   recover by forcing it PRIVATE.  */
5610 if (n->value & GOVD_SHARED)
5612 if (ctx == gimplify_omp_ctxp)
5614 error ("iteration variable %qE should be private",
5616 n->value = GOVD_PRIVATE;
/* Explicit clauses on the construct itself (or on the combined
   parallel directly enclosing it) must not be firstprivate or
   reduction for an iteration variable.  */
5622 else if ((n->value & GOVD_EXPLICIT) != 0
5623 && (ctx == gimplify_omp_ctxp
5624 || (ctx->region_type == ORT_COMBINED_PARALLEL
5625 && gimplify_omp_ctxp->outer_context == ctx)))
5627 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5628 error ("iteration variable %qE should not be firstprivate",
5630 else if ((n->value & GOVD_REDUCTION) != 0)
5631 error ("iteration variable %qE should not be reduction",
5634 return (ctx == gimplify_omp_ctxp
5635 || (ctx->region_type == ORT_COMBINED_PARALLEL
5636 && gimplify_omp_ctxp->outer_context == ctx));
/* No entry here: only workshare regions defer to the enclosing
   context.  */
5639 if (ctx->region_type != ORT_WORKSHARE)
5641 else if (ctx->outer_context)
5642 return omp_is_private (ctx->outer_context, decl);
5646 /* Return true if DECL is private within a parallel region
5647 that binds to the current construct's context or in parallel
5648 region's REDUCTION clause. */
5651 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
/* Skip outward past workshare regions to the binding parallel.  */
5657 ctx = ctx->outer_context;
/* No binding region found: locals are private by default, but globals
   and by-reference privatized decls may still be shared.  */
5659 return !(is_global_var (decl)
5660 /* References might be private, but might be shared too. */
5661 || lang_hooks.decls.omp_privatize_by_reference (decl));
/* DECL is private here iff its entry lacks the SHARED bit.  */
5663 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5665 return (n->value & GOVD_SHARED) == 0;
5667 while (ctx->region_type == ORT_WORKSHARE);
5671 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5672 and previous omp contexts. */
5675 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5676 enum omp_region_type region_type)
5678 struct gimplify_omp_ctx *ctx, *outer_ctx;
5679 struct gimplify_ctx gctx;
5682 ctx = new_omp_context (region_type);
5683 outer_ctx = ctx->outer_context;
5685 while ((c = *list_p) != NULL)
5687 bool remove = false;
5688 bool notice_outer = true;
5689 const char *check_non_private = NULL;
5693 switch (OMP_CLAUSE_CODE (c))
5695 case OMP_CLAUSE_PRIVATE:
5696 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5697 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5699 flags |= GOVD_PRIVATE_OUTER_REF;
5700 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5703 notice_outer = false;
5705 case OMP_CLAUSE_SHARED:
5706 flags = GOVD_SHARED | GOVD_EXPLICIT;
5708 case OMP_CLAUSE_FIRSTPRIVATE:
5709 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5710 check_non_private = "firstprivate";
5712 case OMP_CLAUSE_LASTPRIVATE:
5713 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5714 check_non_private = "lastprivate";
5716 case OMP_CLAUSE_REDUCTION:
5717 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5718 check_non_private = "reduction";
5722 decl = OMP_CLAUSE_DECL (c);
5723 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5728 omp_add_variable (ctx, decl, flags);
5729 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5730 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5732 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
5733 GOVD_LOCAL | GOVD_SEEN);
5734 gimplify_omp_ctxp = ctx;
5735 push_gimplify_context (&gctx);
5737 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
5738 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();
5740 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
5741 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
5742 pop_gimplify_context
5743 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
5744 push_gimplify_context (&gctx);
5745 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
5746 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5747 pop_gimplify_context
5748 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
5749 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
5750 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
5752 gimplify_omp_ctxp = outer_ctx;
5754 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5755 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
5757 gimplify_omp_ctxp = ctx;
5758 push_gimplify_context (&gctx);
5759 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
5761 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
5763 TREE_SIDE_EFFECTS (bind) = 1;
5764 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
5765 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
5767 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
5768 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5769 pop_gimplify_context
5770 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
5771 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
5773 gimplify_omp_ctxp = outer_ctx;
5779 case OMP_CLAUSE_COPYIN:
5780 case OMP_CLAUSE_COPYPRIVATE:
5781 decl = OMP_CLAUSE_DECL (c);
5782 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5789 omp_notice_variable (outer_ctx, decl, true);
5790 if (check_non_private
5791 && region_type == ORT_WORKSHARE
5792 && omp_check_private (ctx, decl))
5794 error ("%s variable %qE is private in outer context",
5795 check_non_private, DECL_NAME (decl));
5801 OMP_CLAUSE_OPERAND (c, 0)
5802 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
5805 case OMP_CLAUSE_SCHEDULE:
5806 case OMP_CLAUSE_NUM_THREADS:
5807 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
5808 is_gimple_val, fb_rvalue) == GS_ERROR)
5812 case OMP_CLAUSE_NOWAIT:
5813 case OMP_CLAUSE_ORDERED:
5814 case OMP_CLAUSE_UNTIED:
5815 case OMP_CLAUSE_COLLAPSE:
5818 case OMP_CLAUSE_DEFAULT:
5819 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
5827 *list_p = OMP_CLAUSE_CHAIN (c);
5829 list_p = &OMP_CLAUSE_CHAIN (c);
5832 gimplify_omp_ctxp = ctx;
5835 /* For all variables that were not actually used within the context,
5836 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
/* Splay-tree foreach callback (see splay_tree_foreach in
   gimplify_adjust_omp_clauses): materializes one implicit data-sharing
   clause for the variable N->key and prepends it to the clause list
   passed through DATA.  NOTE(review): extract is non-contiguous; the
   early-return for explicit/unseen entries and parts of the global-var
   walk are missing from view.  */
5839 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
5841 tree *list_p = (tree *) data;
5842 tree decl = (tree) n->key;
5843 unsigned flags = n->value;
5844 enum omp_clause_code code;
/* Explicit clauses were written by the user and local decls need no
   clause; unseen variables likewise get nothing (the bodies of these
   tests are not visible here).  */
5848 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
5850 if ((flags & GOVD_SEEN) == 0)
/* GOVD_DEBUG_PRIVATE: variable is really shared but presented as
   private for debug-friendliness; must be classed GOVD_PRIVATE.  */
5852 if (flags & GOVD_DEBUG_PRIVATE)
5854 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
5855 private_debug = true;
5859 = lang_hooks.decls.omp_private_debug_clause (decl,
5860 !!(flags & GOVD_SHARED));
5862 code = OMP_CLAUSE_PRIVATE;
/* Shared globals: walk outward looking for an enclosing context where
   the global was privatized (first/last/private/reduction).  */
5863 else if (flags & GOVD_SHARED)
5865 if (is_global_var (decl))
5867 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
5871 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5872 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
5873 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
5875 ctx = ctx->outer_context;
5880 code = OMP_CLAUSE_SHARED;
5881 else if (flags & GOVD_PRIVATE)
5883 code = OMP_CLAUSE_PRIVATE;
5884 else if (flags & GOVD_FIRSTPRIVATE)
5885 code = OMP_CLAUSE_FIRSTPRIVATE;
/* Build the implicit clause and push it on the front of the list.  */
5889 clause = build_omp_clause (input_location, code);
5890 OMP_CLAUSE_DECL (clause) = decl;
5891 OMP_CLAUSE_CHAIN (clause) = *list_p;
5893 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
5894 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
5895 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
/* Give the front end a chance to finalize the new clause.  */
5897 lang_hooks.decls.omp_finish_clause (clause);
/* Post-pass over the clause list *LIST_P after the region body has been
   gimplified: drop explicit clauses whose variable was never actually
   used, append implicit data-sharing clauses, and tear down the current
   omp context.  NOTE(review): extract is non-contiguous; several case
   bodies and the remove/advance branching are partially missing.  */
5903 gimplify_adjust_omp_clauses (tree *list_p)
5905 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
5908 while ((c = *list_p) != NULL)
5911 bool remove = false;
5913 switch (OMP_CLAUSE_CODE (c))
5915 case OMP_CLAUSE_PRIVATE:
5916 case OMP_CLAUSE_SHARED:
5917 case OMP_CLAUSE_FIRSTPRIVATE:
5918 decl = OMP_CLAUSE_DECL (c);
/* The decl was recorded by gimplify_scan_omp_clauses, so the lookup
   is expected to succeed; a clause for an unused (not GOVD_SEEN)
   variable is dropped.  */
5919 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5920 remove = !(n->value & GOVD_SEEN);
/* Optionally demote a shared clause to a debug-private one when the
   front end says that reads better in the debugger.  */
5923 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
5924 if ((n->value & GOVD_DEBUG_PRIVATE)
5925 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
5927 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
5928 || ((n->value & GOVD_DATA_SHARE_CLASS)
5930 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
5931 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
5936 case OMP_CLAUSE_LASTPRIVATE:
5937 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
5938 accurately reflect the presence of a FIRSTPRIVATE clause. */
5939 decl = OMP_CLAUSE_DECL (c);
5940 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5941 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
5942 = (n->value & GOVD_FIRSTPRIVATE) != 0;
/* These clauses need no adjustment here.  */
5945 case OMP_CLAUSE_REDUCTION:
5946 case OMP_CLAUSE_COPYIN:
5947 case OMP_CLAUSE_COPYPRIVATE:
5949 case OMP_CLAUSE_NUM_THREADS:
5950 case OMP_CLAUSE_SCHEDULE:
5951 case OMP_CLAUSE_NOWAIT:
5952 case OMP_CLAUSE_ORDERED:
5953 case OMP_CLAUSE_DEFAULT:
5954 case OMP_CLAUSE_UNTIED:
5955 case OMP_CLAUSE_COLLAPSE:
/* Unlink removed clauses; otherwise step to the next link.  */
5963 *list_p = OMP_CLAUSE_CHAIN (c);
5965 list_p = &OMP_CLAUSE_CHAIN (c);
5968 /* Add in any implicit data sharing. */
5969 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
/* Pop back to the enclosing omp context and free this one.  */
5971 gimplify_omp_ctxp = ctx->outer_context;
5972 delete_omp_context (ctx);
5975 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
5976 gimplification of the body, as well as scanning the body for used
5977 variables. We need to do this scan now, because variable-sized
5978 decls will be decomposed during gimplification. */
/* Lowers *EXPR_P (an OMP_PARALLEL tree) into a GIMPLE_OMP_PARALLEL
   statement appended to PRE_P; *EXPR_P is cleared on exit.
   NOTE(review): extract is non-contiguous; a few lines are missing.  */
5981 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
5983 tree expr = *expr_p;
5985 gimple_seq body = NULL;
5986 struct gimplify_ctx gctx;
/* Scan clauses first, opening the omp context (combined parallel
   work-sharing regions get ORT_COMBINED_PARALLEL).  */
5988 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
5989 OMP_PARALLEL_COMBINED (expr)
5990 ? ORT_COMBINED_PARALLEL
/* Gimplify the body in a fresh gimplify context; if the result is a
   single GIMPLE_BIND, pop with it so its temporaries are recorded
   there, else pop with no owning statement.  */
5993 push_gimplify_context (&gctx);
5995 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
5996 if (gimple_code (g) == GIMPLE_BIND)
5997 pop_gimplify_context (g);
5999 pop_gimplify_context (NULL);
/* Finalize the clause list (drop unused, add implicit sharing).  */
6001 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6003 g = gimple_build_omp_parallel (body,
6004 OMP_PARALLEL_CLAUSES (expr),
6005 NULL_TREE, NULL_TREE);
6006 if (OMP_PARALLEL_COMBINED (expr))
6007 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6008 gimplify_seq_add_stmt (pre_p, g);
6009 *expr_p = NULL_TREE;
6012 /* Gimplify the contents of an OMP_TASK statement. This involves
6013 gimplification of the body, as well as scanning the body for used
6014 variables. We need to do this scan now, because variable-sized
6015 decls will be decomposed during gimplification. */
/* Lowers *EXPR_P (an OMP_TASK tree) into a GIMPLE_OMP_TASK statement
   appended to PRE_P; *EXPR_P is cleared on exit.  Mirrors
   gimplify_omp_parallel but with region type ORT_TASK.  */
6018 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6020 tree expr = *expr_p;
6022 gimple_seq body = NULL;
6023 struct gimplify_ctx gctx;
6025 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, ORT_TASK);
/* Gimplify the task body in its own gimplify context; pop with the
   resulting bind when there is one so temporaries land in it.  */
6027 push_gimplify_context (&gctx);
6029 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6030 if (gimple_code (g) == GIMPLE_BIND)
6031 pop_gimplify_context (g);
6033 pop_gimplify_context (NULL);
6035 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
/* The extra NULL_TREE operands (child fn, data arg, copy fn, arg
   size/align) are filled in by later omp lowering/expansion.  */
6037 g = gimple_build_omp_task (body,
6038 OMP_TASK_CLAUSES (expr),
6039 NULL_TREE, NULL_TREE,
6040 NULL_TREE, NULL_TREE, NULL_TREE);
6041 gimplify_seq_add_stmt (pre_p, g);
6042 *expr_p = NULL_TREE;
6045 /* Gimplify the gross structure of an OMP_FOR statement. */
/* Lowers *EXPR_P (an OMP_FOR tree, possibly with collapsed loops) into a
   GIMPLE_OMP_FOR appended to PRE_P.  Returns GS_ALL_DONE or GS_ERROR.
   NOTE(review): extract is non-contiguous; some statements (e.g. the
   assignment of FOR_STMT, a few braces/breaks) are missing from view.  */
6047 static enum gimplify_status
6048 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
6050 tree for_stmt, decl, var, t;
6051 enum gimplify_status ret = GS_ALL_DONE;
6052 enum gimplify_status tret;
6054 gimple_seq for_body, for_pre_body;
6059 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6062 /* Handle OMP_FOR_INIT. */
6063 for_pre_body = NULL;
6064 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6065 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
/* INIT, COND and INCR are parallel TREE_VECs, one slot per collapsed
   loop level.  */
6067 for_body = gimple_seq_alloc ();
6068 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6069 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6070 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6071 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6072 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6074 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6075 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6076 decl = TREE_OPERAND (t, 0);
6077 gcc_assert (DECL_P (decl));
6078 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6079 || POINTER_TYPE_P (TREE_TYPE (decl)));
6081 /* Make sure the iteration variable is private. */
6082 if (omp_is_private (gimplify_omp_ctxp, decl))
6083 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6085 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6087 /* If DECL is not a gimple register, create a temporary variable to act
6088 as an iteration counter. This is valid, since DECL cannot be
6089 modified in the body of the loop. */
6090 if (!is_gimple_reg (decl))
6092 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6093 TREE_OPERAND (t, 0) = var;
/* Keep DECL in sync with the surrogate VAR inside the body.  */
6095 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6097 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
/* Gimplify the initial value to a gimple val.  */
6102 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6103 is_gimple_val, fb_rvalue);
6104 ret = MIN (ret, tret);
6105 if (ret == GS_ERROR)
6108 /* Handle OMP_FOR_COND. */
6109 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6110 gcc_assert (COMPARISON_CLASS_P (t));
6111 gcc_assert (TREE_OPERAND (t, 0) == decl);
6113 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6114 is_gimple_val, fb_rvalue);
6115 ret = MIN (ret, tret);
6117 /* Handle OMP_FOR_INCR. */
6118 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6119 switch (TREE_CODE (t))
/* Canonicalize ++/-- into VAR = VAR + (+/-1) MODIFY_EXPR form.  */
6121 case PREINCREMENT_EXPR:
6122 case POSTINCREMENT_EXPR:
6123 t = build_int_cst (TREE_TYPE (decl), 1);
6124 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6125 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6126 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6129 case PREDECREMENT_EXPR:
6130 case POSTDECREMENT_EXPR:
6131 t = build_int_cst (TREE_TYPE (decl), -1);
6132 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6133 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6134 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
/* MODIFY_EXPR increment: retarget it at the surrogate VAR, and
   normalize DECL's position within the RHS.  */
6138 gcc_assert (TREE_OPERAND (t, 0) == decl);
6139 TREE_OPERAND (t, 0) = var;
6141 t = TREE_OPERAND (t, 1);
6142 switch (TREE_CODE (t))
/* DECL on the RHS's second operand: swap operands so the iteration
   variable comes first (commutative PLUS — TODO confirm, the case
   label itself is not visible here).  */
6145 if (TREE_OPERAND (t, 1) == decl)
6147 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6148 TREE_OPERAND (t, 0) = var;
6154 case POINTER_PLUS_EXPR:
6155 gcc_assert (TREE_OPERAND (t, 0) == decl);
6156 TREE_OPERAND (t, 0) = var;
/* Gimplify the step expression to a gimple val.  */
6162 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6163 is_gimple_val, fb_rvalue);
6164 ret = MIN (ret, tret);
/* When a surrogate VAR was introduced (or loops are collapsed), a
   lastprivate clause on DECL needs an explicit final-value
   computation expressed in terms of DECL itself.  */
6171 if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6174 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
6175 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6176 && OMP_CLAUSE_DECL (c) == decl
6177 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6179 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6180 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6181 gcc_assert (TREE_OPERAND (t, 0) == var);
6182 t = TREE_OPERAND (t, 1);
6183 gcc_assert (TREE_CODE (t) == PLUS_EXPR
6184 || TREE_CODE (t) == MINUS_EXPR
6185 || TREE_CODE (t) == POINTER_PLUS_EXPR);
6186 gcc_assert (TREE_OPERAND (t, 0) == var);
6187 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6188 TREE_OPERAND (t, 1));
6189 gimplify_assign (decl, t,
6190 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
/* Body, clause finalization, and construction of the GIMPLE_OMP_FOR.  */
6195 gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);
6197 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
6199 gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
6200 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
/* Copy per-level index/initial/cond/final/incr into the new stmt.  */
6203 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6205 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6206 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6207 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6208 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6209 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6210 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6211 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6212 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6215 gimplify_seq_add_stmt (pre_p, gfor);
6216 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
6219 /* Gimplify the gross structure of other OpenMP worksharing constructs.
6220 In particular, OMP_SECTIONS and OMP_SINGLE. */
/* Lowers *EXPR_P into a GIMPLE_OMP_SECTIONS or GIMPLE_OMP_SINGLE
   appended to PRE_P, scanning and finalizing its clauses around the
   gimplification of the body.  */
6223 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6225 tree expr = *expr_p;
6227 gimple_seq body = NULL;
6229 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6230 gimplify_and_add (OMP_BODY (expr), &body);
6231 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
/* Only these two tree codes are expected here (see function comment).  */
6233 if (TREE_CODE (expr) == OMP_SECTIONS)
6234 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6235 else if (TREE_CODE (expr) == OMP_SINGLE)
6236 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6240 gimplify_seq_add_stmt (pre_p, stmt);
6243 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
6244 stabilized the lhs of the atomic operation as *ADDR. Return true if
6245 EXPR is this stabilized form. */
/* NOTE(review): extract is non-contiguous; the loop header around the
   conversion-stripping below is only partially visible.  */
6248 goa_lhs_expr_p (tree expr, tree addr)
6250 /* Also include casts to other type variants. The C front end is fond
6251 of adding these for e.g. volatile variables. This is like
6252 STRIP_TYPE_NOPS but includes the main variant lookup. */
6253 STRIP_USELESS_TYPE_CONVERSION (expr);
/* An INDIRECT_REF of something equivalent to ADDR is the stabilized
   lhs.  Peel matching conversions off both EXPR and ADDR in lockstep,
   requiring compatible types at each step.  */
6255 if (TREE_CODE (expr) == INDIRECT_REF)
6257 expr = TREE_OPERAND (expr, 0);
6259 && (CONVERT_EXPR_P (expr)
6260 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6261 && TREE_CODE (expr) == TREE_CODE (addr)
6262 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
6264 expr = TREE_OPERAND (expr, 0);
6265 addr = TREE_OPERAND (addr, 0);
/* Match either identical ADDR_EXPRs of the same object...  */
6269 return (TREE_CODE (addr) == ADDR_EXPR
6270 && TREE_CODE (expr) == ADDR_EXPR
6271 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
/* ...or EXPR being exactly the object ADDR takes the address of.  */
6273 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6278 /* Walk *EXPR_P and replace
6279 appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
6280 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
6281 a subexpression, 0 if it did not, or -1 if an error was encountered. */
/* NOTE(review): extract is non-contiguous; the lhs-replacement branch and
   the saw_lhs bookkeeping around the final gimplification are partially
   missing from view.  */
6284 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6287 tree expr = *expr_p;
/* The stabilized lhs itself needs no further work here.  */
6290 if (goa_lhs_expr_p (expr, lhs_addr))
6295 if (is_gimple_val (expr))
/* Recurse into operands by tree-code class, right operand first.  */
6299 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6302 case tcc_comparison:
6303 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6306 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6309 case tcc_expression:
6310 switch (TREE_CODE (expr))
6312 case TRUTH_ANDIF_EXPR:
6313 case TRUTH_ORIF_EXPR:
6314 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6316 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
/* Subexpressions not involving the lhs are evaluated into temps by
   gimplifying them to vals; GS failure maps to the -1 return.  */
6329 enum gimplify_status gs;
6330 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6331 if (gs != GS_ALL_DONE)
6339 /* Gimplify an OMP_ATOMIC statement. */
/* Lowers *EXPR_P (operand 0: stabilized address of the lhs; operand 1:
   the rhs expression) into a GIMPLE_OMP_ATOMIC_LOAD of the lhs into a
   fresh temporary register, followed by a GIMPLE_OMP_ATOMIC_STORE of the
   stabilized rhs, both appended to PRE_P.  */
6341 static enum gimplify_status
6342 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6344 tree addr = TREE_OPERAND (*expr_p, 0);
6345 tree rhs = TREE_OPERAND (*expr_p, 1);
6346 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6349 tmp_load = create_tmp_reg (type, NULL);
/* Replace uses of the lhs inside RHS with TMP_LOAD; a negative return
   signals an error during stabilization.  */
6350 if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6353 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6357 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_load (tmp_load, addr));
6358 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6361 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_store (rhs));
6368 /* Converts the GENERIC expression tree *EXPR_P to GIMPLE. If the
6369 expression produces a value to be used as an operand inside a GIMPLE
6370 statement, the value will be stored back in *EXPR_P. This value will
6371 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6372 an SSA_NAME. The corresponding sequence of GIMPLE statements is
6373 emitted in PRE_P and POST_P.
6375 Additionally, this process may overwrite parts of the input
6376 expression during gimplification. Ideally, it should be
6377 possible to do non-destructive gimplification.
6379 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
6380 the expression needs to evaluate to a value to be used as
6381 an operand in a GIMPLE statement, this value will be stored in
6382 *EXPR_P on exit. This happens when the caller specifies one
6383 of fb_lvalue or fb_rvalue fallback flags.
6385 PRE_P will contain the sequence of GIMPLE statements corresponding
6386 to the evaluation of EXPR and all the side-effects that must
6387 be executed before the main expression. On exit, the last
6388 statement of PRE_P is the core statement being gimplified. For
6389 instance, when gimplifying 'if (++a)' the last statement in
6390 PRE_P will be 'if (t.1)' where t.1 is the result of
6391 pre-incrementing 'a'.
6393 POST_P will contain the sequence of GIMPLE statements corresponding
6394 to the evaluation of all the side-effects that must be executed
6395 after the main expression. If this is NULL, the post
6396 side-effects are stored at the end of PRE_P.
6398 The reason why the output is split in two is to handle post
6399 side-effects explicitly. In some cases, an expression may have
6400 inner and outer post side-effects which need to be emitted in
6401 an order different from the one given by the recursive
6402 traversal. For instance, for the expression (*p--)++ the post
6403 side-effects of '--' must actually occur *after* the post
6404 side-effects of '++'. However, gimplification will first visit
6405 the inner expression, so if a separate POST sequence was not
6406 used, the resulting sequence would be:
6413 However, the post-decrement operation in line #2 must not be
6414 evaluated until after the store to *p at line #4, so the
6415 correct sequence should be:
6422 So, by specifying a separate post queue, it is possible
6423 to emit the post side-effects in the correct order.
6424 If POST_P is NULL, an internal queue will be used. Before
6425 returning to the caller, the sequence POST_P is appended to
6426 the main output sequence PRE_P.
6428 GIMPLE_TEST_F points to a function that takes a tree T and
6429 returns nonzero if T is in the GIMPLE form requested by the
6430 caller. The GIMPLE predicates are in tree-gimple.c.
6432 FALLBACK tells the function what sort of a temporary we want if
6433 gimplification cannot produce an expression that complies with
6436 fb_none means that no temporary should be generated
6437 fb_rvalue means that an rvalue is OK to generate
6438 fb_lvalue means that an lvalue is OK to generate
6439 fb_either means that either is OK, but an lvalue is preferable.
6440 fb_mayfail means that gimplification may fail (in which case
6441 GS_ERROR will be returned)
6443 The return value is either GS_ERROR or GS_ALL_DONE, since this
6444 function iterates until EXPR is completely gimplified or an error
6447 enum gimplify_status
6448 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6449 bool (*gimple_test_f) (tree), fallback_t fallback)
6452 gimple_seq internal_pre = NULL;
6453 gimple_seq internal_post = NULL;
6456 location_t saved_location;
6457 enum gimplify_status ret;
6458 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6460 save_expr = *expr_p;
6461 if (save_expr == NULL_TREE)
6464 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
6465 is_statement = gimple_test_f == is_gimple_stmt;
6469 /* Consistency checks. */
6470 if (gimple_test_f == is_gimple_reg)
6471 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6472 else if (gimple_test_f == is_gimple_val
6473 || gimple_test_f == is_gimple_call_addr
6474 || gimple_test_f == is_gimple_condexpr
6475 || gimple_test_f == is_gimple_mem_rhs
6476 || gimple_test_f == is_gimple_mem_rhs_or_call
6477 || gimple_test_f == is_gimple_reg_rhs
6478 || gimple_test_f == is_gimple_reg_rhs_or_call
6479 || gimple_test_f == is_gimple_asm_val)
6480 gcc_assert (fallback & fb_rvalue);
6481 else if (gimple_test_f == is_gimple_min_lval
6482 || gimple_test_f == is_gimple_lvalue)
6483 gcc_assert (fallback & fb_lvalue);
6484 else if (gimple_test_f == is_gimple_addressable)
6485 gcc_assert (fallback & fb_either);
6486 else if (gimple_test_f == is_gimple_stmt)
6487 gcc_assert (fallback == fb_none);
6490 /* We should have recognized the GIMPLE_TEST_F predicate to
6491 know what kind of fallback to use in case a temporary is
6492 needed to hold the value or address of *EXPR_P. */
6496 /* We used to check the predicate here and return immediately if it
6497 succeeds. This is wrong; the design is for gimplification to be
6498 idempotent, and for the predicates to only test for valid forms, not
6499 whether they are fully simplified. */
6501 pre_p = &internal_pre;
6504 post_p = &internal_post;
6506 /* Remember the last statements added to PRE_P and POST_P. Every
6507 new statement added by the gimplification helpers needs to be
6508 annotated with location information. To centralize the
6509 responsibility, we remember the last statement that had been
6510 added to both queues before gimplifying *EXPR_P. If
6511 gimplification produces new statements in PRE_P and POST_P, those
6512 statements will be annotated with the same location information
6514 pre_last_gsi = gsi_last (*pre_p);
6515 post_last_gsi = gsi_last (*post_p);
6517 saved_location = input_location;
6518 if (save_expr != error_mark_node
6519 && EXPR_HAS_LOCATION (*expr_p))
6520 input_location = EXPR_LOCATION (*expr_p);
6522 /* Loop over the specific gimplifiers until the toplevel node
6523 remains the same. */
6526 /* Strip away as many useless type conversions as possible
6528 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6530 /* Remember the expr. */
6531 save_expr = *expr_p;
6533 /* Die, die, die, my darling. */
6534 if (save_expr == error_mark_node
6535 || (TREE_TYPE (save_expr)
6536 && TREE_TYPE (save_expr) == error_mark_node))
6542 /* Do any language-specific gimplification. */
6543 ret = ((enum gimplify_status)
6544 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6547 if (*expr_p == NULL_TREE)
6549 if (*expr_p != save_expr)
6552 else if (ret != GS_UNHANDLED)
6556 switch (TREE_CODE (*expr_p))
6558 /* First deal with the special cases. */
6560 case POSTINCREMENT_EXPR:
6561 case POSTDECREMENT_EXPR:
6562 case PREINCREMENT_EXPR:
6563 case PREDECREMENT_EXPR:
6564 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
6565 fallback != fb_none);
6569 case ARRAY_RANGE_REF:
6573 case VIEW_CONVERT_EXPR:
6574 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
6575 fallback ? fallback : fb_rvalue);
6579 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
6581 /* C99 code may assign to an array in a structure value of a
6582 conditional expression, and this has undefined behavior
6583 only on execution, so create a temporary if an lvalue is
6585 if (fallback == fb_lvalue)
6587 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6588 mark_addressable (*expr_p);
6593 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
6595 /* C99 code may assign to an array in a structure returned
6596 from a function, and this has undefined behavior only on
6597 execution, so create a temporary if an lvalue is
6599 if (fallback == fb_lvalue)
6601 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6602 mark_addressable (*expr_p);
6610 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
6613 case COMPOUND_LITERAL_EXPR:
6614 ret = gimplify_compound_literal_expr (expr_p, pre_p);
6619 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
6620 fallback != fb_none);
6623 case TRUTH_ANDIF_EXPR:
6624 case TRUTH_ORIF_EXPR:
6625 /* Pass the source location of the outer expression. */
6626 ret = gimplify_boolean_expr (expr_p, saved_location);
6629 case TRUTH_NOT_EXPR:
6630 if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE)
6632 tree type = TREE_TYPE (*expr_p);
6633 *expr_p = fold_convert (type, gimple_boolify (*expr_p));
6638 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6639 is_gimple_val, fb_rvalue);
6640 recalculate_side_effects (*expr_p);
6644 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
6648 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6652 if (IS_EMPTY_STMT (*expr_p))
6658 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
6659 || fallback == fb_none)
6661 /* Just strip a conversion to void (or in void context) and
6663 *expr_p = TREE_OPERAND (*expr_p, 0);
6667 ret = gimplify_conversion (expr_p);
6668 if (ret == GS_ERROR)
6670 if (*expr_p != save_expr)
6674 case FIX_TRUNC_EXPR:
6675 /* unary_expr: ... | '(' cast ')' val | ... */
6676 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6677 is_gimple_val, fb_rvalue);
6678 recalculate_side_effects (*expr_p);
6682 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
6683 if (*expr_p != save_expr)
6685 /* else fall through. */
6686 case ALIGN_INDIRECT_REF:
6687 case MISALIGNED_INDIRECT_REF:
6688 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6689 is_gimple_reg, fb_rvalue);
6690 recalculate_side_effects (*expr_p);
6693 /* Constants need not be gimplified. */
6704 /* If we require an lvalue, such as for ADDR_EXPR, retain the
6705 CONST_DECL node. Otherwise the decl is replaceable by its
6707 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
6708 if (fallback & fb_lvalue)
6711 *expr_p = DECL_INITIAL (*expr_p);
6715 ret = gimplify_decl_expr (expr_p, pre_p);
6719 ret = gimplify_bind_expr (expr_p, pre_p);
6723 ret = gimplify_loop_expr (expr_p, pre_p);
6727 ret = gimplify_switch_expr (expr_p, pre_p);
6731 ret = gimplify_exit_expr (expr_p);
6735 /* If the target is not LABEL, then it is a computed jump
6736 and the target needs to be gimplified. */
6737 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
6739 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
6740 NULL, is_gimple_val, fb_rvalue);
6741 if (ret == GS_ERROR)
6744 gimplify_seq_add_stmt (pre_p,
6745 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
6749 gimplify_seq_add_stmt (pre_p,
6750 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
6751 PREDICT_EXPR_OUTCOME (*expr_p)));
6757 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
6758 == current_function_decl);
6759 gimplify_seq_add_stmt (pre_p,
6760 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6763 case CASE_LABEL_EXPR:
6764 ret = gimplify_case_label_expr (expr_p, pre_p);
6768 ret = gimplify_return_expr (*expr_p, pre_p);
6772 /* Don't reduce this in place; let gimplify_init_constructor work its
6773 magic. Buf if we're just elaborating this for side effects, just
6774 gimplify any element that has side-effects. */
6775 if (fallback == fb_none)
6777 unsigned HOST_WIDE_INT ix;
6778 constructor_elt *ce;
6779 tree temp = NULL_TREE;
6781 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
6784 if (TREE_SIDE_EFFECTS (ce->value))
6785 append_to_statement_list (ce->value, &temp);
6790 /* C99 code may assign to an array in a constructed
6791 structure or union, and this has undefined behavior only
6792 on execution, so create a temporary if an lvalue is
6794 else if (fallback == fb_lvalue)
6796 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6797 mark_addressable (*expr_p);
6803 /* The following are special cases that are not handled by the
6804 original GIMPLE grammar. */
6806 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
6809 ret = gimplify_save_expr (expr_p, pre_p, post_p);
6814 enum gimplify_status r0, r1, r2;
6816 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6817 post_p, is_gimple_lvalue, fb_either);
6818 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6819 post_p, is_gimple_val, fb_rvalue);
6820 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
6821 post_p, is_gimple_val, fb_rvalue);
6822 recalculate_side_effects (*expr_p);
6824 ret = MIN (r0, MIN (r1, r2));
6828 case TARGET_MEM_REF:
6830 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
6832 if (TMR_SYMBOL (*expr_p))
6833 r0 = gimplify_expr (&TMR_SYMBOL (*expr_p), pre_p,
6834 post_p, is_gimple_lvalue, fb_either);
6835 else if (TMR_BASE (*expr_p))
6836 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
6837 post_p, is_gimple_val, fb_either);
6838 if (TMR_INDEX (*expr_p))
6839 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
6840 post_p, is_gimple_val, fb_rvalue);
6841 /* TMR_STEP and TMR_OFFSET are always integer constants. */
6846 case NON_LVALUE_EXPR:
6847 /* This should have been stripped above. */
6851 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
6854 case TRY_FINALLY_EXPR:
6855 case TRY_CATCH_EXPR:
6857 gimple_seq eval, cleanup;
6860 eval = cleanup = NULL;
6861 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
6862 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
6863 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
6864 if (gimple_seq_empty_p (cleanup))
6866 gimple_seq_add_seq (pre_p, eval);
6870 try_ = gimple_build_try (eval, cleanup,
6871 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
6872 ? GIMPLE_TRY_FINALLY
6873 : GIMPLE_TRY_CATCH);
6874 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
6875 gimple_try_set_catch_is_cleanup (try_,
6876 TRY_CATCH_IS_CLEANUP (*expr_p));
6877 gimplify_seq_add_stmt (pre_p, try_);
6882 case CLEANUP_POINT_EXPR:
6883 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
6887 ret = gimplify_target_expr (expr_p, pre_p, post_p);
6893 gimple_seq handler = NULL;
6894 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
6895 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
6896 gimplify_seq_add_stmt (pre_p, c);
6901 case EH_FILTER_EXPR:
6904 gimple_seq failure = NULL;
6906 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
6907 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
6908 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
6909 gimplify_seq_add_stmt (pre_p, ehf);
6916 enum gimplify_status r0, r1;
6917 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
6918 post_p, is_gimple_val, fb_rvalue);
6919 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
6920 post_p, is_gimple_val, fb_rvalue);
6921 TREE_SIDE_EFFECTS (*expr_p) = 0;
6927 /* We get here when taking the address of a label. We mark
6928 the label as "forced"; meaning it can never be removed and
6929 it is a potential target for any computed goto. */
6930 FORCED_LABEL (*expr_p) = 1;
6934 case STATEMENT_LIST:
6935 ret = gimplify_statement_list (expr_p, pre_p);
6938 case WITH_SIZE_EXPR:
6940 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6941 post_p == &internal_post ? NULL : post_p,
6942 gimple_test_f, fallback);
6943 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
6944 is_gimple_val, fb_rvalue);
6950 ret = gimplify_var_or_parm_decl (expr_p);
6954 /* When within an OpenMP context, notice uses of variables. */
6955 if (gimplify_omp_ctxp)
6956 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
6961 /* Allow callbacks into the gimplifier during optimization. */
6966 gimplify_omp_parallel (expr_p, pre_p);
6971 gimplify_omp_task (expr_p, pre_p);
6976 ret = gimplify_omp_for (expr_p, pre_p);
6981 gimplify_omp_workshare (expr_p, pre_p);
6990 gimple_seq body = NULL;
6993 gimplify_and_add (OMP_BODY (*expr_p), &body);
6994 switch (TREE_CODE (*expr_p))
6997 g = gimple_build_omp_section (body);
7000 g = gimple_build_omp_master (body);
7003 g = gimple_build_omp_ordered (body);
7006 g = gimple_build_omp_critical (body,
7007 OMP_CRITICAL_NAME (*expr_p));
7012 gimplify_seq_add_stmt (pre_p, g);
7018 ret = gimplify_omp_atomic (expr_p, pre_p);
7021 case POINTER_PLUS_EXPR:
7022 /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
7023 The second is gimple immediate saving a need for extra statement.
7025 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7026 && (tmp = maybe_fold_offset_to_address
7027 (EXPR_LOCATION (*expr_p),
7028 TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
7029 TREE_TYPE (*expr_p))))
7034 /* Convert (void *)&a + 4 into (void *)&a[1]. */
7035 if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
7036 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7037 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
7039 && (tmp = maybe_fold_offset_to_address
7040 (EXPR_LOCATION (*expr_p),
7041 TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
7042 TREE_OPERAND (*expr_p, 1),
7043 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
7046 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
7052 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
7054 case tcc_comparison:
7055 /* Handle comparison of objects of non scalar mode aggregates
7056 with a call to memcmp. It would be nice to only have to do
7057 this for variable-sized objects, but then we'd have to allow
7058 the same nest of reference nodes we allow for MODIFY_EXPR and
7061 Compare scalar mode aggregates as scalar mode values. Using
7062 memcmp for them would be very inefficient at best, and is
7063 plain wrong if bitfields are involved. */
7065 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
7067 if (!AGGREGATE_TYPE_P (type))
7069 else if (TYPE_MODE (type) != BLKmode)
7070 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
7072 ret = gimplify_variable_sized_compare (expr_p);
7077 /* If *EXPR_P does not need to be special-cased, handle it
7078 according to its class. */
7080 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7081 post_p, is_gimple_val, fb_rvalue);
7087 enum gimplify_status r0, r1;
7089 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7090 post_p, is_gimple_val, fb_rvalue);
7091 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7092 post_p, is_gimple_val, fb_rvalue);
7098 case tcc_declaration:
7101 goto dont_recalculate;
7104 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
7105 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
7106 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
7110 recalculate_side_effects (*expr_p);
7116 /* If we replaced *expr_p, gimplify again. */
7117 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
7120 while (ret == GS_OK);
7122 /* If we encountered an error_mark somewhere nested inside, either
7123 stub out the statement or propagate the error back out. */
7124 if (ret == GS_ERROR)
7131 /* This was only valid as a return value from the langhook, which
7132 we handled. Make sure it doesn't escape from any other context. */
7133 gcc_assert (ret != GS_UNHANDLED);
7135 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
7137 /* We aren't looking for a value, and we don't have a valid
7138 statement. If it doesn't have side-effects, throw it away. */
7139 if (!TREE_SIDE_EFFECTS (*expr_p))
7141 else if (!TREE_THIS_VOLATILE (*expr_p))
7143 /* This is probably a _REF that contains something nested that
7144 has side effects. Recurse through the operands to find it. */
7145 enum tree_code code = TREE_CODE (*expr_p);
7152 case VIEW_CONVERT_EXPR:
7153 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7154 gimple_test_f, fallback);
7158 case ARRAY_RANGE_REF:
7159 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7160 gimple_test_f, fallback);
7161 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7162 gimple_test_f, fallback);
7166 /* Anything else with side-effects must be converted to
7167 a valid statement before we get here. */
7173 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7174 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
7176 /* Historically, the compiler has treated a bare reference
7177 to a non-BLKmode volatile lvalue as forcing a load. */
7178 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
7180 /* Normally, we do not want to create a temporary for a
7181 TREE_ADDRESSABLE type because such a type should not be
7182 copied by bitwise-assignment. However, we make an
7183 exception here, as all we are doing here is ensuring that
7184 we read the bytes that make up the type. We use
7185 create_tmp_var_raw because create_tmp_var will abort when
7186 given a TREE_ADDRESSABLE type. */
7187 tree tmp = create_tmp_var_raw (type, "vol");
7188 gimple_add_tmp_var (tmp);
7189 gimplify_assign (tmp, *expr_p, pre_p);
7193 /* We can't do anything useful with a volatile reference to
7194 an incomplete type, so just throw it away. Likewise for
7195 a BLKmode type, since any implicit inner load should
7196 already have been turned into an explicit one by the
7197 gimplification process. */
7201 /* If we are gimplifying at the statement level, we're done. Tack
7202 everything together and return. */
7203 if (fallback == fb_none || is_statement)
7205 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7206 it out for GC to reclaim it. */
7207 *expr_p = NULL_TREE;
7209 if (!gimple_seq_empty_p (internal_pre)
7210 || !gimple_seq_empty_p (internal_post))
7212 gimplify_seq_add_seq (&internal_pre, internal_post);
7213 gimplify_seq_add_seq (pre_p, internal_pre);
7216 /* The result of gimplifying *EXPR_P is going to be the last few
7217 statements in *PRE_P and *POST_P. Add location information
7218 to all the statements that were added by the gimplification
7220 if (!gimple_seq_empty_p (*pre_p))
7221 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7223 if (!gimple_seq_empty_p (*post_p))
7224 annotate_all_with_location_after (*post_p, post_last_gsi,
7230 #ifdef ENABLE_GIMPLE_CHECKING
7233 enum tree_code code = TREE_CODE (*expr_p);
7234 /* These expressions should already be in gimple IR form. */
7235 gcc_assert (code != MODIFY_EXPR
7237 && code != BIND_EXPR
7238 && code != CATCH_EXPR
7239 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
7240 && code != EH_FILTER_EXPR
7241 && code != GOTO_EXPR
7242 && code != LABEL_EXPR
7243 && code != LOOP_EXPR
7244 && code != SWITCH_EXPR
7245 && code != TRY_FINALLY_EXPR
7246 && code != OMP_CRITICAL
7248 && code != OMP_MASTER
7249 && code != OMP_ORDERED
7250 && code != OMP_PARALLEL
7251 && code != OMP_SECTIONS
7252 && code != OMP_SECTION
7253 && code != OMP_SINGLE);
7257 /* Otherwise we're gimplifying a subexpression, so the resulting
7258 value is interesting. If it's a valid operand that matches
7259 GIMPLE_TEST_F, we're done. Unless we are handling some
7260 post-effects internally; if that's the case, we need to copy into
7261 a temporary before adding the post-effects to POST_P. */
7262 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
7265 /* Otherwise, we need to create a new temporary for the gimplified
7268 /* We can't return an lvalue if we have an internal postqueue. The
7269 object the lvalue refers to would (probably) be modified by the
7270 postqueue; we need to copy the value out first, which means an
7272 if ((fallback & fb_lvalue)
7273 && gimple_seq_empty_p (internal_post)
7274 && is_gimple_addressable (*expr_p))
7276 /* An lvalue will do. Take the address of the expression, store it
7277 in a temporary, and replace the expression with an INDIRECT_REF of
7279 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
7280 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7281 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
7283 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
7285 /* An rvalue will do. Assign the gimplified expression into a
7286 new temporary TMP and replace the original expression with
7287 TMP. First, make sure that the expression has a type so that
7288 it can be assigned into a temporary. */
7289 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
7291 if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
7292 /* The postqueue might change the value of the expression between
7293 the initialization and use of the temporary, so we can't use a
7294 formal temp. FIXME do we care? */
7296 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7297 if (TREE_CODE (TREE_TYPE (*expr_p)) == COMPLEX_TYPE
7298 || TREE_CODE (TREE_TYPE (*expr_p)) == VECTOR_TYPE)
7299 DECL_GIMPLE_REG_P (*expr_p) = 1;
7302 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
7306 #ifdef ENABLE_GIMPLE_CHECKING
7307 if (!(fallback & fb_mayfail))
7309 fprintf (stderr, "gimplification failed:\n");
7310 print_generic_expr (stderr, *expr_p, 0);
7311 debug_tree (*expr_p);
7312 internal_error ("gimplification failed");
7315 gcc_assert (fallback & fb_mayfail);
7317 /* If this is an asm statement, and the user asked for the
7318 impossible, don't die. Fail and let gimplify_asm_expr
7324 /* Make sure the temporary matches our predicate. */
7325 gcc_assert ((*gimple_test_f) (*expr_p));
7327 if (!gimple_seq_empty_p (internal_post))
7329 annotate_all_with_location (internal_post, input_location);
7330 gimplify_seq_add_seq (pre_p, internal_post);
7334 input_location = saved_location;
7338 /* Look through TYPE for variable-sized objects and gimplify each such
7339 size that we find. Add to LIST_P any statements generated. */
7342 gimplify_type_sizes (tree type, gimple_seq *list_p)
     /* Nothing to do for a missing or erroneous type.  */
7346   if (type == NULL || type == error_mark_node)
7349   /* We first do the main variant, then copy into any other variants. */
7350   type = TYPE_MAIN_VARIANT (type);
7352   /* Avoid infinite recursion. */
7353   if (TYPE_SIZES_GIMPLIFIED (type))
7356   TYPE_SIZES_GIMPLIFIED (type) = 1;
7358   switch (TREE_CODE (type))
7364     case FIXED_POINT_TYPE:
       /* Scalar types: gimplify both bounds, then propagate the possibly
	  new bound trees into every variant so they stay shared.  */
7365       gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
7366       gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
7368       for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7370 	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
7371 	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
7376       /* These types may not have declarations, so handle them here. */
7377       gimplify_type_sizes (TREE_TYPE (type), list_p);
7378       gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
7379       /* Ensure VLA bounds aren't removed, for -O0 they should be variables
7380 	 with assigned stack slots, for -O1+ -g they should be tracked
7382       if (TYPE_DOMAIN (type)
7383 	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
       /* Artificial VAR_DECL bounds were created by the gimplifier;
	  un-ignore them so the debugger can see the VLA bounds.  */
7385 	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7386 	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7387 	    DECL_IGNORED_P (t) = 0;
7388 	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7389 	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7390 	    DECL_IGNORED_P (t) = 0;
7396     case QUAL_UNION_TYPE:
       /* Aggregates: gimplify each field's offset and sizes, then recurse
	  into the field types themselves.  */
7397       for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7398 	if (TREE_CODE (field) == FIELD_DECL)
7400 	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
7401 	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
7402 	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
7403 	    gimplify_type_sizes (TREE_TYPE (field), list_p);
7408     case REFERENCE_TYPE:
7409       /* We used to recurse on the pointed-to type here, which turned out to
7410 	 be incorrect because its definition might refer to variables not
7411 	 yet initialized at this point if a forward declaration is involved.
7413 	 It was actually useful for anonymous pointed-to types to ensure
7414 	 that the sizes evaluation dominates every possible later use of the
7415 	 values. Restricting to such types here would be safe since there
7416 	 is no possible forward declaration around, but would introduce an
7417 	 undesirable middle-end semantic to anonymity. We then defer to
7418 	 front-ends the responsibility of ensuring that the sizes are
7419 	 evaluated both early and late enough, e.g. by attaching artificial
7420 	 type declarations to the tree. */
     /* Finally gimplify the overall size of the type itself and share the
	result (and the gimplified flag) with all variants.  */
7427   gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
7428   gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
7430   for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7432       TYPE_SIZE (t) = TYPE_SIZE (type);
7433       TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
7434       TYPE_SIZES_GIMPLIFIED (t) = 1;
7438 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
7439 a size or position, has had all of its SAVE_EXPRs evaluated.
7440 We add any required statements to *STMT_P. */
7443 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
7445   tree type, expr = *expr_p;
7447   /* We don't do anything if the value isn't there, is constant, or contains
7448      a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
7449      a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
7450      will want to replace it with a new variable, but that will cause problems
7451      if this type is from outside the function. It's OK to have that here. */
7452   if (expr == NULL_TREE || TREE_CONSTANT (expr)
7453       || TREE_CODE (expr) == VAR_DECL
7454       || CONTAINS_PLACEHOLDER_P (expr))
     /* Unshare before gimplifying: the size expression may be shared with
	other types or variants, and gimplification mutates it in place.  */
7457   type = TREE_TYPE (expr);
7458   *expr_p = unshare_expr (expr);
7460   gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
7463   /* Verify that we've an exact type match with the original expression.
7464      In particular, we do not wish to drop a "sizetype" in favour of a
7465      type of similar dimensions. We don't want to pollute the generic
7466      type-stripping code with this knowledge because it doesn't matter
7467      for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
7468      and friends retain their "sizetype-ness". */
7469   if (TREE_TYPE (expr) != type
7470       && TREE_CODE (type) == INTEGER_TYPE
7471       && TYPE_IS_SIZETYPE (type))
       /* Re-cast back into the original sizetype via a fresh temporary,
	  carrying over the expression's location when it has one.  */
7476       *expr_p = create_tmp_var (type, NULL);
7477       tmp = build1 (NOP_EXPR, type, expr);
7478       stmt = gimplify_assign (*expr_p, tmp, stmt_p);
7479       if (EXPR_HAS_LOCATION (expr))
7480 	gimple_set_location (stmt, EXPR_LOCATION (expr));
7482 	gimple_set_location (stmt, input_location);
7487 /* Gimplify the body of statements pointed to by BODY_P and return a
7488 GIMPLE_BIND containing the sequence of GIMPLE statements
7489 corresponding to BODY_P. FNDECL is the function decl containing
7493 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
7495   location_t saved_location = input_location;
7496   gimple_seq parm_stmts, seq;
7498   struct gimplify_ctx gctx;
7500   timevar_push (TV_TREE_GIMPLIFY);
7502   /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
7504   default_rtl_profile ();
     /* There must be no gimplification context active yet; we push a fresh
	one for the whole body and pop it at the end.  */
7506   gcc_assert (gimplify_ctxp == NULL);
7507   push_gimplify_context (&gctx);
7509   /* Unshare most shared trees in the body and in that of any nested functions.
7510      It would seem we don't have to do this for nested functions because
7511      they are supposed to be output and then the outer function gimplified
7512      first, but the g++ front end doesn't always do it that way. */
7513   unshare_body (body_p, fndecl);
7514   unvisit_body (body_p, fndecl);
     /* For nested functions (those with a cgraph origin), track nonlocal
	VLAs in a pointer set; destroyed again below.  */
7516   if (cgraph_node (fndecl)->origin)
7517     nonlocal_vlas = pointer_set_create ();
7519   /* Make sure input_location isn't set to something weird. */
7520   input_location = DECL_SOURCE_LOCATION (fndecl);
7522   /* Resolve callee-copies. This has to be done before processing
7523      the body so that DECL_VALUE_EXPR gets processed correctly. */
7524   parm_stmts = (do_parms) ? gimplify_parameters () : NULL;
7526   /* Gimplify the function's body. */
7528   gimplify_stmt (body_p, &seq);
7529   outer_bind = gimple_seq_first_stmt (seq);
     /* An empty result still needs a statement to hang things on.  */
7532       outer_bind = gimple_build_nop ();
7533       gimplify_seq_add_stmt (&seq, outer_bind);
7536   /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
7537      not the case, wrap everything in a GIMPLE_BIND to make it so. */
7538   if (gimple_code (outer_bind) == GIMPLE_BIND
7539       && gimple_seq_first (seq) == gimple_seq_last (seq))
7542     outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
7544   *body_p = NULL_TREE;
7546   /* If we had callee-copies statements, insert them at the beginning
7547      of the function and clear DECL_VALUE_EXPR_P on the parameters. */
7548   if (!gimple_seq_empty_p (parm_stmts))
7552       gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
7553       gimple_bind_set_body (outer_bind, parm_stmts);
7555       for (parm = DECL_ARGUMENTS (current_function_decl);
7556 	   parm; parm = TREE_CHAIN (parm))
7557 	if (DECL_HAS_VALUE_EXPR_P (parm))
7559 	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
7560 	    DECL_IGNORED_P (parm) = 0;
     /* Tear down the nonlocal-VLA set created above, if any.  */
7566       pointer_set_destroy (nonlocal_vlas);
7567       nonlocal_vlas = NULL;
7570   pop_gimplify_context (outer_bind);
7571   gcc_assert (gimplify_ctxp == NULL);
7573 #ifdef ENABLE_TYPES_CHECKING
7574   if (!errorcount && !sorrycount)
7575     verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
7578   timevar_pop (TV_TREE_GIMPLIFY);
7579   input_location = saved_location;
7584 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
7585 node for the function we want to gimplify.
7587 Returns the sequence of GIMPLE statements corresponding to the body
7591 gimplify_function_tree (tree fndecl)
7593   tree oldfn, parm, ret;
     /* The function must not have been gimplified already.  */
7597   gcc_assert (!gimple_body (fndecl));
     /* Temporarily make FNDECL the current function; restored below.  */
7599   oldfn = current_function_decl;
7600   current_function_decl = fndecl;
7601   if (DECL_STRUCT_FUNCTION (fndecl))
7602     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
7604     push_struct_function (fndecl);
7606   for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
7608       /* Preliminarily mark non-addressed complex variables as eligible
7609 	 for promotion to gimple registers. We'll transform their uses
7611       if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
7612 	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
7613 	  && !TREE_THIS_VOLATILE (parm)
7614 	  && !needs_to_live_in_memory (parm))
7615 	DECL_GIMPLE_REG_P (parm) = 1;
     /* Likewise for a complex/vector DECL_RESULT.  */
7618   ret = DECL_RESULT (fndecl);
7619   if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7620        || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
7621       && !needs_to_live_in_memory (ret))
7622     DECL_GIMPLE_REG_P (ret) = 1;
7624   bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
7626   /* The tree body of the function is no longer needed, replace it
7627      with the new GIMPLE body. */
7628   seq = gimple_seq_alloc ();
7629   gimple_seq_add_stmt (&seq, bind);
7630   gimple_set_body (fndecl, seq);
7632   /* If we're instrumenting function entry/exit, then prepend the call to
7633      the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
7634      catch the exit hook. */
7635   /* ??? Add some way to ignore exceptions for this TFE. */
7636   if (flag_instrument_function_entry_exit
7637       && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
7638       && !flag_instrument_functions_exclude_p (fndecl))
7643       gimple_seq cleanup = NULL, body = NULL;
       /* cleanup: call the exit hook; runs via GIMPLE_TRY_FINALLY.  */
7645       x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
7646       gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0));
7647       tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
       /* body: entry hook first, then the try/finally-wrapped old body.  */
7649       x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
7650       gimplify_seq_add_stmt (&body, gimple_build_call (x, 0));
7651       gimplify_seq_add_stmt (&body, tf);
7652       new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
7653       /* Clear the block for BIND, since it is no longer directly inside
7654 	 the function, but within a try block. */
7655       gimple_bind_set_block (bind, NULL);
7657       /* Replace the current function body with the body
7658 	 wrapped in the try/finally TF. */
7659       seq = gimple_seq_alloc ();
7660       gimple_seq_add_stmt (&seq, new_bind);
7661       gimple_set_body (fndecl, seq);
     /* The GENERIC body is dead now; mark the function as gimplified.  */
7664   DECL_SAVED_TREE (fndecl) = NULL_TREE;
7665   cfun->curr_properties = PROP_gimple_any;
7667   current_function_decl = oldfn;
7672 /* Some transformations like inlining may invalidate the GIMPLE form
7673 for operands. This function traverses all the operands in STMT and
7674 gimplifies anything that is not a valid gimple operand. Any new
7675 GIMPLE statements are inserted before *GSI_P. */
7678 gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
7681   tree orig_lhs = NULL_TREE, lhs, t;
7682   gimple_seq pre = NULL;
7683   gimple post_stmt = NULL;
7684   struct gimplify_ctx gctx;
7686   push_gimplify_context (&gctx);
7687   gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
     /* Dispatch on the statement kind: each case knows which operands
	need which gimple predicate.  */
7689   switch (gimple_code (stmt))
7692       gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
7693 		     is_gimple_val, fb_rvalue);
7694       gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
7695 		     is_gimple_val, fb_rvalue);
7698       gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
7699 		     is_gimple_val, fb_rvalue);
7701     case GIMPLE_OMP_ATOMIC_LOAD:
7702       gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
7703 		     is_gimple_val, fb_rvalue);
       /* Asm statements: re-parse each output/input constraint to decide
	  whether the operand must be an lvalue, a min lvalue, or a value.  */
7707 	size_t i, noutputs = gimple_asm_noutputs (stmt);
7708 	const char *constraint, **oconstraints;
7709 	bool allows_mem, allows_reg, is_inout;
7712 	  = (const char **) alloca ((noutputs) * sizeof (const char *));
7713 	for (i = 0; i < noutputs; i++)
7715 	    tree op = gimple_asm_output_op (stmt, i);
7716 	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7717 	    oconstraints[i] = constraint;
7718 	    parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
7719 				     &allows_reg, &is_inout);
7720 	    gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7721 			   is_inout ? is_gimple_min_lval : is_gimple_lvalue,
7722 			   fb_lvalue | fb_mayfail);
7724 	for (i = 0; i < gimple_asm_ninputs (stmt); i++)
7726 	    tree op = gimple_asm_input_op (stmt, i);
7727 	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7728 	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
7729 				    oconstraints, &allows_mem, &allows_reg);
7730 	    if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
7732 	    if (!allows_reg && allows_mem)
7733 	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7734 			     is_gimple_lvalue, fb_lvalue | fb_mayfail);
7736 	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7737 			     is_gimple_asm_val, fb_rvalue);
7742       /* NOTE: We start gimplifying operands from last to first to
7743 	 make sure that side-effects on the RHS of calls, assignments
7744 	 and ASMs are executed before the LHS. The ordering is not
7745 	 important for other statements. */
7746       num_ops = gimple_num_ops (stmt);
7747       orig_lhs = gimple_get_lhs (stmt);
7748       for (i = num_ops; i > 0; i--)
7750 	  tree op = gimple_op (stmt, i - 1);
7751 	  if (op == NULL_TREE)
	  /* Operand 0 of a call/assign is the LHS — gimplify as lvalue.  */
7753 	  if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
7754 	    gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
7756 		   && is_gimple_assign (stmt)
7758 		   && get_gimple_rhs_class (gimple_expr_code (stmt))
7759 		      == GIMPLE_SINGLE_RHS)
7760 	    gimplify_expr (&op, &pre, NULL,
7761 			   rhs_predicate_for (gimple_assign_lhs (stmt)),
	  /* Operand 1 of a call is the callee address.  */
7763 	  else if (i == 2 && is_gimple_call (stmt))
7765 	      if (TREE_CODE (op) == FUNCTION_DECL)
7767 	      gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
7770 	    gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
	  /* Write the possibly-replaced operand back into the stmt.  */
7771 	  gimple_set_op (stmt, i - 1, op);
7774       lhs = gimple_get_lhs (stmt);
7775       /* If the LHS changed it in a way that requires a simple RHS,
7776 	 create temporary. */
7777       if (lhs && !is_gimple_reg (lhs))
7779 	  bool need_temp = false;
7781 	  if (is_gimple_assign (stmt)
7783 	      && get_gimple_rhs_class (gimple_expr_code (stmt))
7784 		 == GIMPLE_SINGLE_RHS)
7785 	    gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
7786 			   rhs_predicate_for (gimple_assign_lhs (stmt)),
7788 	  else if (is_gimple_reg (lhs))
7790 	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
		  /* A call that is not const/pure (or is looping const/pure),
		     or can throw internally, must not write straight into
		     a register LHS.  */
7792 		  if (is_gimple_call (stmt))
7794 		      i = gimple_call_flags (stmt);
7795 		      if ((i & ECF_LOOPING_CONST_OR_PURE)
7796 			  || !(i & (ECF_CONST | ECF_PURE)))
7799 		  if (stmt_can_throw_internal (stmt))
7805 	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
7807 	      else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
7809 		  if (is_gimple_call (stmt))
7811 		      tree fndecl = gimple_call_fndecl (stmt);
		      /* No temp needed when the value is returned in memory
			 anyway (aggregate return or DECL_BY_REFERENCE).  */
7813 		      if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
7814 			  && !(fndecl && DECL_RESULT (fndecl)
7815 			       && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
	      /* Store into a fresh temporary, then copy the temporary into
		 the original LHS in a statement emitted after STMT.  */
7824 	      tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);
7826 	      if (TREE_CODE (orig_lhs) == SSA_NAME)
7827 		orig_lhs = SSA_NAME_VAR (orig_lhs);
7829 	      if (gimple_in_ssa_p (cfun))
7830 		temp = make_ssa_name (temp, NULL);
7831 	      gimple_set_lhs (stmt, temp);
7832 	      post_stmt = gimple_build_assign (lhs, temp);
7833 	      if (TREE_CODE (lhs) == SSA_NAME)
7834 		SSA_NAME_DEF_STMT (lhs) = post_stmt;
     /* Register any temporaries created during gimplification.  */
7840   if (gimple_referenced_vars (cfun))
7841     for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7842       add_referenced_var (t);
     /* Emit the pre-statements before STMT (marking symbols for renaming
	when in SSA form) and the post-assignment after it.  */
7844   if (!gimple_seq_empty_p (pre))
7846       if (gimple_in_ssa_p (cfun))
7848 	  gimple_stmt_iterator i;
7850 	  for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
7851 	    mark_symbols_for_renaming (gsi_stmt (i));
7853       gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
7856     gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
7858   pop_gimplify_context (NULL);
7862 /* Expands EXPR to list of gimple statements STMTS. If SIMPLE is true,
7863 force the result to be either ssa_name or an invariant, otherwise
7864 just force it to be a rhs expression. If VAR is not NULL, make the
7865 base variable of the final destination be VAR if suitable. */
7868 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
7871   enum gimplify_status ret;
7872   gimple_predicate gimple_test_f;
7873   struct gimplify_ctx gctx;
     /* Already a valid gimple value — nothing to do.  */
7877   if (is_gimple_val (expr))
     /* SIMPLE selects the stricter predicate (ssa_name/invariant) over
	a general rhs expression.  */
7880   gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
7882   push_gimplify_context (&gctx);
7883   gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7884   gimplify_ctxp->allow_rhs_cond_expr = true;
     /* When VAR is given, gimplify "VAR = EXPR" so the result lands in VAR.  */
7887     expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
7889   if (TREE_CODE (expr) != MODIFY_EXPR
7890       && TREE_TYPE (expr) == void_type_node)
       /* A void expression yields no value — gimplify for side effects only.  */
7892       gimplify_and_add (expr, stmts);
7897       ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
7898       gcc_assert (ret != GS_ERROR);
     /* Register any temporaries the gimplifier created.  */
7901   if (gimple_referenced_vars (cfun))
7902     for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7903       add_referenced_var (t);
7905   pop_gimplify_context (NULL);
7910 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
7911 some statements are produced, emits them at GSI. If BEFORE is true,
7912 the statements are appended before GSI, otherwise they are appended after
7913 it. M specifies the way GSI moves after insertion (GSI_SAME_STMT or
7914 GSI_CONTINUE_LINKING are the usual values). */
7917 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
7918 			  bool simple_p, tree var, bool before,
7919 			  enum gsi_iterator_update m)
7923   expr = force_gimple_operand (expr, &stmts, simple_p, var);
     /* If gimplification produced statements, insert them at GSI —
	before or after per BEFORE, moving GSI according to M.  In SSA
	form, first mark their symbols for renaming.  */
7925   if (!gimple_seq_empty_p (stmts))
7927       if (gimple_in_ssa_p (cfun))
7929 	  gimple_stmt_iterator i;
7931 	  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
7932 	    mark_symbols_for_renaming (gsi_stmt (i));
7936 	gsi_insert_seq_before (gsi, stmts, m);
7938 	gsi_insert_seq_after (gsi, stmts, m);
7944 #include "gt-gimplify.h"