/* Tree lowering pass. This pass converts the GENERIC functions-as-trees
tree representation into the GIMPLE form.
- Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
- Free Software Foundation, Inc.
+ Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
+ 2012 Free Software Foundation, Inc.
Major work done by Sebastian Pop <s.pop@laposte.net>,
Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
before the def/use vectors have been constructed. */
void
-gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
+gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
{
gimple_stmt_iterator si;
gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
}
+/* Shorter alias name for the above function for use in gimplify.c
+ only. */
+
+static inline void
+gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
+{
+ gimple_seq_add_stmt_without_update (seq_p, gs);
+}
+
/* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
NULL, a new sequence is allocated. This function is
similar to gimple_seq_add_seq, but does not scan the operands.
char *preftmp = ASTRDUP (prefix);
remove_suffix (preftmp, strlen (preftmp));
+ clean_symbol_name (preftmp);
+
prefix = preftmp;
}
static inline tree
create_tmp_from_val (tree val)
{
- return create_tmp_var (TREE_TYPE (val), get_name (val));
+ /* Drop all qualifiers and address-space information from the value type. */
+ return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val));
}
/* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
return ret;
}
+/* Returns true iff T is a valid RHS for an assignment to a renamed
+ user -- or front-end generated artificial -- variable. */
+
+static bool
+is_gimple_reg_rhs (tree t)
+{
+ return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
+}
+
+/* Returns true iff T is a valid RHS for an assignment to an un-renamed
+ LHS, or for a call argument. */
+
+static bool
+is_gimple_mem_rhs (tree t)
+{
+ /* If we're dealing with a renamable type, either source or dest must be
+ a renamed variable. */
+ if (is_gimple_reg_type (TREE_TYPE (t)))
+ return is_gimple_val (t);
+ else
+ return is_gimple_val (t) || is_gimple_lvalue (t);
+}
+
/* Return true if T is a CALL_EXPR or an expression that can be
assigned to a temporary. Note that this predicate should only be
used during gimplification. See the rationale for this in
way to go. */
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
- These nodes model computations that should only be done once. If we
- were to unshare something like SAVE_EXPR(i++), the gimplification
- process would create wrong code. */
+ These nodes model computations that must be done once. If we were to
+ unshare something like SAVE_EXPR(i++), the gimplification process would
+ create wrong code. However, if DATA is non-null, it must hold a pointer
+ set that is used to unshare the subtrees of these nodes. */
static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
return NULL_TREE;
}
-/* Callback for walk_tree to unshare most of the shared trees rooted at
- *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
- then *TP is deep copied by calling mostly_copy_tree_r. */
+/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
+ If *TP has been visited already, then *TP is deeply copied by calling
+ mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
return NULL_TREE;
}
-/* Unshare most of the shared trees rooted at *TP. */
+/* Unshare most of the shared trees rooted at *TP. DATA is passed to the
+ copy_if_shared_r callback unmodified. */
static inline void
-copy_if_shared (tree *tp)
+copy_if_shared (tree *tp, void *data)
{
- /* If the language requires deep unsharing, we need a pointer set to make
- sure we don't repeatedly unshare subtrees of unshareable nodes. */
- struct pointer_set_t *visited
- = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
- walk_tree (tp, copy_if_shared_r, visited, NULL);
- if (visited)
- pointer_set_destroy (visited);
+ walk_tree (tp, copy_if_shared_r, data, NULL);
}
-/* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
- bodies of any nested functions if we are unsharing the entire body of
- FNDECL. */
+/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
+ any nested functions. */
static void
-unshare_body (tree *body_p, tree fndecl)
+unshare_body (tree fndecl)
{
struct cgraph_node *cgn = cgraph_get_node (fndecl);
+ /* If the language requires deep unsharing, we need a pointer set to make
+ sure we don't repeatedly unshare subtrees of unshareable nodes. */
+ struct pointer_set_t *visited
+ = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
+
+ copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
+ copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
+ copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
- copy_if_shared (body_p);
+ if (visited)
+ pointer_set_destroy (visited);
- if (cgn && body_p == &DECL_SAVED_TREE (fndecl))
+ if (cgn)
for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
- unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
+ unshare_body (cgn->symbol.decl);
}
/* Callback for walk_tree to unmark the visited trees rooted at *TP.
/* Likewise, but mark all trees as not visited. */
static void
-unvisit_body (tree *body_p, tree fndecl)
+unvisit_body (tree fndecl)
{
struct cgraph_node *cgn = cgraph_get_node (fndecl);
- unmark_visited (body_p);
+ unmark_visited (&DECL_SAVED_TREE (fndecl));
+ unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
+ unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
- if (cgn && body_p == &DECL_SAVED_TREE (fndecl))
+ if (cgn)
for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
- unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
+ unvisit_body (cgn->symbol.decl);
}
/* Unconditionally make an unshared copy of EXPR. This is used when using
}
break;
+ case TRANSACTION_EXPR:
+ TREE_SIDE_EFFECTS (*p) = 1;
+ TREE_TYPE (*p) = void_type_node;
+ p = &TRANSACTION_EXPR_BODY (*p);
+ break;
+
default:
+ /* Assume that any tree upon which voidify_wrapper_expr is
+ directly called is a wrapper, and that its body is op0. */
+ if (p == &wrapper)
+ {
+ TREE_SIDE_EFFECTS (*p) = 1;
+ TREE_TYPE (*p) = void_type_node;
+ p = &TREE_OPERAND (*p, 0);
+ break;
+ }
goto out;
}
}
{
tree tmp_var;
- *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
+ *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
gimple_call_set_lhs (*save, tmp_var);
*restore
- = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
+ = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1, tmp_var);
}
bool old_save_stack = gimplify_ctxp->save_stack;
tree t;
gimple gimple_bind;
- gimple_seq body;
+ gimple_seq body, cleanup;
+ gimple stack_save;
tree temp = voidify_wrapper_expr (bind_expr, NULL);
gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
gimple_bind_set_body (gimple_bind, body);
+ cleanup = NULL;
+ stack_save = NULL;
if (gimplify_ctxp->save_stack)
{
- gimple stack_save, stack_restore, gs;
- gimple_seq cleanup, new_body;
+ gimple stack_restore;
/* Save stack on entry and restore it on exit. Add a try_finally
block to achieve this. Note that mudflap depends on the
format of the emitted code: see mx_register_decls(). */
build_stack_save_restore (&stack_save, &stack_restore);
- cleanup = new_body = NULL;
gimplify_seq_add_stmt (&cleanup, stack_restore);
+ }
+
+ /* Add clobbers for all variables that go out of scope. */
+ for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
+ {
+ if (TREE_CODE (t) == VAR_DECL
+ && !is_global_var (t)
+ && DECL_CONTEXT (t) == current_function_decl
+ && !DECL_HARD_REGISTER (t)
+ && !TREE_THIS_VOLATILE (t)
+ && !DECL_HAS_VALUE_EXPR_P (t)
+ /* Only care for variables that have to be in memory. Others
+ will be rewritten into SSA names, hence moved to the top-level. */
+ && !is_gimple_reg (t))
+ {
+ tree clobber = build_constructor (TREE_TYPE (t), NULL);
+ TREE_THIS_VOLATILE (clobber) = 1;
+ gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
+ }
+ }
+
+ if (cleanup)
+ {
+ gimple gs;
+ gimple_seq new_body;
+
+ new_body = NULL;
gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
GIMPLE_TRY_FINALLY);
- gimplify_seq_add_stmt (&new_body, stack_save);
+ if (stack_save)
+ gimplify_seq_add_stmt (&new_body, stack_save);
gimplify_seq_add_stmt (&new_body, gs);
gimple_bind_set_body (gimple_bind, new_body);
}
SET_DECL_VALUE_EXPR (decl, t);
DECL_HAS_VALUE_EXPR_P (decl) = 1;
- t = built_in_decls[BUILT_IN_ALLOCA];
- t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
+ t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
+ t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
+ size_int (DECL_ALIGN (decl)));
/* The call has been built for a variable-sized object. */
CALL_ALLOCA_FOR_VAR_P (t) = 1;
t = fold_convert (ptr_type, t);
tree switch_expr = *expr_p;
gimple_seq switch_body_seq = NULL;
enum gimplify_status ret;
+ tree index_type = TREE_TYPE (switch_expr);
+ if (index_type == NULL_TREE)
+ index_type = TREE_TYPE (SWITCH_COND (switch_expr));
ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
fb_rvalue);
{
VEC (tree,heap) *labels;
VEC (tree,heap) *saved_labels;
+ tree min_value, max_value;
tree default_case = NULL_TREE;
size_t i, len;
gimple gimple_switch;
be bothered to null out the body too. */
gcc_assert (!SWITCH_LABELS (switch_expr));
- /* save old labels, get new ones from body, then restore the old
+ /* Save old labels, get new ones from body, then restore the old
labels. Save all the things from the switch body to append after. */
saved_labels = gimplify_ctxp->case_labels;
gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
gimplify_ctxp->case_labels = saved_labels;
i = 0;
+ min_value = TYPE_MIN_VALUE (index_type);
+ max_value = TYPE_MAX_VALUE (index_type);
while (i < VEC_length (tree, labels))
{
tree elt = VEC_index (tree, labels, i);
tree low = CASE_LOW (elt);
+ tree high = CASE_HIGH (elt);
bool remove_element = FALSE;
+
if (low)
{
- /* Discard empty ranges. */
- tree high = CASE_HIGH (elt);
- if (high && tree_int_cst_lt (high, low))
- remove_element = TRUE;
+ gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
+ gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);
+
+ /* This is a non-default case label, i.e. it has a value.
+
+ See if the case label is reachable within the range of
+ the index type. Remove out-of-range case values. Turn
+ case ranges into a canonical form (high > low strictly)
+ and convert the case label values to the index type.
+
+ NB: The type of gimple_switch_index() may be the promoted
+ type, but the case labels retain the original type. */
+
+ if (high)
+ {
+ /* This is a case range. Discard empty ranges.
+ If the bounds or the range are equal, turn this
+ into a simple (one-value) case. */
+ int cmp = tree_int_cst_compare (high, low);
+ if (cmp < 0)
+ remove_element = TRUE;
+ else if (cmp == 0)
+ high = NULL_TREE;
+ }
+
+ if (! high)
+ {
+ /* If the simple case value is unreachable, ignore it. */
+ if ((TREE_CODE (min_value) == INTEGER_CST
+ && tree_int_cst_compare (low, min_value) < 0)
+ || (TREE_CODE (max_value) == INTEGER_CST
+ && tree_int_cst_compare (low, max_value) > 0))
+ remove_element = TRUE;
+ else
+ low = fold_convert (index_type, low);
+ }
+ else
+ {
+ /* If the entire case range is unreachable, ignore it. */
+ if ((TREE_CODE (min_value) == INTEGER_CST
+ && tree_int_cst_compare (high, min_value) < 0)
+ || (TREE_CODE (max_value) == INTEGER_CST
+ && tree_int_cst_compare (low, max_value) > 0))
+ remove_element = TRUE;
+ else
+ {
+ /* If the lower bound is less than the index type's
+ minimum value, truncate the range bounds. */
+ if (TREE_CODE (min_value) == INTEGER_CST
+ && tree_int_cst_compare (low, min_value) < 0)
+ low = min_value;
+ low = fold_convert (index_type, low);
+
+ /* If the upper bound is greater than the index type's
+ maximum value, truncate the range bounds. */
+ if (TREE_CODE (max_value) == INTEGER_CST
+ && tree_int_cst_compare (high, max_value) > 0)
+ high = max_value;
+ high = fold_convert (index_type, high);
+ }
+ }
+
+ CASE_LOW (elt) = low;
+ CASE_HIGH (elt) = high;
}
else
{
if (!default_case)
{
- tree type = TREE_TYPE (switch_expr);
-
/* If the switch has no default label, add one, so that we jump
around the switch body. If the labels already cover the whole
- range of type, add the default label pointing to one of the
- existing labels. */
- if (type == void_type_node)
- type = TREE_TYPE (SWITCH_COND (switch_expr));
+ range of the switch index_type, add the default label pointing
+ to one of the existing labels. */
if (len
- && INTEGRAL_TYPE_P (type)
- && TYPE_MIN_VALUE (type)
- && TYPE_MAX_VALUE (type)
+ && TYPE_MIN_VALUE (index_type)
+ && TYPE_MAX_VALUE (index_type)
&& tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
- TYPE_MIN_VALUE (type)))
+ TYPE_MIN_VALUE (index_type)))
{
tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
if (!high)
high = CASE_LOW (VEC_index (tree, labels, len - 1));
- if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type)))
+ if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
{
for (i = 1; i < len; i++)
{
static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
- tree tem;
location_t loc = EXPR_LOCATION (*expr_p);
gcc_assert (CONVERT_EXPR_P (*expr_p));
if (tree_ssa_useless_type_conversion (*expr_p))
*expr_p = TREE_OPERAND (*expr_p, 0);
- /* Attempt to avoid NOP_EXPR by producing reference to a subtype.
- For example this fold (subclass *)&A into &A->subclass avoiding
- a need for statement. */
- if (CONVERT_EXPR_P (*expr_p)
- && POINTER_TYPE_P (TREE_TYPE (*expr_p))
- && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
- && (tem = maybe_fold_offset_to_address
- (EXPR_LOCATION (*expr_p), TREE_OPERAND (*expr_p, 0),
- integer_zero_node, TREE_TYPE (*expr_p))) != NULL_TREE)
- *expr_p = tem;
-
/* If we still have a conversion at the toplevel,
then canonicalize some constructs. */
if (CONVERT_EXPR_P (*expr_p))
/* For POINTERs increment, use POINTER_PLUS_EXPR. */
if (POINTER_TYPE_P (TREE_TYPE (lhs)))
{
- rhs = fold_convert_loc (loc, sizetype, rhs);
+ rhs = convert_to_ptrofftype_loc (loc, rhs);
if (arith_code == MINUS_EXPR)
rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
arith_code = POINTER_PLUS_EXPR;
}
- t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
-
if (postfix)
{
- gimplify_assign (lvalue, t1, orig_post_p);
+ tree t2 = get_initialized_tmp_var (lhs, pre_p, NULL);
+ t1 = build2 (arith_code, TREE_TYPE (*expr_p), t2, rhs);
+ gimplify_assign (lvalue, t1, pre_p);
gimplify_seq_add_seq (orig_post_p, post);
- *expr_p = lhs;
+ *expr_p = t2;
return GS_ALL_DONE;
}
else
{
+ t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
*expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
return GS_OK;
}
CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
= CALL_EXPR_RETURN_SLOT_OPT (call);
CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
- CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call);
SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
case TRUTH_NOT_EXPR:
TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
- /* FALLTHRU */
- case EQ_EXPR: case NE_EXPR:
- case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
/* These expressions always produce boolean results. */
- TREE_TYPE (expr) = boolean_type_node;
+ if (TREE_CODE (type) != BOOLEAN_TYPE)
+ TREE_TYPE (expr) = boolean_type_node;
return expr;
default:
+ if (COMPARISON_CLASS_P (expr))
+ {
+ /* These expressions always produce boolean results. */
+ if (TREE_CODE (type) != BOOLEAN_TYPE)
+ TREE_TYPE (expr) = boolean_type_node;
+ return expr;
+ }
/* Other expressions that get here must have boolean values, but
might need to be converted to the appropriate mode. */
- if (type == boolean_type_node)
+ if (TREE_CODE (type) == BOOLEAN_TYPE)
return expr;
return fold_convert_loc (loc, boolean_type_node, expr);
}
to_ptr = build_fold_addr_expr_loc (loc, to);
gimplify_arg (&to_ptr, seq_p, loc);
- t = implicit_built_in_decls[BUILT_IN_MEMCPY];
+ t = builtin_decl_implicit (BUILT_IN_MEMCPY);
gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
to_ptr = build_fold_addr_expr_loc (loc, to);
gimplify_arg (&to_ptr, seq_p, loc);
- t = implicit_built_in_decls[BUILT_IN_MEMSET];
+ t = builtin_decl_implicit (BUILT_IN_MEMSET);
gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
decl instead. */
static enum gimplify_status
-gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p)
+gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
+ fallback_t fallback)
{
tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
tree decl = DECL_EXPR_DECL (decl_s);
&& !needs_to_live_in_memory (decl))
DECL_GIMPLE_REG_P (decl) = 1;
+ /* If the decl is not addressable, then it is being used in some
+ expression or on the right hand side of a statement, and it can
+ be put into a readonly data section. */
+ if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
+ TREE_READONLY (decl) = 1;
+
/* This decl isn't mentioned in the enclosing block, so add it to the
list of temps. FIXME it seems a bit of a kludge to say that
anonymous artificial vars aren't pushed, but everything else is. */
/* It's OK to use the target directly if it's being
initialized. */
use_target = true;
- else if (!is_gimple_non_addressable (*to_p))
+ else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
+ /* Always use the target and thus RSO for variable-sized types.
+ GIMPLE cannot deal with a variable-sized assignment
+ embedded in a call statement. */
+ use_target = true;
+ else if (TREE_CODE (*to_p) != SSA_NAME
+ && (!is_gimple_variable (*to_p)
+ || needs_to_live_in_memory (*to_p)))
/* Don't use the original target if it's already addressable;
if its address escapes, and the called function uses the
NRV optimization, a conforming program could see *to_p
return ret;
}
+
+/* Return true if T looks like a valid GIMPLE statement. */
+
+static bool
+is_gimple_stmt (tree t)
+{
+ const enum tree_code code = TREE_CODE (t);
+
+ switch (code)
+ {
+ case NOP_EXPR:
+ /* The only valid NOP_EXPR is the empty statement. */
+ return IS_EMPTY_STMT (t);
+
+ case BIND_EXPR:
+ case COND_EXPR:
+ /* These are only valid if they're void. */
+ return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
+
+ case SWITCH_EXPR:
+ case GOTO_EXPR:
+ case RETURN_EXPR:
+ case LABEL_EXPR:
+ case CASE_LABEL_EXPR:
+ case TRY_CATCH_EXPR:
+ case TRY_FINALLY_EXPR:
+ case EH_FILTER_EXPR:
+ case CATCH_EXPR:
+ case ASM_EXPR:
+ case STATEMENT_LIST:
+ case OMP_PARALLEL:
+ case OMP_FOR:
+ case OMP_SECTIONS:
+ case OMP_SECTION:
+ case OMP_SINGLE:
+ case OMP_MASTER:
+ case OMP_ORDERED:
+ case OMP_CRITICAL:
+ case OMP_TASK:
+ /* These are always void. */
+ return true;
+
+ case CALL_EXPR:
+ case MODIFY_EXPR:
+ case PREDICT_EXPR:
+ /* These are valid regardless of their type. */
+ return true;
+
+ default:
+ return false;
+ }
+}
+
+
/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
DECL_GIMPLE_REG_P set.
ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
other = build1 (ocode, TREE_TYPE (rhs), lhs);
+ TREE_NO_WARNING (other) = 1;
other = get_formal_tmp_var (other, pre_p);
realpart = code == REALPART_EXPR ? rhs : other;
gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
|| TREE_CODE (*expr_p) == INIT_EXPR);
+ /* Trying to simplify a clobber using normal logic doesn't work,
+ so handle it here. */
+ if (TREE_CLOBBER_P (*from_p))
+ {
+ gcc_assert (!want_value && TREE_CODE (*to_p) == VAR_DECL);
+ gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
+ *expr_p = NULL;
+ return GS_ALL_DONE;
+ }
+
/* Insert pointer conversions required by the middle-end that are not
required by the frontend. This fixes middle-end type checking for
for example gcc.dg/redecl-6.c. */
arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
src = build_fold_addr_expr_loc (loc, op1);
dest = build_fold_addr_expr_loc (loc, op0);
- t = implicit_built_in_decls[BUILT_IN_MEMCMP];
+ t = builtin_decl_implicit (BUILT_IN_MEMCMP);
t = build_call_expr_loc (loc, t, 3, dest, src, arg);
expr
any cleanups collected outside the CLEANUP_POINT_EXPR. */
int old_conds = gimplify_ctxp->conditions;
gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
+ bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
gimplify_ctxp->conditions = 0;
gimplify_ctxp->conditional_cleanups = NULL;
+ gimplify_ctxp->in_cleanup_point_expr = true;
gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
gimplify_ctxp->conditions = old_conds;
gimplify_ctxp->conditional_cleanups = old_cleanups;
+ gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
{
if (init)
{
+ tree cleanup = NULL_TREE;
+
/* TARGET_EXPR temps aren't part of the enclosing block, so add it
to the temps list. Handle also variable length TARGET_EXPRs. */
if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
/* If needed, push the cleanup for the temp. */
if (TARGET_EXPR_CLEANUP (targ))
- gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
- CLEANUP_EH_ONLY (targ), pre_p);
+ {
+ if (CLEANUP_EH_ONLY (targ))
+ gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
+ CLEANUP_EH_ONLY (targ), pre_p);
+ else
+ cleanup = TARGET_EXPR_CLEANUP (targ);
+ }
+
+ /* Add a clobber for the temporary going out of scope, like
+ gimplify_bind_expr. */
+ if (gimplify_ctxp->in_cleanup_point_expr
+ && needs_to_live_in_memory (temp))
+ {
+ tree clobber = build_constructor (TREE_TYPE (temp), NULL);
+ TREE_THIS_VOLATILE (clobber) = true;
+ clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
+ if (cleanup)
+ cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
+ clobber);
+ else
+ cleanup = clobber;
+ }
+
+ if (cleanup)
+ gimple_push_cleanup (temp, cleanup, false, pre_p);
/* Only expand this once. */
TREE_OPERAND (targ, 3) = init;
}
break;
+ case OMP_CLAUSE_FINAL:
case OMP_CLAUSE_IF:
OMP_CLAUSE_OPERAND (c, 0)
= gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
case OMP_CLAUSE_ORDERED:
case OMP_CLAUSE_UNTIED:
case OMP_CLAUSE_COLLAPSE:
+ case OMP_CLAUSE_MERGEABLE:
break;
case OMP_CLAUSE_DEFAULT:
case OMP_CLAUSE_DEFAULT:
case OMP_CLAUSE_UNTIED:
case OMP_CLAUSE_COLLAPSE:
+ case OMP_CLAUSE_FINAL:
+ case OMP_CLAUSE_MERGEABLE:
break;
default:
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
tree addr = TREE_OPERAND (*expr_p, 0);
- tree rhs = TREE_OPERAND (*expr_p, 1);
+ tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
+ ? NULL : TREE_OPERAND (*expr_p, 1);
tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
tree tmp_load;
+ gimple loadstmt, storestmt;
- tmp_load = create_tmp_reg (type, NULL);
- if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
- return GS_ERROR;
+ tmp_load = create_tmp_reg (type, NULL);
+ if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
+ return GS_ERROR;
- if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
- != GS_ALL_DONE)
- return GS_ERROR;
+ if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
+ != GS_ALL_DONE)
+ return GS_ERROR;
- gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_load (tmp_load, addr));
- if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
- != GS_ALL_DONE)
- return GS_ERROR;
- gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_store (rhs));
- *expr_p = NULL;
+ loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
+ gimplify_seq_add_stmt (pre_p, loadstmt);
+ if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
+ != GS_ALL_DONE)
+ return GS_ERROR;
+
+ if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
+ rhs = tmp_load;
+ storestmt = gimple_build_omp_atomic_store (rhs);
+ gimplify_seq_add_stmt (pre_p, storestmt);
+ switch (TREE_CODE (*expr_p))
+ {
+ case OMP_ATOMIC_READ:
+ case OMP_ATOMIC_CAPTURE_OLD:
+ *expr_p = tmp_load;
+ gimple_omp_atomic_set_need_value (loadstmt);
+ break;
+ case OMP_ATOMIC_CAPTURE_NEW:
+ *expr_p = rhs;
+ gimple_omp_atomic_set_need_value (storestmt);
+ break;
+ default:
+ *expr_p = NULL;
+ break;
+ }
return GS_ALL_DONE;
}
+/* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
+ body, and adding some EH bits. */
+
+static enum gimplify_status
+gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
+{
+ tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
+ gimple g;
+ gimple_seq body = NULL;
+ struct gimplify_ctx gctx;
+ int subcode = 0;
+
+ /* Wrap the transaction body in a BIND_EXPR so we have a context
+ where to put decls for OpenMP. */
+ if (TREE_CODE (tbody) != BIND_EXPR)
+ {
+ tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
+ TREE_SIDE_EFFECTS (bind) = 1;
+ SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
+ TRANSACTION_EXPR_BODY (expr) = bind;
+ }
+
+ push_gimplify_context (&gctx);
+ temp = voidify_wrapper_expr (*expr_p, NULL);
+
+ g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
+ pop_gimplify_context (g);
+
+ g = gimple_build_transaction (body, NULL);
+ if (TRANSACTION_EXPR_OUTER (expr))
+ subcode = GTMA_IS_OUTER;
+ else if (TRANSACTION_EXPR_RELAXED (expr))
+ subcode = GTMA_IS_RELAXED;
+ gimple_transaction_set_subcode (g, subcode);
+
+ gimplify_seq_add_stmt (pre_p, g);
+
+ if (temp)
+ {
+ *expr_p = temp;
+ return GS_OK;
+ }
+
+ *expr_p = NULL_TREE;
+ return GS_ALL_DONE;
+}
+
/* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
expression produces a value to be used as an operand inside a GIMPLE
statement, the value will be stored back in *EXPR_P. This value will
break;
case COMPOUND_LITERAL_EXPR:
- ret = gimplify_compound_literal_expr (expr_p, pre_p);
+ ret = gimplify_compound_literal_expr (expr_p, pre_p, fallback);
break;
case MODIFY_EXPR:
ret = GS_OK;
break;
}
- ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
- is_gimple_mem_ref_addr, fb_rvalue);
- if (ret == GS_ERROR)
- break;
+ /* Avoid re-gimplifying the address operand if it is already
+ in suitable form. Re-gimplifying would mark the address
+ operand addressable. Always gimplify when not in SSA form
+ as we still may have to gimplify decls with value-exprs. */
+ if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
+ || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
+ {
+ ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
+ is_gimple_mem_ref_addr, fb_rvalue);
+ if (ret == GS_ERROR)
+ break;
+ }
recalculate_side_effects (*expr_p);
ret = GS_ALL_DONE;
break;
- /* Constants need not be gimplified. */
+ /* Constants need not be gimplified. */
case INTEGER_CST:
case REAL_CST:
case FIXED_CST:
}
case OMP_ATOMIC:
+ case OMP_ATOMIC_READ:
+ case OMP_ATOMIC_CAPTURE_OLD:
+ case OMP_ATOMIC_CAPTURE_NEW:
ret = gimplify_omp_atomic (expr_p, pre_p);
break;
+ case TRANSACTION_EXPR:
+ ret = gimplify_transaction (expr_p, pre_p);
+ break;
+
case TRUTH_AND_EXPR:
case TRUTH_OR_EXPR:
case TRUTH_XOR_EXPR:
{
tree orig_type = TREE_TYPE (*expr_p);
+ tree new_type, xop0, xop1;
*expr_p = gimple_boolify (*expr_p);
- if (!useless_type_conversion_p (orig_type, TREE_TYPE (*expr_p)))
+ new_type = TREE_TYPE (*expr_p);
+ if (!useless_type_conversion_p (orig_type, new_type))
{
*expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
ret = GS_OK;
default:
break;
}
-
+ /* Now make sure that operands have compatible type to
+ expression's new_type. */
+ xop0 = TREE_OPERAND (*expr_p, 0);
+ xop1 = TREE_OPERAND (*expr_p, 1);
+ if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
+ TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
+ new_type,
+ xop0);
+ if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
+ TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
+ new_type,
+ xop1);
/* Continue classified as tcc_binary. */
goto expr_2;
}
case FMA_EXPR:
+ case VEC_PERM_EXPR:
/* Classified as tcc_expression. */
goto expr_3;
case POINTER_PLUS_EXPR:
- /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
- The second is gimple immediate saving a need for extra statement.
- */
- if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
- && (tmp = maybe_fold_offset_to_address
- (EXPR_LOCATION (*expr_p),
- TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
- TREE_TYPE (*expr_p))))
- {
- *expr_p = tmp;
- ret = GS_OK;
- break;
- }
- /* Convert (void *)&a + 4 into (void *)&a[1]. */
- if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
- && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
- && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
- 0),0)))
- && (tmp = maybe_fold_offset_to_address
- (EXPR_LOCATION (*expr_p),
- TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
- TREE_OPERAND (*expr_p, 1),
- TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
- 0)))))
- {
- *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
- ret = GS_OK;
- break;
- }
- /* FALLTHRU */
+ {
+ enum gimplify_status r0, r1;
+ r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
+ post_p, is_gimple_val, fb_rvalue);
+ r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
+ post_p, is_gimple_val, fb_rvalue);
+ recalculate_side_effects (*expr_p);
+ ret = MIN (r0, r1);
+ /* Convert &X + CST to invariant &MEM[&X, CST]. Do this
+ after gimplifying operands - this is similar to how
+ it would be folding all gimplified stmts on creation
+ to have them canonicalized, which is what we eventually
+ should do anyway. */
+ if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
+ && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
+ {
+ *expr_p = build_fold_addr_expr_with_type_loc
+ (input_location,
+ fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
+ TREE_OPERAND (*expr_p, 0),
+ fold_convert (ptr_type_node,
+ TREE_OPERAND (*expr_p, 1))),
+ TREE_TYPE (*expr_p));
+ ret = MIN (ret, GS_OK);
+ }
+ break;
+ }
default:
switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
{
tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
- if (!AGGREGATE_TYPE_P (type))
+ /* Vector comparisons need no boolification. */
+ if (TREE_CODE (type) == VECTOR_TYPE)
goto expr_2;
+ else if (!AGGREGATE_TYPE_P (type))
+ {
+ tree org_type = TREE_TYPE (*expr_p);
+ *expr_p = gimple_boolify (*expr_p);
+ if (!useless_type_conversion_p (org_type,
+ TREE_TYPE (*expr_p)))
+ {
+ *expr_p = fold_convert_loc (input_location,
+ org_type, *expr_p);
+ ret = GS_OK;
+ }
+ else
+ goto expr_2;
+ }
else if (TYPE_MODE (type) != BLKmode)
ret = gimplify_scalar_mode_aggregate_compare (expr_p);
else
}
}
-/* Gimplify the body of statements pointed to by BODY_P and return a
- GIMPLE_BIND containing the sequence of GIMPLE statements
- corresponding to BODY_P. FNDECL is the function decl containing
- *BODY_P. */
+/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
+ containing the sequence of corresponding GIMPLE statements. If DO_PARMS
+ is true, also gimplify the parameters. */
gimple
-gimplify_body (tree *body_p, tree fndecl, bool do_parms)
+gimplify_body (tree fndecl, bool do_parms)
{
location_t saved_location = input_location;
gimple_seq parm_stmts, seq;
It would seem we don't have to do this for nested functions because
they are supposed to be output and then the outer function gimplified
first, but the g++ front end doesn't always do it that way. */
- unshare_body (body_p, fndecl);
- unvisit_body (body_p, fndecl);
+ unshare_body (fndecl);
+ unvisit_body (fndecl);
cgn = cgraph_get_node (fndecl);
if (cgn && cgn->origin)
/* Resolve callee-copies. This has to be done before processing
the body so that DECL_VALUE_EXPR gets processed correctly. */
- parm_stmts = (do_parms) ? gimplify_parameters () : NULL;
+ parm_stmts = do_parms ? gimplify_parameters () : NULL;
/* Gimplify the function's body. */
seq = NULL;
- gimplify_stmt (body_p, &seq);
+ gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
outer_bind = gimple_seq_first_stmt (seq);
if (!outer_bind)
{
else
outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
- *body_p = NULL_TREE;
+ DECL_SAVED_TREE (fndecl) = NULL_TREE;
/* If we had callee-copies statements, insert them at the beginning
of the function and clear DECL_VALUE_EXPR_P on the parameters. */
&& !needs_to_live_in_memory (ret))
DECL_GIMPLE_REG_P (ret) = 1;
- bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
+ bind = gimplify_body (fndecl, true);
/* The tree body of the function is no longer needed, replace it
with the new GIMPLE body. */
tree tmp_var;
gimple call;
- x = implicit_built_in_decls[BUILT_IN_RETURN_ADDRESS];
+ x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
call = gimple_build_call (x, 1, integer_zero_node);
tmp_var = create_tmp_var (ptr_type_node, "return_addr");
gimple_call_set_lhs (call, tmp_var);
gimplify_seq_add_stmt (&cleanup, call);
- x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
+ x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
call = gimple_build_call (x, 2,
build_fold_addr_expr (current_function_decl),
tmp_var);
gimplify_seq_add_stmt (&cleanup, call);
tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
- x = implicit_built_in_decls[BUILT_IN_RETURN_ADDRESS];
+ x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
call = gimple_build_call (x, 1, integer_zero_node);
tmp_var = create_tmp_var (ptr_type_node, "return_addr");
gimple_call_set_lhs (call, tmp_var);
gimplify_seq_add_stmt (&body, call);
- x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
+ x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
call = gimple_build_call (x, 2,
build_fold_addr_expr (current_function_decl),
tmp_var);