"assignments",
"phi nodes",
"conditionals",
- "sequences",
"everything else"
};
#endif /* GATHER_STATISTICS */
-/* A cache of gimple_seq objects. Sequences are created and destroyed
- fairly often during gimplification. */
-static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;
-
/* Private API manipulation functions shared only with some
other files. */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
/* Do not call gimple_set_modified here as it has other side
effects and this tuple is still not completely built. */
stmt->gsbase.modified = 1;
+ gimple_init_singleton (stmt);
return stmt;
}
#endif /* ENABLE_GIMPLE_CHECKING */
-/* Allocate a new GIMPLE sequence in GC memory and return it. If
- there are free sequences in GIMPLE_SEQ_CACHE return one of those
- instead. */
-
-gimple_seq
-gimple_seq_alloc (void)
-{
- gimple_seq seq = gimple_seq_cache;
- if (seq)
- {
- gimple_seq_cache = gimple_seq_cache->next_free;
- gcc_assert (gimple_seq_cache != seq);
- memset (seq, 0, sizeof (*seq));
- }
- else
- {
- seq = ggc_alloc_cleared_gimple_seq_d ();
-#ifdef GATHER_STATISTICS
- gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
- gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
-#endif
- }
-
- return seq;
-}
-
-/* Return SEQ to the free pool of GIMPLE sequences. */
-
-void
-gimple_seq_free (gimple_seq seq)
-{
- if (seq == NULL)
- return;
-
- gcc_assert (gimple_seq_first (seq) == NULL);
- gcc_assert (gimple_seq_last (seq) == NULL);
-
- /* If this triggers, it's a sign that the same list is being freed
- twice. */
- gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);
-
- /* Add SEQ to the pool of free sequences. */
- seq->next_free = gimple_seq_cache;
- gimple_seq_cache = seq;
-}
-
-
/* Link gimple statement GS to the end of the sequence *SEQ_P. If
*SEQ_P is NULL, a new sequence is allocated. */
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
gimple_stmt_iterator si;
-
if (gs == NULL)
return;
- if (*seq_p == NULL)
- *seq_p = gimple_seq_alloc ();
-
si = gsi_last (*seq_p);
gsi_insert_after (&si, gs, GSI_NEW_STMT);
}
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
gimple_stmt_iterator si;
-
if (src == NULL)
return;
- if (*dst_p == NULL)
- *dst_p = gimple_seq_alloc ();
-
si = gsi_last (*dst_p);
gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}
gimple_seq_copy (gimple_seq src)
{
gimple_stmt_iterator gsi;
- gimple_seq new_seq = gimple_seq_alloc ();
+ gimple_seq new_seq = NULL;
gimple stmt;
for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
}
-/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
+/* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
on each one. WI is as in walk_gimple_stmt.
If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
Otherwise, all the statements are walked and NULL returned. */
gimple
-walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
- walk_tree_fn callback_op, struct walk_stmt_info *wi)
+walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
+ walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
gimple_stmt_iterator gsi;
- for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
+ for (gsi = gsi_start (*pseq); !gsi_end_p (gsi); )
{
tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
if (ret)
}
+/* Like walk_gimple_seq_mod, but additionally asserts that the head of
+   SEQ is not changed by the callbacks, i.e. that no statement was
+   added before or removed from the start of the sequence.  Returns
+   the statement that stopped the walk, or NULL if every statement
+   was walked (see walk_gimple_stmt).  */
+
+gimple
+walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
+		 walk_tree_fn callback_op, struct walk_stmt_info *wi)
+{
+  gimple_seq seq2 = seq;
+  gimple ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
+  gcc_assert (seq2 == seq);
+  return ret;
+}
+
+
+
+
/* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
static tree
tree lhs = gimple_assign_lhs (stmt);
wi->val_only
= (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
- || !gimple_assign_single_p (stmt);
+ || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
}
for (i = 1; i < gimple_num_ops (stmt); i++)
may use a COMPONENT_REF on the LHS. */
if (wi)
{
- /* If the RHS has more than 1 operand, it is not appropriate
- for the memory. */
- wi->val_only = !(is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
- || TREE_CODE (gimple_assign_rhs1 (stmt))
- == CONSTRUCTOR)
- || !gimple_assign_single_p (stmt);
+ /* If the RHS is of a non-renamable type or is a register variable,
+ we may use a COMPONENT_REF on the LHS. */
+ tree rhs1 = gimple_assign_rhs1 (stmt);
+ wi->val_only
+ = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
+ || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
wi->is_lhs = true;
}
switch (gimple_code (stmt))
{
case GIMPLE_BIND:
- ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
- callback_op, wi);
+ ret = walk_gimple_seq_mod (gimple_bind_body_ptr (stmt), callback_stmt,
+ callback_op, wi);
if (ret)
return wi->callback_result;
break;
case GIMPLE_CATCH:
- ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
- callback_op, wi);
+ ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (stmt), callback_stmt,
+ callback_op, wi);
if (ret)
return wi->callback_result;
break;
case GIMPLE_EH_FILTER:
- ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
+ ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt), callback_stmt,
callback_op, wi);
if (ret)
return wi->callback_result;
break;
case GIMPLE_EH_ELSE:
- ret = walk_gimple_seq (gimple_eh_else_n_body (stmt),
+ ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (stmt),
callback_stmt, callback_op, wi);
if (ret)
return wi->callback_result;
- ret = walk_gimple_seq (gimple_eh_else_e_body (stmt),
+ ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (stmt),
callback_stmt, callback_op, wi);
if (ret)
return wi->callback_result;
break;
case GIMPLE_TRY:
- ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
+ ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt, callback_op,
wi);
if (ret)
return wi->callback_result;
- ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
+ ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
callback_op, wi);
if (ret)
return wi->callback_result;
break;
case GIMPLE_OMP_FOR:
- ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
+ ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt), callback_stmt,
callback_op, wi);
if (ret)
return wi->callback_result;
case GIMPLE_OMP_TASK:
case GIMPLE_OMP_SECTIONS:
case GIMPLE_OMP_SINGLE:
- ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt,
+ ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
callback_op, wi);
if (ret)
return wi->callback_result;
break;
case GIMPLE_WITH_CLEANUP_EXPR:
- ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
+ ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
callback_op, wi);
if (ret)
return wi->callback_result;
break;
case GIMPLE_TRANSACTION:
- ret = walk_gimple_seq (gimple_transaction_body (stmt),
+ ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (stmt),
callback_stmt, callback_op, wi);
if (ret)
return wi->callback_result;
tree lhs = gimple_assign_lhs (stmt);
gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
+ gimple_init_singleton (new_stmt);
gsi_replace (gsi, new_stmt, true);
stmt = new_stmt;
/* Return a deep copy of statement STMT. All the operands from STMT
are reallocated and copied using unshare_expr. The DEF, USE, VDEF
- and VUSE operand arrays are set to empty in the new copy. */
+ and VUSE operand arrays are set to empty in the new copy. The new
+ copy isn't part of any sequence. */
gimple
gimple_copy (gimple stmt)
/* Shallow copy all the fields from STMT. */
memcpy (copy, stmt, gimple_size (code));
+ gimple_init_singleton (copy);
/* If STMT has sub-statements, deep-copy them as well. */
if (gimple_has_substatements (stmt))
}
-/* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement G has
- a MODIFIED field. */
-
-void
-gimple_set_modified (gimple s, bool modifiedp)
-{
- if (gimple_has_ops (s))
- s->gsbase.modified = (unsigned) modifiedp;
-}
-
-
/* Return true if statement S has side-effects. We consider a
statement to have side effects if:
/* Validation of GIMPLE expressions. */
-/* Returns true iff T is a valid RHS for an assignment to a renamed
- user -- or front-end generated artificial -- variable. */
-
-bool
-is_gimple_reg_rhs (tree t)
-{
- return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
-}
-
-/* Returns true iff T is a valid RHS for an assignment to an un-renamed
- LHS, or for a call argument. */
-
-bool
-is_gimple_mem_rhs (tree t)
-{
- /* If we're dealing with a renamable type, either source or dest must be
- a renamed variable. */
- if (is_gimple_reg_type (TREE_TYPE (t)))
- return is_gimple_val (t);
- else
- return is_gimple_val (t) || is_gimple_lvalue (t);
-}
-
/* Return true if T is a valid LHS for a GIMPLE assignment expression. */
bool
return is_gimple_constant (t);
}
-/* Return true if T looks like a valid GIMPLE statement. */
-
-bool
-is_gimple_stmt (tree t)
-{
- const enum tree_code code = TREE_CODE (t);
-
- switch (code)
- {
- case NOP_EXPR:
- /* The only valid NOP_EXPR is the empty statement. */
- return IS_EMPTY_STMT (t);
-
- case BIND_EXPR:
- case COND_EXPR:
- /* These are only valid if they're void. */
- return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
-
- case SWITCH_EXPR:
- case GOTO_EXPR:
- case RETURN_EXPR:
- case LABEL_EXPR:
- case CASE_LABEL_EXPR:
- case TRY_CATCH_EXPR:
- case TRY_FINALLY_EXPR:
- case EH_FILTER_EXPR:
- case CATCH_EXPR:
- case ASM_EXPR:
- case STATEMENT_LIST:
- case OMP_PARALLEL:
- case OMP_FOR:
- case OMP_SECTIONS:
- case OMP_SECTION:
- case OMP_SINGLE:
- case OMP_MASTER:
- case OMP_ORDERED:
- case OMP_CRITICAL:
- case OMP_TASK:
- /* These are always void. */
- return true;
-
- case CALL_EXPR:
- case MODIFY_EXPR:
- case PREDICT_EXPR:
- /* These are valid regardless of their type. */
- return true;
-
- default:
- return false;
- }
-}
-
/* Return true if T is a variable. */
bool
|| TREE_CODE (t) == STRING_CST);
}
-/* Return true if TYPE is a suitable type for a scalar register variable. */
-
-bool
-is_gimple_reg_type (tree type)
-{
- return !AGGREGATE_TYPE_P (type);
-}
-
/* Return true if T is a non-aggregate register variable. */
bool
is_gimple_reg (tree t)
{
if (TREE_CODE (t) == SSA_NAME)
- t = SSA_NAME_VAR (t);
+ {
+ t = SSA_NAME_VAR (t);
+ if (TREE_CODE (t) == VAR_DECL
+ && VAR_DECL_IS_VIRTUAL_OPERAND (t))
+ return false;
+ return true;
+ }
+
+ if (TREE_CODE (t) == VAR_DECL
+ && VAR_DECL_IS_VIRTUAL_OPERAND (t))
+ return false;
if (!is_gimple_variable (t))
return false;
return false;
if (TREE_CODE (t1) == INTEGER_TYPE
- && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
- || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
+ && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
return false;
/* That's all we need to check for float and fixed-point types. */
|| FIXED_POINT_TYPE_P (t1))
return true;
- /* For other types fall thru to more complex checks. */
+ /* For other types fall through to more complex checks. */
}
/* If the types have been previously registered and found equal
goto same_types;
else if (i1 == NULL_TREE || i2 == NULL_TREE)
goto different_types;
- /* If for a complete array type the possibly gimplified sizes
- are different the types are different. */
- else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
- || (TYPE_SIZE (i1)
- && TYPE_SIZE (i2)
- && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
- goto different_types;
else
{
tree min1 = TYPE_MIN_VALUE (i1);
return false;
if (TREE_CODE (t1) == INTEGER_TYPE
- && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
- || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
+ && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
return false;
/* That's all we need to check for float and fixed-point types. */
|| FIXED_POINT_TYPE_P (t1))
return true;
- /* For other types fall thru to more complex checks. */
+ /* For other types fall through to more complex checks. */
}
/* If the types have been previously registered and found equal
v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
}
- /* For array types hash their domain and the string flag. */
- if (TREE_CODE (type) == ARRAY_TYPE
- && TYPE_DOMAIN (type))
+ /* For array types hash the domain and the string flag. */
+ if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
{
v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
v = visit (TYPE_DOMAIN (type), state, v,
v = iterative_hash_hashval_t (na, v);
}
- if (TREE_CODE (type) == RECORD_TYPE
- || TREE_CODE (type) == UNION_TYPE
- || TREE_CODE (type) == QUAL_UNION_TYPE)
+ if (RECORD_OR_UNION_TYPE_P (type))
{
unsigned nf;
tree f;
v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
}
- /* For integer types hash the types min/max values and the string flag. */
+ /* For integer types hash only the string flag. */
if (TREE_CODE (type) == INTEGER_TYPE)
- {
- v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
- v = iterative_hash_hashval_t (TYPE_IS_SIZETYPE (type), v);
- }
+ v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
- /* For array types hash their domain and the string flag. */
- if (TREE_CODE (type) == ARRAY_TYPE
- && TYPE_DOMAIN (type))
+ /* For array types hash the domain bounds and the string flag. */
+ if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
{
v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
- v = iterative_hash_canonical_type (TYPE_DOMAIN (type), v);
+ /* OMP lowering can introduce error_mark_node in place of
+ random local decls in types. */
+ if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
+ v = iterative_hash_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), v);
+ if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
+ v = iterative_hash_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), v);
}
/* Recurse for aggregates with a single element type. */
v = iterative_hash_hashval_t (na, v);
}
- if (TREE_CODE (type) == RECORD_TYPE
- || TREE_CODE (type) == UNION_TYPE
- || TREE_CODE (type) == QUAL_UNION_TYPE)
+ if (RECORD_OR_UNION_TYPE_P (type))
{
unsigned nf;
tree f;
return false;
if (TREE_CODE (t1) == INTEGER_TYPE
- && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
- || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
+ && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
return false;
/* For canonical type comparisons we do not want to build SCCs
return true;
else if (i1 == NULL_TREE || i2 == NULL_TREE)
return false;
- /* If for a complete array type the possibly gimplified sizes
- are different the types are different. */
- else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
- || (TYPE_SIZE (i1)
- && TYPE_SIZE (i2)
- && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
- return false;
else
{
tree min1 = TYPE_MIN_VALUE (i1);