+2007-06-06 Thomas Neumann <tneumann@users.sourceforge.net>
+
+ * tree-ssa-alias-warnings.c (maybe_add_match): Cast according to the
+ coding conventions.
+ (add_key): Likewise.
+ * tree-ssa.c (init_tree_ssa): Use type safe memory macros.
+ * tree-ssa-ccp.c (ccp_fold_builtin): Avoid using C++ keywords as
+ variable names.
+ * tree-ssa-coalesce.c (find_coalesce_pair): Use type safe memory macros.
+ (add_cost_one_coalesce): Likewise.
+ * tree-ssa-copy.c (merge_alias_info): Avoid using C++ keywords as
+ variable names. Rename orig to orig_name for consistency.
+ * tree-ssa-dom.c (dom_thread_across_edge): Cast according to the coding
+ conventions.
+ (cprop_into_successor_phis): Avoid using C++ keywords as variable names.
+ (record_equivalences_from_stmt): Likewise.
+ * tree-ssa-dse.c (dse_initialize_block_local_data): Cast according to
+ the coding conventions.
+ (memory_ssa_name_same): Likewise.
+ (dse_optimize_stmt): Likewise.
+ (dse_record_phis): Likewise.
+ (dse_finalize_block): Likewise.
+ * tree-ssa-loop-im.c (outermost_invariant_loop_expr): Avoid using C++
+ keywords as variable names.
+ (may_move_till): Cast according to the coding conventions.
+ (force_move_till_expr): Avoid using C++ keywords as variable names.
+ (force_move_till): Cast according to the coding conventions.
+ (memref_hash): Likewise.
+ (memref_eq): Likewise.
+ (gather_mem_refs_stmt): Likewise.
+ * tree-ssa-loop-ivopts.c (contains_abnormal_ssa_name_p): Avoid using C++
+ keywords as variable names.
+ (idx_find_step): Cast according to the coding conventions.
+ (idx_record_use): Likewise.
+ (find_depends): Likewise.
+ (prepare_decl_rtl): Likewise.
+ (mbc_entry_hash): Likewise.
+ (mbc_entry_eq): Likewise.
+ * tree-ssa-loop-niter.c (SWAP): Use the correct type for tmp.
+ (simplify_replace_tree): Avoid using C++ keywords as variable names.
+ (idx_infer_loop_bounds): Cast according to the coding conventions.
+ * tree-ssa-loop-prefetch.c (idx_analyze_ref): Likewise.
+ * tree-ssa-math-opts.c (occ_new): Likewise.
+ * tree-ssanames.c (duplicate_ssa_name_ptr_info): Use type safe memory
+ macros.
+ * tree-ssa-operands.c (add_def_op): Avoid using C++ keywords as variable
+ names.
+ (add_use_op): Likewise.
+ (add_vop): Likewise.
+ (add_vuse_op): Likewise.
+ (add_vdef_op): Likewise.
+ (get_expr_operands): Likewise.
+ (push_stmt_changes): Use type safe memory macros.
+ * tree-ssa-phiopt.c (replace_phi_edge_with_variable): Avoid using C++
+ keywords as variable names.
+ (conditional_replacement): Likewise.
+ (minmax_replacement): Likewise.
+ (abs_replacement): Likewise.
+ * tree-ssa-pre.c (create_expression_by_pieces): Cast according to the
+ coding conventions.
+ (insert_fake_stores): Avoid using C++ keywords as variable names.
+ * tree-ssa-reassoc.c (add_to_ops_vec): Cast according to the coding
+ conventions.
+ * tree-ssa-structalias.c (heapvar_lookup): Likewise.
+ (heapvar_insert): Use type safe memory macros.
+ (new_var_info): Cast according to the coding conventions.
+ (new_constraint): Likewise.
+ (remove_preds_and_fake_succs): Use type safe memory macros.
+ * tree-ssa-threadupdate.c (thread_block): Cast according to the coding
+ conventions.
+ (thread_single_edge): Likewise.
+ (thread_through_loop_header): Likewise.
+
2007-06-06 Eric Christopher <echristo@apple.com>
* config/i386/i386.c (override_options): Move handling
static inline void
maybe_add_match (htab_t ref_map, struct tree_map *key)
{
- struct tree_map *found = htab_find (ref_map, key);
+ struct tree_map *found = (struct tree_map *) htab_find (ref_map, key);
if (found && !found->to)
found->to = key->to;
add_key (htab_t ht, tree t, alloc_pool references_pool)
{
void **slot;
- struct tree_map *tp = pool_alloc (references_pool);
+ struct tree_map *tp = (struct tree_map *) pool_alloc (references_pool);
tp->base.from = t;
tp->to = NULL_TREE;
case BUILT_IN_STRLEN:
if (val[0])
{
- tree new = fold_convert (TREE_TYPE (fn), val[0]);
+ tree new_val = fold_convert (TREE_TYPE (fn), val[0]);
/* If the result is not a valid gimple value, or not a cast
of a valid gimple value, then we can not use the result. */
- if (is_gimple_val (new)
- || (is_gimple_cast (new)
- && is_gimple_val (TREE_OPERAND (new, 0))))
- return new;
+ if (is_gimple_val (new_val)
+ || (is_gimple_cast (new_val)
+ && is_gimple_val (TREE_OPERAND (new_val, 0))))
+ return new_val;
}
break;
if (create && !pair)
{
gcc_assert (cl->sorted == NULL);
- pair = xmalloc (sizeof (struct coalesce_pair));
+ pair = XNEW (struct coalesce_pair);
pair->first_element = p.first_element;
pair->second_element = p.second_element;
pair->cost = 0;
{
cost_one_pair_p pair;
- pair = xmalloc (sizeof (struct cost_one_pair_d));
+ pair = XNEW (struct cost_one_pair_d);
pair->first_element = p1;
pair->second_element = p2;
pair->next = cl->cost_one_list;
they both share the same memory tags. */
void
-merge_alias_info (tree orig, tree new)
+merge_alias_info (tree orig_name, tree new_name)
{
- tree new_sym = SSA_NAME_VAR (new);
- tree orig_sym = SSA_NAME_VAR (orig);
+ tree new_sym = SSA_NAME_VAR (new_name);
+ tree orig_sym = SSA_NAME_VAR (orig_name);
var_ann_t new_ann = var_ann (new_sym);
var_ann_t orig_ann = var_ann (orig_sym);
/* No merging necessary when memory partitions are involved. */
- if (factoring_name_p (new))
+ if (factoring_name_p (new_name))
{
gcc_assert (!is_gimple_reg (orig_sym));
return;
}
- else if (factoring_name_p (orig))
+ else if (factoring_name_p (orig_name))
{
gcc_assert (!is_gimple_reg (new_sym));
return;
}
- gcc_assert (POINTER_TYPE_P (TREE_TYPE (orig)));
- gcc_assert (POINTER_TYPE_P (TREE_TYPE (new)));
+ gcc_assert (POINTER_TYPE_P (TREE_TYPE (orig_name)));
+ gcc_assert (POINTER_TYPE_P (TREE_TYPE (new_name)));
#if defined ENABLE_CHECKING
- gcc_assert (lang_hooks.types_compatible_p (TREE_TYPE (orig),
- TREE_TYPE (new)));
+ gcc_assert (lang_hooks.types_compatible_p (TREE_TYPE (orig_name),
+ TREE_TYPE (new_name)));
/* If the pointed-to alias sets are different, these two pointers
would never have the same memory tag. In this case, NEW should
Since we cannot distinguish one case from another in this
function, we can only make sure that if P_i and Q_j have
flow-sensitive information, they should be compatible. */
- if (SSA_NAME_PTR_INFO (orig) && SSA_NAME_PTR_INFO (new))
+ if (SSA_NAME_PTR_INFO (orig_name) && SSA_NAME_PTR_INFO (new_name))
{
- struct ptr_info_def *orig_ptr_info = SSA_NAME_PTR_INFO (orig);
- struct ptr_info_def *new_ptr_info = SSA_NAME_PTR_INFO (new);
+ struct ptr_info_def *orig_ptr_info = SSA_NAME_PTR_INFO (orig_name);
+ struct ptr_info_def *new_ptr_info = SSA_NAME_PTR_INFO (new_name);
/* Note that pointer NEW and ORIG may actually have different
pointed-to variables (e.g., PR 18291 represented in
walk_data->global_data = dummy_cond;
}
- thread_across_edge (walk_data->global_data, e, false,
+ thread_across_edge ((tree) walk_data->global_data, e, false,
&const_and_copies_stack,
simplify_stmt_for_jump_threading);
}
indx = e->dest_idx;
for ( ; phi; phi = PHI_CHAIN (phi))
{
- tree new;
+ tree new_val;
use_operand_p orig_p;
- tree orig;
+ tree orig_val;
/* The alternative may be associated with a constant, so verify
it is an SSA_NAME before doing anything with it. */
orig_p = PHI_ARG_DEF_PTR (phi, indx);
- orig = USE_FROM_PTR (orig_p);
- if (TREE_CODE (orig) != SSA_NAME)
+ orig_val = USE_FROM_PTR (orig_p);
+ if (TREE_CODE (orig_val) != SSA_NAME)
continue;
/* If we have *ORIG_P in our constant/copy table, then replace
ORIG_P with its value in our constant/copy table. */
- new = SSA_NAME_VALUE (orig);
- if (new
- && new != orig
- && (TREE_CODE (new) == SSA_NAME
- || is_gimple_min_invariant (new))
- && may_propagate_copy (orig, new))
- propagate_value (orig_p, new);
+ new_val = SSA_NAME_VALUE (orig_val);
+ if (new_val
+ && new_val != orig_val
+ && (TREE_CODE (new_val) == SSA_NAME
+ || is_gimple_min_invariant (new_val))
+ && may_propagate_copy (orig_val, new_val))
+ propagate_value (orig_p, new_val);
}
}
}
&& !is_gimple_reg (lhs))
{
tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
- tree new;
+ tree new_stmt;
/* FIXME: If the LHS of the assignment is a bitfield and the RHS
is a constant, we need to adjust the constant to fit into the
if (rhs)
{
/* Build a new statement with the RHS and LHS exchanged. */
- new = build_gimple_modify_stmt (rhs, lhs);
+ new_stmt = build_gimple_modify_stmt (rhs, lhs);
- create_ssa_artificial_load_stmt (new, stmt);
+ create_ssa_artificial_load_stmt (new_stmt, stmt);
/* Finally enter the statement into the available expression
table. */
- lookup_avail_expr (new, true);
+ lookup_avail_expr (new_stmt, true);
}
}
}
bool recycled)
{
struct dse_block_local_data *bd
- = VEC_last (void_p, walk_data->block_data_stack);
+ = (struct dse_block_local_data *)
+ VEC_last (void_p, walk_data->block_data_stack);
/* If we are given a recycled block local data structure, ensure any
bitmap associated with the block is cleared. */
memory_ssa_name_same (tree *expr_p, int *walk_subtrees ATTRIBUTE_UNUSED,
void *data)
{
- struct address_walk_data *walk_data = data;
+ struct address_walk_data *walk_data = (struct address_walk_data *) data;
tree expr = *expr_p;
tree def_stmt;
basic_block def_bb;
block_stmt_iterator bsi)
{
struct dse_block_local_data *bd
- = VEC_last (void_p, walk_data->block_data_stack);
- struct dse_global_data *dse_gd = walk_data->global_data;
+ = (struct dse_block_local_data *)
+ VEC_last (void_p, walk_data->block_data_stack);
+ struct dse_global_data *dse_gd
+ = (struct dse_global_data *) walk_data->global_data;
tree stmt = bsi_stmt (bsi);
stmt_ann_t ann = stmt_ann (stmt);
dse_record_phis (struct dom_walk_data *walk_data, basic_block bb)
{
struct dse_block_local_data *bd
- = VEC_last (void_p, walk_data->block_data_stack);
- struct dse_global_data *dse_gd = walk_data->global_data;
+ = (struct dse_block_local_data *)
+ VEC_last (void_p, walk_data->block_data_stack);
+ struct dse_global_data *dse_gd
+ = (struct dse_global_data *) walk_data->global_data;
tree phi;
for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
basic_block bb ATTRIBUTE_UNUSED)
{
struct dse_block_local_data *bd
- = VEC_last (void_p, walk_data->block_data_stack);
- struct dse_global_data *dse_gd = walk_data->global_data;
+ = (struct dse_block_local_data *)
+ VEC_last (void_p, walk_data->block_data_stack);
+ struct dse_global_data *dse_gd
+ = (struct dse_global_data *) walk_data->global_data;
bitmap stores = dse_gd->stores;
unsigned int i;
bitmap_iterator bi;
static struct loop *
outermost_invariant_loop_expr (tree expr, struct loop *loop)
{
- enum tree_code_class class = TREE_CODE_CLASS (TREE_CODE (expr));
+ enum tree_code_class codeclass = TREE_CODE_CLASS (TREE_CODE (expr));
unsigned i, nops;
struct loop *max_loop = superloop_at_depth (loop, 1), *aloop;
|| is_gimple_min_invariant (expr))
return outermost_invariant_loop (expr, loop);
- if (class != tcc_unary
- && class != tcc_binary
- && class != tcc_expression
- && class != tcc_vl_exp
- && class != tcc_comparison)
+ if (codeclass != tcc_unary
+ && codeclass != tcc_binary
+ && codeclass != tcc_expression
+ && codeclass != tcc_vl_exp
+ && codeclass != tcc_comparison)
return NULL;
nops = TREE_OPERAND_LENGTH (expr);
static bool
may_move_till (tree ref, tree *index, void *data)
{
- struct loop *loop = data, *max_loop;
+ struct loop *loop = (struct loop*) data, *max_loop;
/* If REF is an array reference, check also that the step and the lower
bound is invariant in LOOP. */
static void
force_move_till_expr (tree expr, struct loop *orig_loop, struct loop *loop)
{
- enum tree_code_class class = TREE_CODE_CLASS (TREE_CODE (expr));
+ enum tree_code_class codeclass = TREE_CODE_CLASS (TREE_CODE (expr));
unsigned i, nops;
if (TREE_CODE (expr) == SSA_NAME)
return;
}
- if (class != tcc_unary
- && class != tcc_binary
- && class != tcc_expression
- && class != tcc_vl_exp
- && class != tcc_comparison)
+ if (codeclass != tcc_unary
+ && codeclass != tcc_binary
+ && codeclass != tcc_expression
+ && codeclass != tcc_vl_exp
+ && codeclass != tcc_comparison)
return;
nops = TREE_OPERAND_LENGTH (expr);
force_move_till (tree ref, tree *index, void *data)
{
tree stmt;
- struct fmt_data *fmt_data = data;
+ struct fmt_data *fmt_data = (struct fmt_data *) data;
if (TREE_CODE (ref) == ARRAY_REF)
{
static hashval_t
memref_hash (const void *obj)
{
- const struct mem_ref *mem = obj;
-
- return mem->hash;
+ return ((const struct mem_ref *) obj)->hash;
}
/* An equality function for struct mem_ref object OBJ1 with
static int
memref_eq (const void *obj1, const void *obj2)
{
- const struct mem_ref *mem1 = obj1;
+ const struct mem_ref *mem1 = (const struct mem_ref *) obj1;
return operand_equal_p (mem1->mem, (tree) obj2, 0);
}
slot = htab_find_slot_with_hash (mem_refs, *mem, hash, INSERT);
if (*slot)
- ref = *slot;
+ ref = (struct mem_ref *) *slot;
else
{
ref = XNEW (struct mem_ref);
contains_abnormal_ssa_name_p (tree expr)
{
enum tree_code code;
- enum tree_code_class class;
+ enum tree_code_class codeclass;
if (!expr)
return false;
code = TREE_CODE (expr);
- class = TREE_CODE_CLASS (code);
+ codeclass = TREE_CODE_CLASS (code);
if (code == SSA_NAME)
return SSA_NAME_OCCURS_IN_ABNORMAL_PHI (expr) != 0;
idx_contains_abnormal_ssa_name_p,
NULL);
- switch (class)
+ switch (codeclass)
{
case tcc_binary:
case tcc_comparison:
*pointer_map_insert (data->niters, exit) = niter;
}
else
- niter = *slot;
+ niter = (tree) *slot;
return niter;
}
static bool
idx_find_step (tree base, tree *idx, void *data)
{
- struct ifs_ivopts_data *dta = data;
+ struct ifs_ivopts_data *dta = (struct ifs_ivopts_data *) data;
struct iv *iv;
tree step, iv_base, iv_step, lbound, off;
struct loop *loop = dta->ivopts_data->current_loop;
static bool
idx_record_use (tree base, tree *idx,
- void *data)
+ void *vdata)
{
+ struct ivopts_data *data = (struct ivopts_data *) vdata;
find_interesting_uses_op (data, *idx);
if (TREE_CODE (base) == ARRAY_REF)
{
static tree
find_depends (tree *expr_p, int *ws ATTRIBUTE_UNUSED, void *data)
{
- bitmap *depends_on = data;
+ bitmap *depends_on = (bitmap *) data;
struct version_info *info;
if (TREE_CODE (*expr_p) != SSA_NAME)
{
tree obj = NULL_TREE;
rtx x = NULL_RTX;
- int *regno = data;
+ int *regno = (int *) data;
switch (TREE_CODE (*expr_p))
{
static hashval_t
mbc_entry_hash (const void *entry)
{
- const struct mbc_entry *e = entry;
+ const struct mbc_entry *e = (const struct mbc_entry *) entry;
return 57 * (hashval_t) e->mode + (hashval_t) (e->cst % 877);
}
static int
mbc_entry_eq (const void *entry1, const void *entry2)
{
- const struct mbc_entry *e1 = entry1;
- const struct mbc_entry *e2 = entry2;
+ const struct mbc_entry *e1 = (const struct mbc_entry *) entry1;
+ const struct mbc_entry *e2 = (const struct mbc_entry *) entry2;
return (e1->mode == e2->mode
&& e1->cst == e2->cst);
#include "tree-inline.h"
#include "gmp.h"
-#define SWAP(X, Y) do { void *tmp = (X); (X) = (Y); (Y) = tmp; } while (0)
+#define SWAP(X, Y) do { affine_iv *tmp = (X); (X) = (Y); (Y) = tmp; } while (0)
/* The maximum number of dominator BBs we search for conditions
of loop header copies we use for simplifying a conditional
/* Substitute NEW for OLD in EXPR and fold the result. */
static tree
-simplify_replace_tree (tree expr, tree old, tree new)
+simplify_replace_tree (tree expr, tree old, tree new_tree)
{
unsigned i, n;
tree ret = NULL_TREE, e, se;
if (expr == old
|| operand_equal_p (expr, old, 0))
- return unshare_expr (new);
+ return unshare_expr (new_tree);
if (!EXPR_P (expr) && !GIMPLE_STMT_P (expr))
return expr;
for (i = 0; i < n; i++)
{
e = TREE_OPERAND (expr, i);
- se = simplify_replace_tree (e, old, new);
+ se = simplify_replace_tree (e, old, new_tree);
if (e == se)
continue;
static bool
idx_infer_loop_bounds (tree base, tree *idx, void *dta)
{
- struct ilb_data *data = dta;
+ struct ilb_data *data = (struct ilb_data *) dta;
tree ev, init, step;
tree low, high, type, next;
bool sign, upper = data->reliable, at_end = false;
static bool
idx_analyze_ref (tree base, tree *index, void *data)
{
- struct ar_data *ar_data = data;
+ struct ar_data *ar_data = (struct ar_data *) data;
tree ibase, step, stepsize;
HOST_WIDE_INT istep, idelta = 0, imult = 1;
affine_iv iv;
{
struct occurrence *occ;
- occ = bb->aux = pool_alloc (occ_pool);
+ bb->aux = occ = (struct occurrence *) pool_alloc (occ_pool);
memset (occ, 0, sizeof (struct occurrence));
occ->bb = bb;
static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
- def_optype_p new;
+ def_optype_p new_def;
- new = alloc_def ();
- DEF_OP_PTR (new) = op;
- last->next = new;
- new->next = NULL;
- return new;
+ new_def = alloc_def ();
+ DEF_OP_PTR (new_def) = op;
+ last->next = new_def;
+ new_def->next = NULL;
+ return new_def;
}
static inline use_optype_p
add_use_op (tree stmt, tree *op, use_optype_p last)
{
- use_optype_p new;
+ use_optype_p new_use;
- new = alloc_use ();
- USE_OP_PTR (new)->use = op;
- link_imm_use_stmt (USE_OP_PTR (new), *op, stmt);
- last->next = new;
- new->next = NULL;
- return new;
+ new_use = alloc_use ();
+ USE_OP_PTR (new_use)->use = op;
+ link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
+ last->next = new_use;
+ new_use->next = NULL;
+ return new_use;
}
static inline voptype_p
add_vop (tree stmt, tree op, int num, voptype_p prev)
{
- voptype_p new;
+ voptype_p new_vop;
int x;
- new = alloc_vop (num);
+ new_vop = alloc_vop (num);
for (x = 0; x < num; x++)
{
- VUSE_OP_PTR (new, x)->prev = NULL;
- SET_VUSE_OP (new, x, op);
- VUSE_OP_PTR (new, x)->use = &new->usev.uses[x].use_var;
- link_imm_use_stmt (VUSE_OP_PTR (new, x), new->usev.uses[x].use_var, stmt);
+ VUSE_OP_PTR (new_vop, x)->prev = NULL;
+ SET_VUSE_OP (new_vop, x, op);
+ VUSE_OP_PTR (new_vop, x)->use = &new_vop->usev.uses[x].use_var;
+ link_imm_use_stmt (VUSE_OP_PTR (new_vop, x),
+ new_vop->usev.uses[x].use_var, stmt);
}
if (prev)
- prev->next = new;
- new->next = NULL;
- return new;
+ prev->next = new_vop;
+ new_vop->next = NULL;
+ return new_vop;
}
static inline voptype_p
add_vuse_op (tree stmt, tree op, int num, voptype_p last)
{
- voptype_p new = add_vop (stmt, op, num, last);
- VDEF_RESULT (new) = NULL_TREE;
- return new;
+ voptype_p new_vop = add_vop (stmt, op, num, last);
+ VDEF_RESULT (new_vop) = NULL_TREE;
+ return new_vop;
}
static inline voptype_p
add_vdef_op (tree stmt, tree op, int num, voptype_p last)
{
- voptype_p new = add_vop (stmt, op, num, last);
- VDEF_RESULT (new) = op;
- return new;
+ voptype_p new_vop = add_vop (stmt, op, num, last);
+ VDEF_RESULT (new_vop) = op;
+ return new_vop;
}
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
enum tree_code code;
- enum tree_code_class class;
+ enum tree_code_class codeclass;
tree expr = *expr_p;
stmt_ann_t s_ann = stmt_ann (stmt);
return;
code = TREE_CODE (expr);
- class = TREE_CODE_CLASS (code);
+ codeclass = TREE_CODE_CLASS (code);
switch (code)
{
return;
default:
- if (class == tcc_unary)
+ if (codeclass == tcc_unary)
goto do_unary;
- if (class == tcc_binary || class == tcc_comparison)
+ if (codeclass == tcc_binary || codeclass == tcc_comparison)
goto do_binary;
- if (class == tcc_constant || class == tcc_type)
+ if (codeclass == tcc_constant || codeclass == tcc_type)
return;
}
if (TREE_CODE (stmt) == PHI_NODE)
return;
- buf = xmalloc (sizeof *buf);
+ buf = XNEW (struct scb_d);
memset (buf, 0, sizeof *buf);
buf->stmt_p = stmt_p;
static void
replace_phi_edge_with_variable (basic_block cond_block,
- edge e, tree phi, tree new)
+ edge e, tree phi, tree new_tree)
{
basic_block bb = bb_for_stmt (phi);
basic_block block_to_remove;
block_stmt_iterator bsi;
/* Change the PHI argument to new. */
- SET_USE (PHI_ARG_DEF_PTR (phi, e->dest_idx), new);
+ SET_USE (PHI_ARG_DEF_PTR (phi, e->dest_idx), new_tree);
/* Remove the empty basic block. */
if (EDGE_SUCC (cond_block, 0)->dest == bb)
{
tree result;
tree old_result = NULL;
- tree new, cond;
+ tree new_stmt, cond;
block_stmt_iterator bsi;
edge true_edge, false_edge;
tree new_var = NULL;
|| (e1 == true_edge && integer_onep (arg1))
|| (e1 == false_edge && integer_zerop (arg1)))
{
- new = build_gimple_modify_stmt (new_var1, cond);
+ new_stmt = build_gimple_modify_stmt (new_var1, cond);
}
else
{
tmp = create_tmp_var (TREE_TYPE (op0), NULL);
add_referenced_var (tmp);
cond_tmp = make_ssa_name (tmp, NULL);
- new = build_gimple_modify_stmt (cond_tmp, op0);
- SSA_NAME_DEF_STMT (cond_tmp) = new;
+ new_stmt = build_gimple_modify_stmt (cond_tmp, op0);
+ SSA_NAME_DEF_STMT (cond_tmp) = new_stmt;
- bsi_insert_after (&bsi, new, BSI_NEW_STMT);
+ bsi_insert_after (&bsi, new_stmt, BSI_NEW_STMT);
cond = fold_convert (TREE_TYPE (result), cond_tmp);
}
- new = build_gimple_modify_stmt (new_var1, cond);
+ new_stmt = build_gimple_modify_stmt (new_var1, cond);
}
- bsi_insert_after (&bsi, new, BSI_NEW_STMT);
+ bsi_insert_after (&bsi, new_stmt, BSI_NEW_STMT);
- SSA_NAME_DEF_STMT (new_var1) = new;
+ SSA_NAME_DEF_STMT (new_var1) = new_stmt;
replace_phi_edge_with_variable (cond_bb, e1, phi, new_var1);
tree arg0, tree arg1)
{
tree result, type;
- tree cond, new;
+ tree cond, new_stmt;
edge true_edge, false_edge;
enum tree_code cmp, minmax, ass_code;
tree smaller, larger, arg_true, arg_false;
/* Emit the statement to compute min/max. */
result = duplicate_ssa_name (PHI_RESULT (phi), NULL);
- new = build_gimple_modify_stmt (result, build2 (minmax, type, arg0, arg1));
- SSA_NAME_DEF_STMT (result) = new;
+ new_stmt = build_gimple_modify_stmt (result, build2 (minmax, type, arg0, arg1));
+ SSA_NAME_DEF_STMT (result) = new_stmt;
bsi = bsi_last (cond_bb);
- bsi_insert_before (&bsi, new, BSI_NEW_STMT);
+ bsi_insert_before (&bsi, new_stmt, BSI_NEW_STMT);
replace_phi_edge_with_variable (cond_bb, e1, phi, result);
return true;
tree phi, tree arg0, tree arg1)
{
tree result;
- tree new, cond;
+ tree new_stmt, cond;
block_stmt_iterator bsi;
edge true_edge, false_edge;
tree assign;
lhs = result;
/* Build the modify expression with abs expression. */
- new = build_gimple_modify_stmt (lhs,
- build1 (ABS_EXPR, TREE_TYPE (lhs), rhs));
- SSA_NAME_DEF_STMT (lhs) = new;
+ new_stmt = build_gimple_modify_stmt (lhs,
+ build1 (ABS_EXPR, TREE_TYPE (lhs), rhs));
+ SSA_NAME_DEF_STMT (lhs) = new_stmt;
bsi = bsi_last (cond_bb);
- bsi_insert_before (&bsi, new, BSI_NEW_STMT);
+ bsi_insert_before (&bsi, new_stmt, BSI_NEW_STMT);
if (negate)
{
/* Get the right BSI. We want to insert after the recently
added ABS_EXPR statement (which we know is the first statement
in the block. */
- new = build_gimple_modify_stmt (result,
- build1 (NEGATE_EXPR, TREE_TYPE (lhs),
- lhs));
- SSA_NAME_DEF_STMT (result) = new;
+ new_stmt = build_gimple_modify_stmt (result,
+ build1 (NEGATE_EXPR, TREE_TYPE (lhs),
+ lhs));
+ SSA_NAME_DEF_STMT (result) = new_stmt;
- bsi_insert_after (&bsi, new, BSI_NEW_STMT);
+ bsi_insert_after (&bsi, new_stmt, BSI_NEW_STMT);
}
replace_phi_edge_with_variable (cond_bb, e1, phi, result);
genfn = find_or_generate_expression (block, fn, stmts);
nargs = call_expr_nargs (expr);
- buffer = alloca (nargs * sizeof (tree));
+ buffer = (tree*) alloca (nargs * sizeof (tree));
for (i = 0; i < nargs; i++)
{
def_operand_p defp;
tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
- tree new;
+ tree new_tree;
bool notokay = false;
FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_VIRTUAL_DEFS)
get_var_ann (storetemp);
}
- new = poolify_modify_stmt (storetemp, lhs);
+ new_tree = poolify_modify_stmt (storetemp, lhs);
- lhs = make_ssa_name (storetemp, new);
- GIMPLE_STMT_OPERAND (new, 0) = lhs;
- create_ssa_artificial_load_stmt (new, stmt);
+ lhs = make_ssa_name (storetemp, new_tree);
+ GIMPLE_STMT_OPERAND (new_tree, 0) = lhs;
+ create_ssa_artificial_load_stmt (new_tree, stmt);
- NECESSARY (new) = 0;
- VEC_safe_push (tree, heap, inserted_exprs, new);
- VEC_safe_push (tree, heap, need_creation, new);
- bsi_insert_after (&bsi, new, BSI_NEW_STMT);
+ NECESSARY (new_tree) = 0;
+ VEC_safe_push (tree, heap, inserted_exprs, new_tree);
+ VEC_safe_push (tree, heap, need_creation, new_tree);
+ bsi_insert_after (&bsi, new_tree, BSI_NEW_STMT);
}
}
}
static void
add_to_ops_vec (VEC(operand_entry_t, heap) **ops, tree op)
{
- operand_entry_t oe = pool_alloc (operand_entry_pool);
+ operand_entry_t oe = (operand_entry_t) pool_alloc (operand_entry_pool);
oe->op = op;
oe->rank = get_rank (op);
struct tree_map *h, in;
in.base.from = from;
- h = htab_find_with_hash (heapvar_for_stmt, &in, htab_hash_pointer (from));
+ h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
+ htab_hash_pointer (from));
if (h)
return h->to;
return NULL_TREE;
struct tree_map *h;
void **loc;
- h = ggc_alloc (sizeof (struct tree_map));
+ h = GGC_NEW (struct tree_map);
h->hash = htab_hash_pointer (from);
h->base.from = from;
h->to = to;
static varinfo_t
new_var_info (tree t, unsigned int id, const char *name)
{
- varinfo_t ret = pool_alloc (variable_info_pool);
+ varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
ret->id = id;
ret->name = name;
new_constraint (const struct constraint_expr lhs,
const struct constraint_expr rhs)
{
- constraint_t ret = pool_alloc (constraint_pool);
+ constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
ret->lhs = lhs;
ret->rhs = rhs;
return ret;
/* Now reallocate the size of the successor list as, and blow away
the predecessor bitmaps. */
graph->size = VEC_length (varinfo_t, varmap);
- graph->succs = xrealloc (graph->succs, graph->size * sizeof (bitmap));
+ graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
free (graph->implicit_preds);
graph->implicit_preds = NULL;
if (loop->header == bb)
{
e = loop_latch_edge (loop);
- e2 = e->aux;
+ e2 = (edge) e->aux;
if (e2 && loop_exit_edge_p (loop, e2))
{
efficient lookups. */
FOR_EACH_EDGE (e, ei, bb->preds)
{
- e2 = e->aux;
+ e2 = (edge) e->aux;
if (!e2
/* If NOLOOP_ONLY is true, we only allow threading through the
}
update_bb_profile_for_threading (e->dest, EDGE_FREQUENCY (e),
- e->count, e->aux);
+ e->count, (edge) e->aux);
/* Insert the outgoing edge into the hash table if it is not
already in the hash table. */
DO_NOT_DUPLICATE attribute. */
if (all)
{
- edge e = EDGE_PRED (bb, 0)->aux;
+ edge e = (edge) EDGE_PRED (bb, 0)->aux;
lookup_redirection_data (e, NULL, NO_INSERT)->do_not_duplicate = true;
}
thread_single_edge (edge e)
{
basic_block bb = e->dest;
- edge eto = e->aux;
+ edge eto = (edge) e->aux;
struct redirection_data rd;
struct local_info local_info;
if (latch->aux)
{
- tgt_edge = latch->aux;
+ tgt_edge = (edge) latch->aux;
tgt_bb = tgt_edge->dest;
}
else if (!may_peel_loop_headers
goto fail;
}
- tgt_edge = e->aux;
+ tgt_edge = (edge) e->aux;
atgt_bb = tgt_edge->dest;
if (!tgt_bb)
tgt_bb = atgt_bb;
void
init_tree_ssa (void)
{
- cfun->gimple_df = ggc_alloc_cleared (sizeof (struct gimple_df));
+ cfun->gimple_df = GGC_CNEW (struct gimple_df);
cfun->gimple_df->referenced_vars = htab_create_ggc (20, int_tree_map_hash,
int_tree_map_eq, NULL);
cfun->gimple_df->default_defs = htab_create_ggc (20, int_tree_map_hash,
if (!ptr_info)
return;
- new_ptr_info = ggc_alloc (sizeof (struct ptr_info_def));
+ new_ptr_info = GGC_NEW (struct ptr_info_def);
*new_ptr_info = *ptr_info;
if (ptr_info->pt_vars)