#include "tree-flow-inline.h"
#include "langhooks.h"
#include "hashtab.h"
-#include "toplev.h"
#include "flags.h"
#include "ggc.h"
#include "debug.h"
initial address and index of each dimension. */
struct access_site_info
{
- /* The statement (INDIRECT_REF or POINTER_PLUS_EXPR). */
+ /* The statement (MEM_REF or POINTER_PLUS_EXPR). */
gimple stmt;
/* In case of POINTER_PLUS_EXPR, what is the offset. */
/* The variable whose accesses in the tree we are looking for. */
tree ssa_var;
/* The tree and code inside it the ssa_var is accessed, currently
- it could be an INDIRECT_REF or CALL_EXPR. */
+ it could be a MEM_REF or CALL_EXPR. */
enum tree_code t_code;
tree t_tree;
/* The place in the containing tree. */
static bool
may_flatten_matrices_1 (gimple stmt)
{
- tree t;
-
switch (gimple_code (stmt))
{
case GIMPLE_ASSIGN:
- if (!gimple_assign_cast_p (stmt))
+ case GIMPLE_CALL:
+ if (!gimple_has_lhs (stmt))
return true;
-
- t = gimple_assign_rhs1 (stmt);
- while (CONVERT_EXPR_P (t))
+ if (TREE_CODE (TREE_TYPE (gimple_get_lhs (stmt))) == VECTOR_TYPE)
{
- if (TREE_TYPE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
- {
- tree pointee;
-
- pointee = TREE_TYPE (t);
- while (POINTER_TYPE_P (pointee))
- pointee = TREE_TYPE (pointee);
- if (TREE_CODE (pointee) == VECTOR_TYPE)
- {
- if (dump_file)
- fprintf (dump_file,
- "Found vector type, don't flatten matrix\n");
- return false;
- }
- }
- t = TREE_OPERAND (t, 0);
+ if (dump_file)
+ fprintf (dump_file,
+ "Found vector type, don't flatten matrix\n");
+ return false;
}
break;
case GIMPLE_ASM:
basic_block bb;
gimple_stmt_iterator gsi;
- decl = node->decl;
+ decl = node->symbol.decl;
if (node->analyzed)
{
func = DECL_STRUCT_FUNCTION (decl);
if (!mat)
return;
- if (mat->free_stmts)
- free (mat->free_stmts);
- if (mat->dim_hot_level)
- free (mat->dim_hot_level);
- if (mat->malloc_for_level)
- free (mat->malloc_for_level);
+ free (mat->free_stmts);
+ free (mat->dim_hot_level);
+ free (mat->malloc_for_level);
}
/* Find all potential matrices.
/* For every global variable in the program:
Check to see if it's of a candidate type and record it. */
- for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
+ FOR_EACH_DEFINED_VARIABLE (vnode)
{
- tree var_decl = vnode->decl;
+ tree var_decl = vnode->symbol.decl;
if (!var_decl || TREE_CODE (var_decl) != VAR_DECL)
continue;
/* Find if the SSA variable is accessed inside the
tree and record the tree containing it.
The only relevant uses are the case of SSA_NAME, or SSA inside
- INDIRECT_REF, PLUS_EXPR, POINTER_PLUS_EXPR, MULT_EXPR. */
+ MEM_REF, PLUS_EXPR, POINTER_PLUS_EXPR, MULT_EXPR. */
static void
ssa_accessed_in_tree (tree t, struct ssa_acc_in_tree *a)
{
if (t == a->ssa_var)
a->var_found = true;
break;
- case INDIRECT_REF:
+ case MEM_REF:
if (SSA_VAR_P (TREE_OPERAND (t, 0))
&& TREE_OPERAND (t, 0) == a->ssa_var)
a->var_found = true;
tree op1, op2;
case SSA_NAME:
- case INDIRECT_REF:
+ case MEM_REF:
CASE_CONVERT:
case VIEW_CONVERT_EXPR:
ssa_accessed_in_tree (gimple_assign_rhs1 (stmt), a);
must be set accordingly. */
for (min_malloc_level = 0;
min_malloc_level < mi->max_malloced_level
- && mi->malloc_for_level[min_malloc_level]; min_malloc_level++);
+ && mi->malloc_for_level[min_malloc_level]; min_malloc_level++)
+ ;
if (level < min_malloc_level)
{
mi->allocation_function_decl = current_function_decl;
}
/* The transposing decision making.
- In order to to calculate the profitability of transposing, we collect two
+ In order to calculate the profitability of transposing, we collect two
types of information regarding the accesses:
1. profiling information used to express the hotness of an access, that
is how often the matrix is accessed by this access site (count of the
{
if (mi->access_l)
{
- for (i = 0;
- VEC_iterate (access_site_info_p, mi->access_l, i, acc_info);
- i++)
+ FOR_EACH_VEC_ELT (access_site_info_p, mi->access_l, i, acc_info)
free (acc_info);
VEC_free (access_site_info_p, heap, mi->access_l);
/* update MI->dimension_type_size[CURRENT_INDIRECT_LEVEL] with the size
of the type related to the SSA_VAR, or the type related to the
- lhs of STMT, in the case that it is an INDIRECT_REF. */
+ lhs of STMT, in the case that it is a MEM_REF. */
static void
update_type_size (struct matrix_info *mi, gimple stmt, tree ssa_var,
int current_indirect_level)
tree lhs;
HOST_WIDE_INT type_size;
- /* Update type according to the type of the INDIRECT_REF expr. */
+ /* Update type according to the type of the MEM_REF expr. */
if (is_gimple_assign (stmt)
- && TREE_CODE (gimple_assign_lhs (stmt)) == INDIRECT_REF)
+ && TREE_CODE (gimple_assign_lhs (stmt)) == MEM_REF)
{
lhs = gimple_assign_lhs (stmt);
gcc_assert (POINTER_TYPE_P
at this level because in this case we cannot calculate the
address correctly. */
if ((lhs_acc.var_found && rhs_acc.var_found
- && lhs_acc.t_code == INDIRECT_REF)
+ && lhs_acc.t_code == MEM_REF)
|| (!rhs_acc.var_found && !lhs_acc.var_found))
{
mark_min_matrix_escape_level (mi, current_indirect_level, use_stmt);
{
int l = current_indirect_level + 1;
- gcc_assert (lhs_acc.t_code == INDIRECT_REF);
+ gcc_assert (lhs_acc.t_code == MEM_REF);
mark_min_matrix_escape_level (mi, l, use_stmt);
return current_indirect_level;
}
at this level because in this case we cannot calculate the
address correctly. */
if ((lhs_acc.var_found && rhs_acc.var_found
- && lhs_acc.t_code == INDIRECT_REF)
+ && lhs_acc.t_code == MEM_REF)
|| (!rhs_acc.var_found && !lhs_acc.var_found))
{
mark_min_matrix_escape_level (mi, current_indirect_level, use_stmt);
{
int l = current_indirect_level + 1;
- gcc_assert (lhs_acc.t_code == INDIRECT_REF);
+ gcc_assert (lhs_acc.t_code == MEM_REF);
if (!(gimple_assign_copy_p (use_stmt)
|| gimple_assign_cast_p (use_stmt))
is used. */
if (rhs_acc.var_found)
{
- if (rhs_acc.t_code != INDIRECT_REF
+ if (rhs_acc.t_code != MEM_REF
&& rhs_acc.t_code != POINTER_PLUS_EXPR && rhs_acc.t_code != SSA_NAME)
{
mark_min_matrix_escape_level (mi, current_indirect_level, use_stmt);
}
/* If the access in the RHS has an indirection increase the
indirection level. */
- if (rhs_acc.t_code == INDIRECT_REF)
+ if (rhs_acc.t_code == MEM_REF)
{
if (record_accesses)
record_access_alloc_site_info (mi, use_stmt, NULL_TREE,
}
/* If we are storing this level of indirection mark it as
escaping. */
- if (lhs_acc.t_code == INDIRECT_REF || TREE_CODE (lhs) != SSA_NAME)
+ if (lhs_acc.t_code == MEM_REF || TREE_CODE (lhs) != SSA_NAME)
{
int l = current_indirect_level;
return;
/* Now go over the uses of the SSA_NAME and check how it is used in
- each one of them. We are mainly looking for the pattern INDIRECT_REF,
- then a POINTER_PLUS_EXPR, then INDIRECT_REF etc. while in between there could
+ each one of them. We are mainly looking for the pattern MEM_REF,
+ then a POINTER_PLUS_EXPR, then MEM_REF etc. while in between there could
be any number of copies and casts. */
gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
gimple new_stmt;
gcc_assert (gimple_assign_rhs_code (acc_info->stmt)
- == INDIRECT_REF);
+ == MEM_REF);
/* Emit convert statement to convert to type of use. */
tmp = create_tmp_var (TREE_TYPE (lhs), "new");
add_referenced_var (tmp);
continue;
}
code = gimple_assign_rhs_code (acc_info->stmt);
- if (code == INDIRECT_REF
+ if (code == MEM_REF
&& acc_info->level < min_escape_l - 1)
{
- /* Replace the INDIRECT_REF with NOP (cast) usually we are casting
+ /* Replace the MEM_REF with NOP (cast) usually we are casting
from "pointer to type" to "type". */
tree t =
build1 (NOP_EXPR, TREE_TYPE (gimple_assign_rhs1 (acc_info->stmt)),
num_elements =
fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, acc_info->index),
fold_convert (sizetype, d_size));
- add_referenced_var (d_size);
gsi = gsi_for_stmt (acc_info->stmt);
tmp1 = force_gimple_operand_gsi (&gsi, num_elements, true,
NULL, true, GSI_SAME_STMT);
true, GSI_SAME_STMT);
/* GLOBAL_HOLDING_THE_SIZE = DIM_SIZE. */
stmt = gimple_build_assign (dim_var, dim_size);
- mark_symbols_for_renaming (stmt);
gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
prev_dim_size = mi->dimension_size[i] = dim_var;
update_ssa (TODO_update_ssa);
/* Replace the malloc size argument in the malloc of level 0 to be
the size of all the dimensions. */
- c_node = cgraph_node (mi->allocation_function_decl);
+ c_node = cgraph_get_node (mi->allocation_function_decl);
+ gcc_checking_assert (c_node);
old_size_0 = gimple_call_arg (call_stmt_0, 0);
tmp = force_gimple_operand_gsi (&gsi, mi->dimension_size[0], true,
NULL, true, GSI_SAME_STMT);
for (i = 1; i < mi->min_indirect_level_escape; i++)
{
gimple_stmt_iterator gsi;
- gimple use_stmt1 = NULL;
gimple call_stmt = mi->malloc_for_level[i];
gcc_assert (is_gimple_call (call_stmt));
gsi = gsi_for_stmt (call_stmt);
/* Remove the call stmt. */
gsi_remove (&gsi, true);
- /* remove the type cast stmt. */
- FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter,
- gimple_call_lhs (call_stmt))
- {
- use_stmt1 = use_stmt;
- gsi = gsi_for_stmt (use_stmt);
- gsi_remove (&gsi, true);
- }
/* Remove the assignment of the allocated area. */
FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter,
- gimple_get_lhs (use_stmt1))
+ gimple_call_lhs (call_stmt))
{
gsi = gsi_for_stmt (use_stmt);
gsi_remove (&gsi, true);
if (!mi->free_stmts[i].stmt)
continue;
- c_node = cgraph_node (mi->free_stmts[i].func);
+ c_node = cgraph_get_node (mi->free_stmts[i].func);
+ gcc_checking_assert (c_node);
gcc_assert (is_gimple_call (mi->free_stmts[i].stmt));
e = cgraph_edge (c_node, mi->free_stmts[i].stmt);
gcc_assert (e);
else
check_transpose_p = false;
/* If there are hand written vectors, we skip this optimization. */
- for (node = cgraph_nodes; node; node = node->next)
+ FOR_EACH_FUNCTION (node)
if (!may_flatten_matrices (node))
return 0;
matrices_to_reorg = htab_create (37, mtt_info_hash, mtt_info_eq, mat_free);
/* Find and record all potential matrices in the program. */
find_matrices_decl ();
/* Analyze the accesses of the matrices (escaping analysis). */
- for (node = cgraph_nodes; node; node = node->next)
- if (node->analyzed)
- {
- tree temp_fn;
+ FOR_EACH_DEFINED_FUNCTION (node)
+ {
+ tree temp_fn;
- temp_fn = current_function_decl;
- current_function_decl = node->decl;
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
- bitmap_obstack_initialize (NULL);
- gimple_register_cfg_hooks ();
+ temp_fn = current_function_decl;
+ current_function_decl = node->symbol.decl;
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
+ bitmap_obstack_initialize (NULL);
+ gimple_register_cfg_hooks ();
- if (!gimple_in_ssa_p (cfun))
- {
- free_dominance_info (CDI_DOMINATORS);
- free_dominance_info (CDI_POST_DOMINATORS);
- pop_cfun ();
- current_function_decl = temp_fn;
- bitmap_obstack_release (NULL);
+ if (!gimple_in_ssa_p (cfun))
+ {
+ free_dominance_info (CDI_DOMINATORS);
+ free_dominance_info (CDI_POST_DOMINATORS);
+ pop_cfun ();
+ current_function_decl = temp_fn;
+ bitmap_obstack_release (NULL);
- return 0;
- }
+ return 0;
+ }
#ifdef ENABLE_CHECKING
- verify_flow_info ();
+ verify_flow_info ();
#endif
- if (!matrices_to_reorg)
- {
- free_dominance_info (CDI_DOMINATORS);
- free_dominance_info (CDI_POST_DOMINATORS);
- pop_cfun ();
- current_function_decl = temp_fn;
- bitmap_obstack_release (NULL);
+ if (!matrices_to_reorg)
+ {
+ free_dominance_info (CDI_DOMINATORS);
+ free_dominance_info (CDI_POST_DOMINATORS);
+ pop_cfun ();
+ current_function_decl = temp_fn;
+ bitmap_obstack_release (NULL);
- return 0;
- }
+ return 0;
+ }
- /* Create htap for phi nodes. */
- htab_mat_acc_phi_nodes = htab_create (37, mat_acc_phi_hash,
- mat_acc_phi_eq, free);
- if (!check_transpose_p)
- find_sites_in_func (false);
- else
- {
- find_sites_in_func (true);
- loop_optimizer_init (LOOPS_NORMAL);
- if (current_loops)
- scev_initialize ();
- htab_traverse (matrices_to_reorg, analyze_transpose, NULL);
- if (current_loops)
- {
- scev_finalize ();
- loop_optimizer_finalize ();
- current_loops = NULL;
- }
- }
- /* If the current function is the allocation function for any of
- the matrices we check its allocation and the escaping level. */
- htab_traverse (matrices_to_reorg, check_allocation_function, NULL);
- free_dominance_info (CDI_DOMINATORS);
- free_dominance_info (CDI_POST_DOMINATORS);
- pop_cfun ();
- current_function_decl = temp_fn;
- bitmap_obstack_release (NULL);
- }
+ /* Create htab for phi nodes. */
+ htab_mat_acc_phi_nodes = htab_create (37, mat_acc_phi_hash,
+ mat_acc_phi_eq, free);
+ if (!check_transpose_p)
+ find_sites_in_func (false);
+ else
+ {
+ find_sites_in_func (true);
+ loop_optimizer_init (LOOPS_NORMAL);
+ if (current_loops)
+ scev_initialize ();
+ htab_traverse (matrices_to_reorg, analyze_transpose, NULL);
+ if (current_loops)
+ {
+ scev_finalize ();
+ loop_optimizer_finalize ();
+ current_loops = NULL;
+ }
+ }
+ /* If the current function is the allocation function for any of
+ the matrices we check its allocation and the escaping level. */
+ htab_traverse (matrices_to_reorg, check_allocation_function, NULL);
+ free_dominance_info (CDI_DOMINATORS);
+ free_dominance_info (CDI_POST_DOMINATORS);
+ pop_cfun ();
+ current_function_decl = temp_fn;
+ bitmap_obstack_release (NULL);
+ }
htab_traverse (matrices_to_reorg, transform_allocation_sites, NULL);
/* Now transform the accesses. */
- for (node = cgraph_nodes; node; node = node->next)
- if (node->analyzed)
- {
- /* Remember that allocation sites have been handled. */
- tree temp_fn;
-
- temp_fn = current_function_decl;
- current_function_decl = node->decl;
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
- bitmap_obstack_initialize (NULL);
- gimple_register_cfg_hooks ();
- record_all_accesses_in_func ();
- htab_traverse (matrices_to_reorg, transform_access_sites, NULL);
- cgraph_rebuild_references ();
- free_dominance_info (CDI_DOMINATORS);
- free_dominance_info (CDI_POST_DOMINATORS);
- pop_cfun ();
- current_function_decl = temp_fn;
- bitmap_obstack_release (NULL);
- }
+ FOR_EACH_DEFINED_FUNCTION (node)
+ {
+ /* Remember that allocation sites have been handled. */
+ tree temp_fn;
+
+ temp_fn = current_function_decl;
+ current_function_decl = node->symbol.decl;
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
+ bitmap_obstack_initialize (NULL);
+ gimple_register_cfg_hooks ();
+ record_all_accesses_in_func ();
+ htab_traverse (matrices_to_reorg, transform_access_sites, NULL);
+ cgraph_rebuild_references ();
+ free_dominance_info (CDI_DOMINATORS);
+ free_dominance_info (CDI_POST_DOMINATORS);
+ pop_cfun ();
+ current_function_decl = temp_fn;
+ bitmap_obstack_release (NULL);
+ }
htab_traverse (matrices_to_reorg, dump_matrix_reorg_analysis, NULL);
current_function_decl = NULL;
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_cgraph | TODO_dump_func /* todo_flags_finish */
+ TODO_dump_symtab /* todo_flags_finish */
}
};