/* Operand is having its address taken. */
#define opf_address_taken (1 << 5)
-/* Array for building all the use operands. */
-static vec<tree *> build_uses;
+/* Class containing temporary per-stmt state. */
-/* The built VDEF operand. */
-static tree build_vdef;
+class operands_scanner
+{
+ public:
+ operands_scanner (struct function *fun, gimple *statement)
+ {
+ build_vuse = NULL_TREE;
+ build_vdef = NULL_TREE;
+ fn = fun;
+ stmt = statement;
+ }
+
+ /* Create an operands cache for STMT. */
+ void build_ssa_operands ();
+
+ /* Verifies SSA statement operands. */
+ DEBUG_FUNCTION bool verify_ssa_operands ();
+
+ private:
+ /* Disable copy and assign of this class: copying would duplicate the
+ embedded storage of the build_uses auto_vec, which must stay unique. */
+ DISABLE_COPY_AND_ASSIGN (operands_scanner);
+
+ /* Array for building all the use operands. */
+ auto_vec<tree *, 16> build_uses;
+
+ /* The built VDEF operand. */
+ tree build_vdef;
+
+ /* The built VUSE operand. */
+ tree build_vuse;
+
+ /* Function which STMT belongs to. */
+ struct function *fn;
+
+ /* Statement to work on. */
+ gimple *stmt;
+
+ /* Takes elements from build_uses and turns them into use operands of STMT. */
+ void finalize_ssa_uses ();
+
+ /* Clear the in_list bits and empty the build array for VDEFs and
+ VUSEs. */
+ void cleanup_build_arrays ();
+
+ /* Finalize all the build vectors and fill the operand cache of STMT. */
+ void finalize_ssa_stmt_operands ();
+
+ /* Start the process of building up the statement operand vectors. */
+ void start_ssa_stmt_operands ();
+
+ /* Add USE_P to the list of pointers to operands. */
+ void append_use (tree *use_p);
-/* The built VUSE operand. */
-static tree build_vuse;
+ /* Add VAR to the set of variables that require a VDEF operator. */
+ void append_vdef (tree var);
-/* Bitmap obstack for our datastructures that needs to survive across
- compilations of multiple functions. */
-static bitmap_obstack operands_bitmap_obstack;
+ /* Add VAR to the set of variables that require a VUSE operator. */
+ void append_vuse (tree var);
-static void get_expr_operands (struct function *, gimple *, tree *, int);
+ /* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */
+ void add_virtual_operand (int flags);
-/* Number of functions with initialized ssa_operands. */
-static int n_initialized = 0;
+
+ /* Add *VAR_P to the appropriate operand array for statement STMT.
+ FLAGS is as in get_expr_operands. If *VAR_P is a GIMPLE register,
+ it will be added to the statement's real operands, otherwise it is
+ added to virtual operands. */
+ void add_stmt_operand (tree *var_p, int flags);
+
+ /* A subroutine of get_expr_operands to handle MEM_REF.
+
+ STMT is the statement being processed, EXPR is the MEM_REF
+ that got us here.
+
+ FLAGS is as in get_expr_operands. */
+ void get_mem_ref_operands (tree expr, int flags);
+
+ /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
+ void get_tmr_operands (tree expr, int flags);
+
+
+ /* If STMT is a call that may clobber globals and other symbols that
+ escape, add them to the VDEF/VUSE lists for it. */
+ void maybe_add_call_vops (gcall *stmt);
+
+ /* Scan operands in the GIMPLE_ASM statement STMT. */
+ void get_asm_stmt_operands (gasm *stmt);
+
+
+ /* Recursively scan the expression pointed to by EXPR_P in statement
+ STMT. FLAGS is one of the OPF_* constants modifying how to
+ interpret the operands found. */
+ void get_expr_operands (tree *expr_p, int flags);
+
+ /* Parse STMT looking for operands. When finished, the various
+ build_* operand vectors will have potential operands in them. */
+ void parse_ssa_operands ();
+
+
+ /* Takes elements from build_defs and turns them into def operands of STMT.
+ TODO -- Make build_defs vec of tree *. */
+ void finalize_ssa_defs ();
+};
/* Accessor to tree-ssa-operands.c caches. */
static inline struct ssa_operands *
void
init_ssa_operands (struct function *fn)
{
- if (!n_initialized++)
- {
- build_uses.create (10);
- build_vuse = NULL_TREE;
- build_vdef = NULL_TREE;
- bitmap_obstack_initialize (&operands_bitmap_obstack);
- }
-
gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
gimple_ssa_operands (fn)->operand_memory_index
= gimple_ssa_operands (fn)->ssa_operand_mem_size;
{
struct ssa_operand_memory_d *ptr;
- if (!--n_initialized)
- {
- build_uses.release ();
- build_vdef = NULL_TREE;
- build_vuse = NULL_TREE;
- }
-
gimple_ssa_operands (fn)->free_uses = NULL;
while ((ptr = gimple_ssa_operands (fn)->operand_memory) != NULL)
gimple_ssa_operands (fn)->ops_active = false;
- if (!n_initialized)
- bitmap_obstack_release (&operands_bitmap_obstack);
-
fn->gimple_df->vop = NULL_TREE;
}
/* Takes elements from build_defs and turns them into def operands of STMT.
TODO -- Make build_defs vec of tree *. */
-static inline void
-finalize_ssa_defs (struct function *fn, gimple *stmt)
+inline void
+operands_scanner::finalize_ssa_defs ()
{
/* Pre-pend the vdef we may have built. */
if (build_vdef != NULL_TREE)
/* Takes elements from build_uses and turns them into use operands of STMT. */
-static inline void
-finalize_ssa_uses (struct function *fn, gimple *stmt)
+inline void
+operands_scanner::finalize_ssa_uses ()
{
unsigned new_i;
struct use_optype_d new_list;
/* Clear the in_list bits and empty the build array for VDEFs and
VUSEs. */
-static inline void
-cleanup_build_arrays (void)
+inline void
+operands_scanner::cleanup_build_arrays ()
{
build_vdef = NULL_TREE;
build_vuse = NULL_TREE;
/* Finalize all the build vectors, fill the new ones into INFO. */
-static inline void
-finalize_ssa_stmt_operands (struct function *fn, gimple *stmt)
+inline void
+operands_scanner::finalize_ssa_stmt_operands ()
{
- finalize_ssa_defs (fn, stmt);
- finalize_ssa_uses (fn, stmt);
+ finalize_ssa_defs ();
+ finalize_ssa_uses ();
cleanup_build_arrays ();
}
/* Start the process of building up operands vectors in INFO. */
-static inline void
-start_ssa_stmt_operands (void)
+inline void
+operands_scanner::start_ssa_stmt_operands ()
{
gcc_assert (build_uses.length () == 0);
gcc_assert (build_vuse == NULL_TREE);
/* Add USE_P to the list of pointers to operands. */
-static inline void
-append_use (tree *use_p)
+inline void
+operands_scanner::append_use (tree *use_p)
{
build_uses.safe_push (use_p);
}
/* Add VAR to the set of variables that require a VDEF operator. */
-static inline void
-append_vdef (tree var)
+inline void
+operands_scanner::append_vdef (tree var)
{
gcc_assert ((build_vdef == NULL_TREE
|| build_vdef == var)
/* Add VAR to the set of variables that require a VUSE operator. */
-static inline void
-append_vuse (tree var)
+inline void
+operands_scanner::append_vuse (tree var)
{
gcc_assert (build_vuse == NULL_TREE
|| build_vuse == var);
/* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */
-static void
-add_virtual_operand (struct function *fn,
- gimple *stmt ATTRIBUTE_UNUSED, int flags)
+void
+operands_scanner::add_virtual_operand (int flags)
{
/* Add virtual operands to the stmt, unless the caller has specifically
requested not to do that (used when adding operands inside an
it will be added to the statement's real operands, otherwise it is
added to virtual operands. */
-static void
-add_stmt_operand (struct function *fn, tree *var_p, gimple *stmt, int flags)
+void
+operands_scanner::add_stmt_operand (tree *var_p, int flags)
{
tree var = *var_p;
gimple_set_has_volatile_ops (stmt, true);
/* The variable is a memory access. Add virtual operands. */
- add_virtual_operand (fn, stmt, flags);
+ add_virtual_operand (flags);
}
}
FLAGS is as in get_expr_operands. */
-static void
-get_mem_ref_operands (struct function *fn,
- gimple *stmt, tree expr, int flags)
+void
+operands_scanner::get_mem_ref_operands (tree expr, int flags)
{
tree *pptr = &TREE_OPERAND (expr, 0);
gimple_set_has_volatile_ops (stmt, true);
/* Add the VOP. */
- add_virtual_operand (fn, stmt, flags);
+ add_virtual_operand (flags);
/* If requested, add a USE operand for the base pointer. */
- get_expr_operands (fn, stmt, pptr,
+ get_expr_operands (pptr,
opf_non_addressable | opf_use
| (flags & (opf_no_vops|opf_not_non_addressable)));
}
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
-static void
-get_tmr_operands (struct function *fn, gimple *stmt, tree expr, int flags)
+void
+operands_scanner::get_tmr_operands (tree expr, int flags)
{
if (!(flags & opf_no_vops)
&& TREE_THIS_VOLATILE (expr))
gimple_set_has_volatile_ops (stmt, true);
/* First record the real operands. */
- get_expr_operands (fn, stmt,
- &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
- get_expr_operands (fn, stmt,
- &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
- get_expr_operands (fn, stmt,
- &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));
-
- add_virtual_operand (fn, stmt, flags);
+ get_expr_operands (&TMR_BASE (expr), opf_use | (flags & opf_no_vops));
+ get_expr_operands (&TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
+ get_expr_operands (&TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));
+
+ add_virtual_operand (flags);
}
/* If STMT is a call that may clobber globals and other symbols that
escape, add them to the VDEF/VUSE lists for it. */
-static void
-maybe_add_call_vops (struct function *fn, gcall *stmt)
+void
+operands_scanner::maybe_add_call_vops (gcall *stmt)
{
int call_flags = gimple_call_flags (stmt);
{
/* A 'pure' or a 'const' function never call-clobbers anything. */
if (!(call_flags & (ECF_PURE | ECF_CONST)))
- add_virtual_operand (fn, stmt, opf_def);
+ add_virtual_operand (opf_def);
else if (!(call_flags & ECF_CONST))
- add_virtual_operand (fn, stmt, opf_use);
+ add_virtual_operand (opf_use);
}
}
/* Scan operands in the ASM_EXPR stmt referred to in INFO. */
-static void
-get_asm_stmt_operands (struct function *fn, gasm *stmt)
+void
+operands_scanner::get_asm_stmt_operands (gasm *stmt)
{
size_t i, noutputs;
const char **oconstraints;
if (!allows_reg && allows_mem)
mark_address_taken (TREE_VALUE (link));
- get_expr_operands (fn, stmt,
- &TREE_VALUE (link), opf_def | opf_not_non_addressable);
+ get_expr_operands (&TREE_VALUE (link), opf_def | opf_not_non_addressable);
}
/* Gather all input operands. */
if (!allows_reg && allows_mem)
mark_address_taken (TREE_VALUE (link));
- get_expr_operands (fn, stmt, &TREE_VALUE (link), opf_not_non_addressable);
+ get_expr_operands (&TREE_VALUE (link), opf_not_non_addressable);
}
/* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
if (gimple_asm_clobbers_memory_p (stmt))
- add_virtual_operand (fn, stmt, opf_def);
+ add_virtual_operand (opf_def);
}
STMT. FLAGS is one of the OPF_* constants modifying how to
interpret the operands found. */
-static void
-get_expr_operands (struct function *fn, gimple *stmt, tree *expr_p, int flags)
+void
+operands_scanner::get_expr_operands (tree *expr_p, int flags)
{
enum tree_code code;
enum tree_code_class codeclass;
here are ARRAY_REF indices which will always be real operands
(GIMPLE does not allow non-registers as array indices). */
flags |= opf_no_vops;
- get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0),
+ get_expr_operands (&TREE_OPERAND (expr, 0),
flags | opf_not_non_addressable | opf_address_taken);
return;
case STRING_CST:
case CONST_DECL:
if (!(flags & opf_address_taken))
- add_stmt_operand (fn, expr_p, stmt, flags);
+ add_stmt_operand (expr_p, flags);
return;
case DEBUG_EXPR_DECL:
return;
case MEM_REF:
- get_mem_ref_operands (fn, stmt, expr, flags);
+ get_mem_ref_operands (expr, flags);
return;
case TARGET_MEM_REF:
- get_tmr_operands (fn, stmt, expr, flags);
+ get_tmr_operands (expr, flags);
return;
case ARRAY_REF:
&& TREE_THIS_VOLATILE (expr))
gimple_set_has_volatile_ops (stmt, true);
- get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
+ get_expr_operands (&TREE_OPERAND (expr, 0), flags);
if (code == COMPONENT_REF)
{
if (!(flags & opf_no_vops)
&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
gimple_set_has_volatile_ops (stmt, true);
- get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
+ get_expr_operands (&TREE_OPERAND (expr, 2), uflags);
}
else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
{
- get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
- get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
- get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 3), uflags);
+ get_expr_operands (&TREE_OPERAND (expr, 1), uflags);
+ get_expr_operands (&TREE_OPERAND (expr, 2), uflags);
+ get_expr_operands (&TREE_OPERAND (expr, 3), uflags);
}
return;
case WITH_SIZE_EXPR:
/* WITH_SIZE_EXPR is a pass-through reference to its first argument,
and an rvalue reference to its second argument. */
- get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
- get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
+ get_expr_operands (&TREE_OPERAND (expr, 1), uflags);
+ get_expr_operands (&TREE_OPERAND (expr, 0), flags);
return;
case COND_EXPR:
case VEC_COND_EXPR:
case VEC_PERM_EXPR:
- get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), uflags);
- get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
- get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
+ get_expr_operands (&TREE_OPERAND (expr, 0), uflags);
+ get_expr_operands (&TREE_OPERAND (expr, 1), uflags);
+ get_expr_operands (&TREE_OPERAND (expr, 2), uflags);
return;
case CONSTRUCTOR:
for (idx = 0;
vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
idx++)
- get_expr_operands (fn, stmt, &ce->value, uflags);
+ get_expr_operands (&ce->value, uflags);
return;
}
case VIEW_CONVERT_EXPR:
do_unary:
- get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
+ get_expr_operands (&TREE_OPERAND (expr, 0), flags);
return;
case BIT_INSERT_EXPR:
case ASSERT_EXPR:
do_binary:
{
- get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
- get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
+ get_expr_operands (&TREE_OPERAND (expr, 0), flags);
+ get_expr_operands (&TREE_OPERAND (expr, 1), flags);
return;
}
case WIDEN_MULT_PLUS_EXPR:
case WIDEN_MULT_MINUS_EXPR:
{
- get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
- get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
- get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), flags);
+ get_expr_operands (&TREE_OPERAND (expr, 0), flags);
+ get_expr_operands (&TREE_OPERAND (expr, 1), flags);
+ get_expr_operands (&TREE_OPERAND (expr, 2), flags);
return;
}
/* Parse STMT looking for operands. When finished, the various
build_* operand vectors will have potential operands in them. */
-static void
-parse_ssa_operands (struct function *fn, gimple *stmt)
+void
+operands_scanner::parse_ssa_operands ()
{
enum gimple_code code = gimple_code (stmt);
size_t i, n, start = 0;
switch (code)
{
case GIMPLE_ASM:
- get_asm_stmt_operands (fn, as_a <gasm *> (stmt));
+ get_asm_stmt_operands (as_a <gasm *> (stmt));
break;
case GIMPLE_TRANSACTION:
/* The start of a transaction is a memory barrier. */
- add_virtual_operand (fn, stmt, opf_def | opf_use);
+ add_virtual_operand (opf_def | opf_use);
break;
case GIMPLE_DEBUG:
if (gimple_debug_bind_p (stmt)
&& gimple_debug_bind_has_value_p (stmt))
- get_expr_operands (fn, stmt, gimple_debug_bind_get_value_ptr (stmt),
+ get_expr_operands (gimple_debug_bind_get_value_ptr (stmt),
opf_use | opf_no_vops);
break;
case GIMPLE_CALL:
/* Add call-clobbered operands, if needed. */
- maybe_add_call_vops (fn, as_a <gcall *> (stmt));
+ maybe_add_call_vops (as_a <gcall *> (stmt));
/* FALLTHRU */
case GIMPLE_ASSIGN:
- get_expr_operands (fn, stmt, gimple_op_ptr (stmt, 0), opf_def);
+ get_expr_operands (gimple_op_ptr (stmt, 0), opf_def);
start = 1;
/* FALLTHRU */
do_default:
n = gimple_num_ops (stmt);
for (i = start; i < n; i++)
- get_expr_operands (fn, stmt, gimple_op_ptr (stmt, i), opf_use);
+ get_expr_operands (gimple_op_ptr (stmt, i), opf_use);
break;
}
}
/* Create an operands cache for STMT. */
-static void
-build_ssa_operands (struct function *fn, gimple *stmt)
+void
+operands_scanner::build_ssa_operands ()
{
/* Initially assume that the statement has no volatile operands. */
gimple_set_has_volatile_ops (stmt, false);
start_ssa_stmt_operands ();
- parse_ssa_operands (fn, stmt);
- finalize_ssa_stmt_operands (fn, stmt);
+ parse_ssa_operands ();
+ finalize_ssa_stmt_operands ();
}
/* Verifies SSA statement operands. */
DEBUG_FUNCTION bool
-verify_ssa_operands (struct function *fn, gimple *stmt)
+operands_scanner::verify_ssa_operands ()
{
use_operand_p use_p;
def_operand_p def_p;
/* build_ssa_operands w/o finalizing them. */
gimple_set_has_volatile_ops (stmt, false);
start_ssa_stmt_operands ();
- parse_ssa_operands (fn, stmt);
+ parse_ssa_operands ();
/* Now verify the built operands are the same as present in STMT. */
def = gimple_vdef (stmt);
return false;
}
+/* Interface for external use. */
+
+DEBUG_FUNCTION bool
+verify_ssa_operands (struct function *fn, gimple *stmt)
+{
+ return operands_scanner (fn, stmt).verify_ssa_operands ();
+}
+
/* Releases the operands of STMT back to their freelists, and clears
the stmt operand lists. */
timevar_push (TV_TREE_OPS);
gcc_assert (gimple_modified_p (stmt));
- build_ssa_operands (fn, stmt);
+ operands_scanner (fn, stmt).build_ssa_operands ();
gimple_set_modified (stmt, false);
timevar_pop (TV_TREE_OPS);