exploded_node *enode_for_diag,
const program_state *old_state,
program_state *new_state,
+ uncertainty_t *uncertainty,
const gimple *stmt,
stmt_finder *stmt_finder)
: m_eg (&eg), m_logger (eg.get_logger ()),
m_new_state (new_state),
m_stmt (stmt),
m_stmt_finder (stmt_finder),
- m_ext_state (eg.get_ext_state ())
+ m_ext_state (eg.get_ext_state ()),
+ m_uncertainty (uncertainty)
{
}
impl_region_model_context::
impl_region_model_context (program_state *state,
const extrinsic_state &ext_state,
+ uncertainty_t *uncertainty,
logger *logger)
: m_eg (NULL), m_logger (logger), m_enode_for_diag (NULL),
m_old_state (NULL),
m_new_state (state),
m_stmt (NULL),
m_stmt_finder (NULL),
- m_ext_state (ext_state)
+ m_ext_state (ext_state),
+ m_uncertainty (uncertainty)
{
}
m_eg->on_escaped_function (fndecl);
}
+uncertainty_t *
+impl_region_model_context::get_uncertainty ()
+{
+ return m_uncertainty;
+}
+
/* struct setjmp_record. */
int
{
impl_region_model_context old_ctxt
(m_eg, m_enode_for_diag, NULL, NULL/*m_enode->get_state ()*/,
- call);
+ NULL, call);
region_model *model = m_new_state->m_region_model;
return model->get_fndecl_for_call (call, &old_ctxt);
}
LOG_FUNC (logger);
impl_region_model_context old_ctxt
(m_eg, m_enode_for_diag, NULL, NULL/*m_enode->get_state ()*/,
- stmt);
+ NULL, stmt);
const svalue *var_old_sval
= m_old_state->m_region_model->get_rvalue (var, &old_ctxt);
LOG_FUNC (logger);
impl_region_model_context old_ctxt
(m_eg, m_enode_for_diag, NULL, NULL/*m_enode->get_state ()*/,
- stmt);
+ NULL, stmt);
const svalue *var_old_sval
= m_old_state->m_region_model->get_rvalue (var, &old_ctxt);
impl_region_model_context new_ctxt (m_eg, m_enode_for_diag,
m_old_state, m_new_state,
+ NULL,
stmt);
const svalue *var_new_sval
= m_new_state->m_region_model->get_rvalue (var, &new_ctxt);
LOG_FUNC (get_logger ());
gcc_assert (d); // take ownership
impl_region_model_context old_ctxt
- (m_eg, m_enode_for_diag, m_old_state, m_new_state, NULL);
+ (m_eg, m_enode_for_diag, m_old_state, m_new_state, NULL, NULL);
const svalue *var_old_sval
= m_old_state->m_region_model->get_rvalue (var, &old_ctxt);
if (!assign_stmt)
return NULL_TREE;
impl_region_model_context old_ctxt
- (m_eg, m_enode_for_diag, m_old_state, m_new_state, stmt);
+ (m_eg, m_enode_for_diag, m_old_state, m_new_state, NULL, stmt);
if (const svalue *sval
= m_new_state->m_region_model->get_gassign_result (assign_stmt,
&old_ctxt))
exploded_node::on_stmt (exploded_graph &eg,
const supernode *snode,
const gimple *stmt,
- program_state *state)
+ program_state *state,
+ uncertainty_t *uncertainty)
{
logger *logger = eg.get_logger ();
LOG_SCOPE (logger);
const program_state old_state (*state);
impl_region_model_context ctxt (eg, this,
- &old_state, state,
+ &old_state, state, uncertainty,
stmt);
bool unknown_side_effects = false;
exploded_node::on_edge (exploded_graph &eg,
const superedge *succ,
program_point *next_point,
- program_state *next_state)
+ program_state *next_state,
+ uncertainty_t *uncertainty)
{
LOG_FUNC (eg.get_logger ());
if (!next_point->on_edge (eg, succ))
return false;
- if (!next_state->on_edge (eg, this, succ))
+ if (!next_state->on_edge (eg, this, succ, uncertainty))
return false;
return true;
gcc_assert (new_state.m_region_model);
+ uncertainty_t uncertainty;
impl_region_model_context ctxt (eg, this,
- &old_state, &new_state,
+ &old_state, &new_state, &uncertainty,
get_stmt ());
const svalue *result = NULL;
new_state.m_region_model->pop_frame (NULL, &result, &ctxt);
/* Prune state to try to improve the chances of a cache hit,
avoiding generating redundant nodes. */
+ uncertainty_t uncertainty;
program_state pruned_state
- = state.prune_for_point (*this, point, enode_for_diag);
+ = state.prune_for_point (*this, point, enode_for_diag, &uncertainty);
pruned_state.validate (get_ext_state ());
const program_point &iter_point = iter_enode->get_point ();
if (const superedge *iter_sedge = iter_point.get_from_edge ())
{
+ uncertainty_t uncertainty;
impl_region_model_context ctxt (*this, iter_enode,
- &state, next_state, NULL);
+ &state, next_state,
+ &uncertainty, NULL);
const cfg_superedge *last_cfg_superedge
= iter_sedge->dyn_cast_cfg_superedge ();
if (last_cfg_superedge)
case PK_BEFORE_SUPERNODE:
{
program_state next_state (state);
+ uncertainty_t uncertainty;
if (point.get_from_edge ())
{
impl_region_model_context ctxt (*this, node,
- &state, &next_state, NULL);
+ &state, &next_state,
+ &uncertainty, NULL);
const cfg_superedge *last_cfg_superedge
= point.get_from_edge ()->dyn_cast_cfg_superedge ();
if (last_cfg_superedge)
the sm-state-change occurs on an edge where the src enode has
exactly one stmt, the one that caused the change. */
program_state next_state (state);
+ uncertainty_t uncertainty;
const supernode *snode = point.get_supernode ();
unsigned stmt_idx;
const gimple *prev_stmt = NULL;
/* Process the stmt. */
exploded_node::on_stmt_flags flags
- = node->on_stmt (*this, snode, stmt, &next_state);
+ = node->on_stmt (*this, snode, stmt, &next_state, &uncertainty);
node->m_num_processed_stmts++;
/* If flags.m_terminate_path, stop analyzing; any nodes/edges
if (next_state.m_region_model)
{
impl_region_model_context ctxt (*this, node,
- &old_state, &next_state, stmt);
+ &old_state, &next_state,
+ &uncertainty, stmt);
program_state::detect_leaks (old_state, next_state, NULL,
get_ext_state (), &ctxt);
}
point.get_call_string ())
: program_point::after_supernode (point.get_supernode (),
point.get_call_string ()));
- next_state = next_state.prune_for_point (*this, next_point, node);
+ next_state = next_state.prune_for_point (*this, next_point, node,
+ &uncertainty);
if (flags.m_sm_changes || flag_analyzer_fine_grained)
{
= program_point::before_supernode (succ->m_dest, succ,
point.get_call_string ());
program_state next_state (state);
-
- if (!node->on_edge (*this, succ, &next_point, &next_state))
+ uncertainty_t uncertainty;
+ if (!node->on_edge (*this, succ, &next_point, &next_state,
+ &uncertainty))
{
if (logger)
logger->log ("skipping impossible edge to SN: %i",
succ->m_dest->m_index);
continue;
}
-
exploded_node *next = get_or_create_node (next_point, next_state,
node);
if (next)
old state, rather than the new? */
const program_state *old_state,
program_state *new_state,
+ uncertainty_t *uncertainty,
const gimple *stmt,
stmt_finder *stmt_finder = NULL);
impl_region_model_context (program_state *state,
const extrinsic_state &ext_state,
+ uncertainty_t *uncertainty,
logger *logger = NULL);
void warn (pending_diagnostic *d) FINAL OVERRIDE;
void on_escaped_function (tree fndecl) FINAL OVERRIDE;
+ uncertainty_t *get_uncertainty () FINAL OVERRIDE;
+
exploded_graph *m_eg;
log_user m_logger;
exploded_node *m_enode_for_diag;
const gimple *m_stmt;
stmt_finder *m_stmt_finder;
const extrinsic_state &m_ext_state;
+ uncertainty_t *m_uncertainty;
};
/* A <program_point, program_state> pair, used internally by
on_stmt_flags on_stmt (exploded_graph &eg,
const supernode *snode,
const gimple *stmt,
- program_state *state);
+ program_state *state,
+ uncertainty_t *uncertainty);
bool on_edge (exploded_graph &eg,
const superedge *succ,
program_point *next_point,
- program_state *next_state);
+ program_state *next_state,
+ uncertainty_t *uncertainty);
void on_longjmp (exploded_graph &eg,
const gcall *call,
program_state *new_state,
impl_region_model_context *ctxt)
{
svalue_set svals_to_unset;
+ uncertainty_t *uncertainty = ctxt->get_uncertainty ();
auto_vec<const svalue *> leaked_svals (m_map.elements ());
for (map_t::iterator iter = m_map.begin ();
if (!m_sm.can_purge_p (e.m_state))
leaked_svals.quick_push (iter_sval);
}
+ if (uncertainty)
+ if (uncertainty->unknown_sm_state_p (iter_sval))
+ svals_to_unset.add (iter_sval);
}
leaked_svals.qsort (svalue::cmp_ptr_ptr);
bool
program_state::on_edge (exploded_graph &eg,
exploded_node *enode,
- const superedge *succ)
+ const superedge *succ,
+ uncertainty_t *uncertainty)
{
/* Update state. */
const program_point &point = enode->get_point ();
impl_region_model_context ctxt (eg, enode,
&enode->get_state (),
this,
+ uncertainty,
last_stmt);
if (!m_region_model->maybe_update_for_edge (*succ,
last_stmt,
}
program_state::detect_leaks (enode->get_state (), *this,
- NULL, eg.get_ext_state (),
- &ctxt);
+ NULL, eg.get_ext_state (),
+ &ctxt);
return true;
}
program_state
program_state::prune_for_point (exploded_graph &eg,
const program_point &point,
- exploded_node *enode_for_diag) const
+ exploded_node *enode_for_diag,
+ uncertainty_t *uncertainty) const
{
logger * const logger = eg.get_logger ();
LOG_SCOPE (logger);
impl_region_model_context ctxt (eg, enode_for_diag,
this,
&new_state,
+ uncertainty,
point.get_stmt ());
detect_leaks (*this, new_state, NULL, eg.get_ext_state (), &ctxt);
}
{
logger *logger = ext_state.get_logger ();
LOG_SCOPE (logger);
+ const uncertainty_t *uncertainty = ctxt->get_uncertainty ();
if (logger)
{
pretty_printer *pp = logger->get_printer ();
extra_sval->dump_to_pp (pp, true);
logger->end_log_line ();
}
+ if (uncertainty)
+ {
+ logger->start_log_line ();
+ pp_string (pp, "uncertainty: ");
+ uncertainty->dump_to_pp (pp, true);
+ logger->end_log_line ();
+ }
}
- /* Get svalues reachable from each of src_state and dst_state. */
- svalue_set src_svalues;
- svalue_set dest_svalues;
- src_state.m_region_model->get_reachable_svalues (&src_svalues, NULL);
- dest_state.m_region_model->get_reachable_svalues (&dest_svalues, extra_sval);
+ /* Get svalues reachable from each of src_state and dest_state.
+ Get svalues *known* to be reachable in src_state.
+ Pass in uncertainty for dest_state so that we additionally get svalues that
+ *might* still be reachable in dest_state. */
+ svalue_set known_src_svalues;
+ src_state.m_region_model->get_reachable_svalues (&known_src_svalues,
+ NULL, NULL);
+ svalue_set maybe_dest_svalues;
+ dest_state.m_region_model->get_reachable_svalues (&maybe_dest_svalues,
+ extra_sval, uncertainty);
if (logger)
{
- log_set_of_svalues (logger, "src_state reachable svalues:", src_svalues);
- log_set_of_svalues (logger, "dest_state reachable svalues:",
- dest_svalues);
+ log_set_of_svalues (logger, "src_state known reachable svalues:",
+ known_src_svalues);
+ log_set_of_svalues (logger, "dest_state maybe reachable svalues:",
+ maybe_dest_svalues);
}
- auto_vec <const svalue *> dead_svals (src_svalues.elements ());
- for (svalue_set::iterator iter = src_svalues.begin ();
- iter != src_svalues.end (); ++iter)
+ auto_vec <const svalue *> dead_svals (known_src_svalues.elements ());
+ for (svalue_set::iterator iter = known_src_svalues.begin ();
+ iter != known_src_svalues.end (); ++iter)
{
const svalue *sval = (*iter);
/* For each sval reachable from SRC_STATE, determine if it is
- live in DEST_STATE: either explicitly reachable, or implicitly
- live based on the set of explicitly reachable svalues.
- Record those that have ceased to be live. */
- if (!sval->live_p (&dest_svalues, dest_state.m_region_model))
+ live in DEST_STATE: either explicitly reachable, implicitly
+ live based on the set of explicitly reachable svalues,
+ or possibly reachable as recorded in uncertainty.
+ Record those that have ceased to be live i.e. were known
+ to be live, and are now not known to be even possibly-live. */
+ if (!sval->live_p (&maybe_dest_svalues, dest_state.m_region_model))
dead_svals.quick_push (sval);
}
ctxt->on_svalue_leak (sval);
/* Purge dead svals from sm-state. */
- ctxt->on_liveness_change (dest_svalues, dest_state.m_region_model);
+ ctxt->on_liveness_change (maybe_dest_svalues,
+ dest_state.m_region_model);
/* Purge dead svals from constraints. */
dest_state.m_region_model->get_constraints ()->on_liveness_change
- (dest_svalues, dest_state.m_region_model);
+ (maybe_dest_svalues, dest_state.m_region_model);
}
#if CHECKING_P
region_model_manager *mgr = eng.get_model_manager ();
program_state s0 (ext_state);
- impl_region_model_context ctxt (&s0, ext_state);
+ uncertainty_t uncertainty;
+ impl_region_model_context ctxt (&s0, ext_state, &uncertainty);
region_model *model0 = s0.m_region_model;
const svalue *size_in_bytes
bool on_edge (exploded_graph &eg,
exploded_node *enode,
- const superedge *succ);
+ const superedge *succ,
+ uncertainty_t *uncertainty);
program_state prune_for_point (exploded_graph &eg,
const program_point &point,
- exploded_node *enode_for_diag) const;
+ exploded_node *enode_for_diag,
+ uncertainty_t *uncertainty) const;
tree get_representative_tree (const svalue *sval) const;
}
}
+/* Get any uncertainty_t associated with the region_model_context. */
+
+uncertainty_t *
+call_details::get_uncertainty () const
+{
+ return m_ctxt->get_uncertainty ();
+}
+
/* If the callsite has a left-hand-side region, set it to RESULT
and return true.
Otherwise do nothing and return false. */
check_for_writable_region (dest_reg, cd.get_ctxt ());
/* Otherwise, mark region's contents as unknown. */
- mark_region_as_unknown (dest_reg);
+ mark_region_as_unknown (dest_reg, cd.get_uncertainty ());
}
/* Handle the on_call_pre part of "memset" and "__builtin_memset". */
check_for_writable_region (dest_reg, cd.get_ctxt ());
/* Otherwise, mark region's contents as unknown. */
- mark_region_as_unknown (dest_reg);
+ mark_region_as_unknown (dest_reg, cd.get_uncertainty ());
return false;
}
check_for_writable_region (dest_reg, cd.get_ctxt ());
/* For now, just mark region's contents as unknown. */
- mark_region_as_unknown (dest_reg);
+ mark_region_as_unknown (dest_reg, cd.get_uncertainty ());
}
/* Handle the on_call_pre part of "strlen".
reachable_regions::handle_sval (const svalue *sval)
{
m_reachable_svals.add (sval);
+ m_mutable_svals.add (sval);
if (const region_svalue *ptr = sval->dyn_cast_region_svalue ())
{
const region *pointee = ptr->get_pointee ();
access will "inherit" the individual chars. */
const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
- BK_default);
+ BK_default, ctxt->get_uncertainty ());
}
break;
}
}
}
+ uncertainty_t *uncertainty = ctxt->get_uncertainty ();
+
/* Purge sm-state for the svalues that were reachable,
both in non-mutable and mutable form. */
for (svalue_set::iterator iter
{
const svalue *sval = (*iter);
ctxt->on_unknown_change (sval, true);
+ if (uncertainty)
+ uncertainty->on_mutable_sval_at_unknown_call (sval);
}
/* Mark any clusters that have escaped. */
for reachability (for handling return values from functions when
analyzing return of the only function on the stack).
+ If UNCERTAINTY is non-NULL, treat the maybe-bound svalues recorded
+ within it as additional "roots" for reachability.
+
Find svalues that haven't leaked. */
void
region_model::get_reachable_svalues (svalue_set *out,
- const svalue *extra_sval)
+ const svalue *extra_sval,
+ const uncertainty_t *uncertainty)
{
reachable_regions reachable_regs (this);
if (extra_sval)
reachable_regs.handle_sval (extra_sval);
+ if (uncertainty)
+ for (uncertainty_t::iterator iter
+ = uncertainty->begin_maybe_bound_svals ();
+ iter != uncertainty->end_maybe_bound_svals (); ++iter)
+ reachable_regs.handle_sval (*iter);
+
/* Get regions for locals that have explicitly bound values. */
for (store::cluster_map_t::iterator iter = m_store.begin ();
iter != m_store.end (); ++iter)
check_for_writable_region (lhs_reg, ctxt);
m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
- BK_direct);
+ BK_direct, ctxt ? ctxt->get_uncertainty () : NULL);
}
/* Set the value of the region given by LHS to the value given by RHS. */
/* Mark REG as having unknown content. */
void
-region_model::mark_region_as_unknown (const region *reg)
+region_model::mark_region_as_unknown (const region *reg,
+ uncertainty_t *uncertainty)
{
- m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg);
+ m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
+ uncertainty);
}
/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
const gcall *call_stmt = cg_sedge.get_call_stmt ();
tree lhs = gimple_call_lhs (call_stmt);
if (lhs)
- mark_region_as_unknown (get_lvalue (lhs, ctxt));
+ mark_region_as_unknown (get_lvalue (lhs, ctxt),
+ ctxt ? ctxt->get_uncertainty () : NULL);
// TODO: actually implement some kind of summary here
}
region_model_context *ctxt);
region_model_context *get_ctxt () const { return m_ctxt; }
+ uncertainty_t *get_uncertainty () const;
tree get_lhs_type () const { return m_lhs_type; }
const region *get_lhs_region () const { return m_lhs_region; }
void handle_unrecognized_call (const gcall *call,
region_model_context *ctxt);
void get_reachable_svalues (svalue_set *out,
- const svalue *extra_sval);
+ const svalue *extra_sval,
+ const uncertainty_t *uncertainty);
void on_return (const greturn *stmt, region_model_context *ctxt);
void on_setjmp (const gcall *stmt, const exploded_node *enode,
void clobber_region (const region *reg);
void purge_region (const region *reg);
void zero_fill_region (const region *reg);
- void mark_region_as_unknown (const region *reg);
+ void mark_region_as_unknown (const region *reg, uncertainty_t *uncertainty);
void copy_region (const region *dst_reg, const region *src_reg,
region_model_context *ctxt);
/* Hook for clients to be notified when a function_decl escapes. */
virtual void on_escaped_function (tree fndecl) = 0;
+
+ virtual uncertainty_t *get_uncertainty () = 0;
};
/* A "do nothing" subclass of region_model_context. */
void on_unexpected_tree_code (tree, const dump_location_t &) OVERRIDE {}
void on_escaped_function (tree) OVERRIDE {}
+
+ uncertainty_t *get_uncertainty () OVERRIDE { return NULL; }
};
/* A subclass of region_model_context for determining if operations fail
namespace ana {
+/* Dump SVALS to PP, sorting them to ensure determinism. */
+
+static void
+dump_svalue_set (const hash_set <const svalue *> &svals,
+ pretty_printer *pp, bool simple)
+{
+ auto_vec <const svalue *> v;
+ for (hash_set<const svalue *>::iterator iter = svals.begin ();
+ iter != svals.end (); ++iter)
+ {
+ v.safe_push (*iter);
+ }
+ v.qsort (svalue::cmp_ptr_ptr);
+
+ pp_character (pp, '{');
+ const svalue *sval;
+ unsigned i;
+ FOR_EACH_VEC_ELT (v, i, sval)
+ {
+ if (i > 0)
+ pp_string (pp, ", ");
+ sval->dump_to_pp (pp, simple);
+ }
+ pp_character (pp, '}');
+}
+
+/* class uncertainty_t. */
+
+/* Dump this object to PP. */
+
+void
+uncertainty_t::dump_to_pp (pretty_printer *pp, bool simple) const
+{
+ pp_string (pp, "{m_maybe_bound_svals: ");
+ dump_svalue_set (m_maybe_bound_svals, pp, simple);
+
+ pp_string (pp, ", m_mutable_at_unknown_call_svals: ");
+ dump_svalue_set (m_mutable_at_unknown_call_svals, pp, simple);
+ pp_string (pp, "}");
+}
+
+/* Dump this object to stderr. */
+
+DEBUG_FUNCTION void
+uncertainty_t::dump (bool simple) const
+{
+ pretty_printer pp;
+ pp_format_decoder (&pp) = default_tree_printer;
+ pp_show_color (&pp) = pp_show_color (global_dc->printer);
+ pp.buffer->stream = stderr;
+ dump_to_pp (&pp, simple);
+ pp_newline (&pp);
+ pp_flush (&pp);
+}
+
/* Get a human-readable string for KIND for dumps. */
const char *binding_kind_to_string (enum binding_kind kind)
void
binding_cluster::clobber_region (store_manager *mgr, const region *reg)
{
- remove_overlapping_bindings (mgr, reg);
+ remove_overlapping_bindings (mgr, reg, NULL);
}
/* Remove any bindings for REG within this cluster. */
m_touched = false;
}
-/* Mark REG within this cluster as being unknown. */
+/* Mark REG within this cluster as being unknown.
+ If UNCERTAINTY is non-NULL, use it to record any svalues whose
+ bindings were removed, as being maybe-bound. */
void
binding_cluster::mark_region_as_unknown (store_manager *mgr,
- const region *reg)
+ const region *reg,
+ uncertainty_t *uncertainty)
{
- remove_overlapping_bindings (mgr, reg);
+ remove_overlapping_bindings (mgr, reg, uncertainty);
/* Add a default binding to "unknown". */
region_model_manager *sval_mgr = mgr->get_svalue_manager ();
/* Remove any bindings within this cluster that overlap REG,
but retain default bindings that overlap but aren't fully covered
- by REG. */
+ by REG.
+ If UNCERTAINTY is non-NULL, use it to record any svalues whose
+ bindings were removed, as being maybe-bound. */
void
binding_cluster::remove_overlapping_bindings (store_manager *mgr,
- const region *reg)
+ const region *reg,
+ uncertainty_t *uncertainty)
{
auto_vec<const binding_key *> bindings;
get_overlapping_bindings (mgr, reg, &bindings);
if (reg_binding != iter_binding)
continue;
}
+ if (uncertainty)
+ uncertainty->on_maybe_bound_sval (m_map.get (iter_binding));
m_map.remove (iter_binding);
}
}
void
store::set_value (store_manager *mgr, const region *lhs_reg,
- const svalue *rhs_sval, enum binding_kind kind)
+ const svalue *rhs_sval, enum binding_kind kind,
+ uncertainty_t *uncertainty)
{
remove_overlapping_bindings (mgr, lhs_reg);
gcc_unreachable ();
case tristate::TS_UNKNOWN:
- iter_cluster->mark_region_as_unknown (mgr, iter_base_reg);
+ iter_cluster->mark_region_as_unknown (mgr, iter_base_reg,
+ uncertainty);
break;
case tristate::TS_TRUE:
/* Mark REG as having unknown content. */
void
-store::mark_region_as_unknown (store_manager *mgr, const region *reg)
+store::mark_region_as_unknown (store_manager *mgr, const region *reg,
+ uncertainty_t *uncertainty)
{
const region *base_reg = reg->get_base_region ();
if (base_reg->symbolic_for_unknown_ptr_p ())
return;
binding_cluster *cluster = get_or_create_cluster (base_reg);
- cluster->mark_region_as_unknown (mgr, reg);
+ cluster->mark_region_as_unknown (mgr, reg, uncertainty);
}
/* Get the cluster for BASE_REG, or NULL (const version). */
delete cluster;
return;
}
- cluster->remove_overlapping_bindings (mgr, reg);
+ cluster->remove_overlapping_bindings (mgr, reg, NULL);
}
}
namespace ana {
+/* A class for keeping track of aspects of a program_state that we don't
+ know about, to avoid false positives about leaks.
+
+ Consider:
+
+ p->field = malloc (1024);
+ q->field = NULL;
+
+ where we don't know whether or not p and q point to the same memory,
+ and:
+
+ p->field = malloc (1024);
+ unknown_fn (p);
+
+ In both cases, the svalue for the address of the allocated buffer
+ goes from being bound to p->field to not having anything explicitly bound
+ to it.
+
+ Given that we conservatively discard bindings due to possible aliasing or
+ calls to unknown functions, the store loses references to svalues,
+ but these svalues could still be live. We don't want to warn about
+ them leaking - they're effectively in a "maybe live" state.
+
+ This "maybe live" information is somewhat transient.
+
+ We don't want to store this "maybe live" information in the program_state,
+ region_model, or store, since we don't want to bloat these objects (and
+ potentially bloat the exploded_graph with more nodes).
+ However, we can't store it in the region_model_context, as these context
+ objects sometimes don't last long enough to be around when comparing the
+ old vs the new state.
+
+ This class is a way to track a set of such svalues, so that we can
+ temporarily capture that they are in a "maybe live" state whilst
+ comparing old and new states. */
+
+class uncertainty_t
+{
+public:
+ typedef hash_set<const svalue *>::iterator iterator;
+
+ void on_maybe_bound_sval (const svalue *sval)
+ {
+ m_maybe_bound_svals.add (sval);
+ }
+ void on_mutable_sval_at_unknown_call (const svalue *sval)
+ {
+ m_mutable_at_unknown_call_svals.add (sval);
+ }
+
+ bool unknown_sm_state_p (const svalue *sval)
+ {
+ return (m_maybe_bound_svals.contains (sval)
+ || m_mutable_at_unknown_call_svals.contains (sval));
+ }
+
+ void dump_to_pp (pretty_printer *pp, bool simple) const;
+ void dump (bool simple) const;
+
+ iterator begin_maybe_bound_svals () const
+ {
+ return m_maybe_bound_svals.begin ();
+ }
+ iterator end_maybe_bound_svals () const
+ {
+ return m_maybe_bound_svals.end ();
+ }
+
+private:
+
+ /* svalues that might or might not still be bound. */
+ hash_set<const svalue *> m_maybe_bound_svals;
+
+ /* svalues that have mutable sm-state at unknown calls. */
+ hash_set<const svalue *> m_mutable_at_unknown_call_svals;
+};
+
class concrete_binding;
/* An enum for discriminating between "direct" vs "default" levels of
void clobber_region (store_manager *mgr, const region *reg);
void purge_region (store_manager *mgr, const region *reg);
void zero_fill_region (store_manager *mgr, const region *reg);
- void mark_region_as_unknown (store_manager *mgr, const region *reg);
+ void mark_region_as_unknown (store_manager *mgr, const region *reg,
+ uncertainty_t *uncertainty);
const svalue *get_binding (store_manager *mgr, const region *reg,
binding_kind kind) const;
const svalue *maybe_get_compound_binding (store_manager *mgr,
const region *reg) const;
- void remove_overlapping_bindings (store_manager *mgr, const region *reg);
+ void remove_overlapping_bindings (store_manager *mgr, const region *reg,
+ uncertainty_t *uncertainty);
template <typename T>
void for_each_value (void (*cb) (const svalue *sval, T user_data),
bool called_unknown_fn_p () const { return m_called_unknown_fn; }
void set_value (store_manager *mgr, const region *lhs_reg,
- const svalue *rhs_sval, enum binding_kind kind);
+ const svalue *rhs_sval, enum binding_kind kind,
+ uncertainty_t *uncertainty);
void clobber_region (store_manager *mgr, const region *reg);
void purge_region (store_manager *mgr, const region *reg);
void zero_fill_region (store_manager *mgr, const region *reg);
- void mark_region_as_unknown (store_manager *mgr, const region *reg);
+ void mark_region_as_unknown (store_manager *mgr, const region *reg,
+ uncertainty_t *uncertainty);
const binding_cluster *get_cluster (const region *base_reg) const;
binding_cluster *get_cluster (const region *base_reg);
--- /dev/null
+#include <stdio.h>
+
+struct foo {
+ FILE *file;
+};
+
+extern void unknown_fn ();
+extern void unknown_fn2 (const struct foo *f);
+
+int test_1 (struct foo *p)
+{
+ if ((p->file = fopen("test.txt", "w")) == NULL)
+ return 1;
+ unknown_fn ();
+ return 0; /* { dg-bogus "leak" } */
+}
+
+int test_2 (struct foo *p)
+{
+ if ((p->file = fopen("test.txt", "w")) == NULL)
+ return 1;
+ return 0; /* { dg-bogus "leak" } */
+}
+
+int test_3 (void)
+{
+ struct foo f;
+ struct foo *p = &f;
+ if ((p->file = fopen("test.txt", "w")) == NULL)
+ return 1;
+ unknown_fn ();
+ return 0; /* { dg-warning "leak" } */
+}
+
+int test_4 (void)
+{
+ struct foo f;
+ struct foo *p = &f;
+ if ((p->file = fopen("test.txt", "w")) == NULL)
+ return 1;
+ return 0; /* { dg-warning "leak" } */
+}
+
+int test_5 (void)
+{
+ struct foo f;
+ struct foo *p = &f;
+ if ((p->file = fopen("test.txt", "w")) == NULL)
+ return 1;
+ /* Although unknown_fn2 only takes a const pointer, the underlying FILE * is not const and could be closed. */
+ unknown_fn2 (p);
+ return 0; /* { dg-bogus "leak" } */
+}
--- /dev/null
+/* Reproducer for a false-positive report from -Wanalyzer-malloc-leak
+ Reduced from
+ https://git.qemu.org/?p=qemu.git;a=blob;f=subprojects/libvhost-user/libvhost-user.c;h=fab7ca17ee1fb27bcfc338527d1aeb9f923aade5;hb=HEAD#l1184
+ which is licensed under GNU GPLv2 or later. */
+
+typedef unsigned char uint8_t;
+typedef unsigned short uint16_t;
+typedef unsigned long uint64_t;
+typedef unsigned long uint64_t;
+typedef long unsigned int size_t;
+
+extern void *calloc(size_t __nmemb, size_t __size)
+ __attribute__((__nothrow__, __leaf__))
+ __attribute__((__malloc__))
+ __attribute__((__alloc_size__(1, 2)))
+ __attribute__((__warn_unused_result__));
+
+typedef struct VuDescStateSplit {
+ uint8_t inflight;
+ uint64_t counter;
+} VuDescStateSplit;
+
+typedef struct VuVirtqInflight {
+ uint16_t desc_num;
+ VuDescStateSplit desc[];
+} VuVirtqInflight;
+
+typedef struct VuVirtqInflightDesc {
+ uint16_t index;
+ uint64_t counter;
+} VuVirtqInflightDesc;
+
+typedef struct VuVirtq {
+ VuVirtqInflight *inflight;
+ VuVirtqInflightDesc *resubmit_list;
+ uint16_t resubmit_num;
+ uint64_t counter;
+ int inuse;
+} VuVirtq;
+
+int vu_check_queue_inflights(VuVirtq *vq) {
+ int i = 0;
+
+ if (vq->inuse) {
+ vq->resubmit_list = calloc(vq->inuse, sizeof(VuVirtqInflightDesc));
+ if (!vq->resubmit_list) {
+ return -1;
+ }
+
+ for (i = 0; i < vq->inflight->desc_num; i++) {
+ if (vq->inflight->desc[i].inflight) {
+ vq->resubmit_list[vq->resubmit_num].index = i; /* { dg-bogus "leak" } */
+ vq->resubmit_list[vq->resubmit_num].counter =
+ vq->inflight->desc[i].counter;
+ vq->resubmit_num++;
+ }
+ }
+ }
+
+ return 0;
+}
--- /dev/null
+#include <stdlib.h>
+
+struct st
+{
+ void *m_f;
+};
+
+struct node
+{
+ struct node *m_next;
+};
+
+extern void unknown_fn (void *);
+extern void const_unknown_fn (const void *);
+
+void
+test_1 (struct st *p, struct st *q)
+{
+ p->m_f = malloc (1024);
+ q->m_f = NULL; /* { dg-bogus "leak" } */
+ free (p->m_f);
+}
+
+void
+test_2 (void)
+{
+ struct st s;
+ s.m_f = malloc (1024);
+ unknown_fn (&s);
+ free (s.m_f);
+}
+
+void
+test_3 (void)
+{
+ struct st s;
+ s.m_f = malloc (1024);
+ const_unknown_fn (&s);
+ free (s.m_f);
+}
+
+void
+test_4 (void)
+{
+ struct st s;
+ s.m_f = malloc (1024);
+ unknown_fn (&s);
+} /* { dg-bogus "leak" } */
+
+void
+test_5 (void)
+{
+ struct st s;
+ s.m_f = malloc (1024);
+ /* s is only passed via a pointer-to-const, but the pointer it holds could still be freed; hence not a leak. */
+ const_unknown_fn (&s);
+} /* { dg-bogus "leak" } */
+
+void
+test_6 (void)
+{
+ struct st s;
+ s.m_f = malloc (1024);
+} /* { dg-warning "leak" } */
+
+struct st
+test_7 (void)
+{
+ struct st s;
+ s.m_f = malloc (1024);
+ return s;
+} /* { dg-bogus "leak" } */
+
+struct node *
+test_8 (void)
+{
+ struct node *n1 = malloc (sizeof (struct node));
+ if (!n1)
+ return NULL;
+ n1->m_next = malloc (sizeof (struct node));
+ return n1;
+}
+
+void
+test_9 (void)
+{
+ struct node *n1 = malloc (sizeof (struct node));
+ if (!n1)
+ return;
+ n1->m_next = malloc (sizeof (struct node));
+ /* Could free n1 and n1->m_next. */
+ unknown_fn (n1);
+}
+
+void
+test_10 (void)
+{
+ struct node *n1 = malloc (sizeof (struct node));
+ if (!n1)
+ return;
+ n1->m_next = malloc (sizeof (struct node));
+ /* Could free n1->m_next, but not n1. */
+ const_unknown_fn (n1); /* { dg-warning "leak of 'n1'" } */
+}
+
+void
+test_11 (void)
+{
+ struct node *n1 = malloc (sizeof (struct node));
+ if (!n1)
+ return;
+ n1->m_next = malloc (sizeof (struct node));
+ /* Could free n1->m_next, but not n1. */
+ unknown_fn (n1->m_next); /* { dg-warning "leak of 'n1'" } */
+}
+
+void
+test_12a (void)
+{
+ int *ip = malloc (sizeof (int));
+ *ip = 42; /* { dg-warning "dereference of possibly-NULL 'ip'" } */
+ free (ip);
+}
+
+void
+test_12b (void)
+{
+ int *ip = malloc (sizeof (int));
+ unknown_fn (ip);
+ /* Might not be a null-deref, as unknown_fn could abort on NULL. */
+ *ip = 42;
+ free (ip);
+}
+
+void
+test_12c (void)
+{
+ int *ip = malloc (sizeof (int));
+ /* Might not be a null-deref, as const_unknown_fn could abort on NULL.
+ Right now we don't have a great way of handling this. */
+ const_unknown_fn (ip);
+ *ip = 42; /* { dg-bogus "dereference of possibly-NULL 'ip'" "" { xfail *-*-* } } */
+ free (ip);
+}