1 /* Classes for saving, deduplicating, and emitting analyzer diagnostics.
2 Copyright (C) 2019-2022 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
22 #define INCLUDE_MEMORY
24 #include "coretypes.h"
26 #include "pretty-print.h"
27 #include "gcc-rich-location.h"
28 #include "gimple-pretty-print.h"
30 #include "diagnostic-core.h"
31 #include "diagnostic-event-id.h"
32 #include "diagnostic-path.h"
34 #include "ordered-hash-map.h"
35 #include "analyzer/analyzer.h"
36 #include "analyzer/analyzer-logging.h"
37 #include "analyzer/sm.h"
38 #include "analyzer/pending-diagnostic.h"
39 #include "analyzer/diagnostic-manager.h"
40 #include "analyzer/call-string.h"
41 #include "analyzer/program-point.h"
42 #include "analyzer/store.h"
43 #include "analyzer/region-model.h"
44 #include "analyzer/constraint-manager.h"
46 #include "basic-block.h"
48 #include "gimple-iterator.h"
49 #include "inlining-iterator.h"
52 #include "analyzer/supergraph.h"
53 #include "analyzer/program-state.h"
54 #include "analyzer/exploded-graph.h"
55 #include "analyzer/trimmed-graph.h"
56 #include "analyzer/feasible-graph.h"
57 #include "analyzer/checker-path.h"
58 #include "analyzer/reachability.h"
59 #include "make-unique.h"
65 class feasible_worklist;
67 /* State for finding the shortest feasible exploded_path for a
69 This is shared between all diagnostics, so that we avoid repeating work. */
/* Interior of class epath_finder: state for finding the shortest feasible
   exploded_path to a diagnostic's enode, shared between all diagnostics
   so work is not repeated.
   NOTE(review): this extract is missing lines — the enclosing
   "class epath_finder" header, access specifiers and several braces are
   not visible here; the code lines below are kept byte-for-byte.  */
/* Ctor: eagerly build the origin-rooted shortest-path data only when
   feasibility checking is disabled (it is unused otherwise).  */
74 epath_finder (const exploded_graph &eg)
78 /* This is shared by all diagnostics, but only needed if
79 !flag_analyzer_feasibility. */
80 if (!flag_analyzer_feasibility)
81 m_sep = new shortest_exploded_paths (eg, eg.get_origin (),
82 SPS_FROM_GIVEN_ORIGIN);
/* Dtor: m_sep is owned (may be NULL when feasibility checking is on).  */
85 ~epath_finder () { delete m_sep; }
87 logger *get_logger () const { return m_eg.get_logger (); }
/* Public entry point: find the "best" path to TARGET_ENODE; see the
   definition below for the full contract.  */
89 std::unique_ptr<exploded_path>
90 get_best_epath (const exploded_node *target_enode,
91 const char *desc, unsigned diag_idx,
92 std::unique_ptr<feasibility_problem> *out_problem);
95 DISABLE_COPY_AND_ASSIGN(epath_finder);
/* Private helpers for the feasibility-graph exploration and for dumping
   intermediate graphs when -fdump-analyzer-feasibility is in effect.  */
97 std::unique_ptr<exploded_path>
98 explore_feasible_paths (const exploded_node *target_enode,
99 const char *desc, unsigned diag_idx);
101 process_worklist_item (feasible_worklist *worklist,
102 const trimmed_graph &tg,
104 const exploded_node *target_enode,
106 std::unique_ptr<exploded_path> *out_best_path) const;
107 void dump_trimmed_graph (const exploded_node *target_enode,
108 const char *desc, unsigned diag_idx,
109 const trimmed_graph &tg,
110 const shortest_paths<eg_traits, exploded_path> &sep);
111 void dump_feasible_graph (const exploded_node *target_enode,
112 const char *desc, unsigned diag_idx,
113 const feasible_graph &fg);
114 void dump_feasible_path (const exploded_node *target_enode,
116 const feasible_graph &fg,
117 const feasible_node &fnode) const;
/* The exploded graph we search, and the lazily-created shortest-path
   data (NULL unless !flag_analyzer_feasibility; see ctor).  */
119 const exploded_graph &m_eg;
120 shortest_exploded_paths *m_sep;
123 /* class epath_finder. */
125 /* Get the "best" exploded_path for reaching ENODE from the origin,
126 returning ownership of it to the caller.
128 Ideally we want to report the shortest feasible path.
129 Return NULL if we could not find a feasible path
130 (when flag_analyzer_feasibility is true).
132 If flag_analyzer_feasibility is false, then simply return the
135 Use DESC and DIAG_IDX when logging.
137 Write any feasibility_problem to *OUT_PROBLEM. */
/* NOTE(review): several lines are missing from this extract (opening/closing
   braces, "if (logger)" guards, the epath->length () trailing arguments and
   the return statements); the surviving lines are kept byte-for-byte.  */
139 std::unique_ptr<exploded_path>
140 epath_finder::get_best_epath (const exploded_node *enode,
141 const char *desc, unsigned diag_idx,
142 std::unique_ptr<feasibility_problem> *out_problem)
144 logger *logger = get_logger ();
147 unsigned snode_idx = enode->get_supernode ()->m_index;
149 logger->log ("considering %qs at EN: %i, SN: %i (sd: %i)",
150 desc, enode->m_index, snode_idx, diag_idx);
152 /* State-merging means that not every path in the egraph corresponds
153 to a feasible one w.r.t. states.
155 We want to find the shortest feasible path from the origin to ENODE
158 if (flag_analyzer_feasibility)
160 /* Attempt to find the shortest feasible path using feasible_graph. */
162 logger->log ("trying to find shortest feasible path");
163 if (std::unique_ptr<exploded_path> epath
164 = explore_feasible_paths (enode, desc, diag_idx))
167 logger->log ("accepting %qs at EN: %i, SN: %i (sd: %i)"
168 " with feasible path (length: %i)",
169 desc, enode->m_index, snode_idx, diag_idx,
/* Fallthrough branch: no feasible path was found, so the diagnostic is
   rejected (presumably returning NULL here — TODO confirm against the
   full source).  */
176 logger->log ("rejecting %qs at EN: %i, SN: %i (sd: %i)"
177 " due to not finding feasible path",
178 desc, enode->m_index, snode_idx, diag_idx);
184 /* As a crude approximation to shortest feasible path, simply find
185 the shortest path, and note whether it is feasible.
186 There could be longer feasible paths within the egraph, so this
187 approach would lead to diagnostics being falsely rejected
188 (PR analyzer/96374). */
190 logger->log ("trying to find shortest path ignoring feasibility");
/* m_sep was created in the ctor for this !flag_analyzer_feasibility case.  */
192 std::unique_ptr<exploded_path> epath
193 = make_unique<exploded_path> (m_sep->get_shortest_path (enode));
194 if (epath->feasible_p (logger, out_problem, m_eg.get_engine (), &m_eg))
197 logger->log ("accepting %qs at EN: %i, SN: %i (sn: %i)"
198 " with feasible path (length: %i)",
199 desc, enode->m_index, snode_idx, diag_idx,
/* Even an infeasible path is accepted in this mode; the log records why.  */
205 logger->log ("accepting %qs at EN: %i, SN: %i (sn: %i) (length: %i)"
206 " despite infeasible path (due to %qs)",
207 desc, enode->m_index, snode_idx, diag_idx,
209 "-fno-analyzer-feasibility");
215 /* A class for managing the worklist of feasible_nodes in
216 epath_finder::explore_feasible_paths, prioritizing them
217 so that shorter paths appear earlier in the queue. */
/* NOTE(review): access specifiers, braces and the inner "class key_t"
   header are missing from this extract; code lines kept byte-for-byte.  */
219 class feasible_worklist
222 feasible_worklist (const shortest_paths<eg_traits, exploded_path> &sep)
223 : m_queue (key_t (*this, NULL)),
/* Pop the fnode with the smallest estimated total path length.  */
228 feasible_node *take_next () { return m_queue.extract_min (); }
230 void add_node (feasible_node *fnode)
232 m_queue.insert (key_t (*this, fnode), fnode);
/* key_t: ordering key for the priority queue; compares nodes by
   estimated total cost (known-feasible prefix + shortest remainder).  */
238 key_t (const feasible_worklist &w, feasible_node *fnode)
239 : m_worklist (w), m_fnode (fnode)
242 bool operator< (const key_t &other) const
244 return cmp (*this, other) < 0;
247 bool operator== (const key_t &other) const
249 return cmp (*this, other) == 0;
252 bool operator> (const key_t &other) const
254 return !(*this == other || *this < other);
258 static int cmp (const key_t &ka, const key_t &kb)
260 /* Choose the node for which if the remaining path were feasible,
261 it would be the shortest path (summing the length of the
262 known-feasible path so far with that of the remaining
263 possibly-feasible path). */
264 int ca = ka.m_worklist.get_estimated_cost (ka.m_fnode);
265 int cb = kb.m_worklist.get_estimated_cost (kb.m_fnode);
269 const feasible_worklist &m_worklist;
270 feasible_node *m_fnode;
273 /* Get the estimated length of a path involving FNODE from
274 the origin to the target enode.
275 Sum the length of the known-feasible path so far with
276 that of the remaining possibly-feasible path. */
278 int get_estimated_cost (const feasible_node *fnode) const
280 unsigned length_so_far = fnode->get_path_length ();
281 int shortest_remaining_path
282 = m_sep.get_shortest_distance (fnode->get_inner_node ());
284 gcc_assert (shortest_remaining_path >= 0);
285 /* This should be true since we're only exploring nodes within
286 the trimmed graph (and we anticipate it being much smaller
287 than this, and thus not overflowing the sum). */
288 gcc_assert (shortest_remaining_path < INT_MAX);
290 return length_so_far + shortest_remaining_path;
293 /* Priority queue, backed by a fibonacci_heap. */
294 typedef fibonacci_heap<key_t, feasible_node> queue_t;
296 const shortest_paths<eg_traits, exploded_path> &m_sep;
299 /* When we're building the exploded graph we want to simplify
300 overly-complicated symbolic values down to "UNKNOWN" to try to avoid
301 state explosions and unbounded chains of exploration.
303 However, when we're building the feasibility graph for a diagnostic
304 (actually a tree), we don't want UNKNOWN values, as conditions on them
305 are also unknown: we don't want to have a contradiction such as a path
306 where (VAL != 0) and then (VAL == 0) along the same path.
308 Hence this is an RAII class for temporarily disabling complexity-checking
309 in the region_model_manager, for use within
310 epath_finder::explore_feasible_paths.
312 We also disable the creation of unknown_svalue instances during feasibility
313 checking, instead creating unique svalues, to avoid paradoxes in paths. */
315 class auto_checking_feasibility
318 auto_checking_feasibility (region_model_manager *mgr) : m_mgr (mgr)
320 m_mgr->begin_checking_feasibility ();
322 ~auto_checking_feasibility ()
324 m_mgr->end_checking_feasibility ();
327 region_model_manager *m_mgr;
330 /* Attempt to find the shortest feasible path from the origin to
331 TARGET_ENODE by iteratively building a feasible_graph, in which
332 every path to a feasible_node is feasible by construction.
334 We effectively explore the tree of feasible paths in order of shortest
335 path until we either find a feasible path to TARGET_ENODE, or hit
339 - Find the shortest path from each node to the TARGET_ENODE (without
340 checking feasibility), so that we can prioritize our worklist.
341 - Construct a trimmed_graph: the subset of nodes/edges that
342 are on a path that eventually reaches TARGET_ENODE. We will only need
343 to consider these when considering the shortest feasible path.
345 Build a feasible_graph, in which every path to a feasible_node
346 is feasible by construction.
347 We use a worklist to flatten the exploration into an iteration.
348 Starting at the origin, find feasible out-edges within the trimmed graph.
349 At each stage, choose the node for which if the remaining path were feasible,
350 it would be the shortest path (summing the length of the known-feasible path
351 so far with that of the remaining possibly-feasible path).
352 This way, the first feasible path we find to TARGET_ENODE is the shortest.
353 We start by trying the shortest possible path, but if that fails,
354 we explore progressively longer paths, eventually trying iterations through
355 loops. The exploration is captured in the feasible_graph, which can be
356 dumped as a .dot file to visualize the exploration. The indices of the
357 feasible_nodes show the order in which they were created.
359 This is something of a brute-force approach, but the trimmed_graph
360 hopefully keeps the complexity manageable.
362 Terminate with failure when the number of infeasible edges exceeds
363 a threshold (--param=analyzer-max-infeasible-edges=).
364 This is guaranteed to eventually lead to terminatation, as
365 we can't keep creating feasible nodes without eventually
366 either reaching an infeasible edge, or reaching the
367 TARGET_ENODE. Specifically, there can't be a cycle of
368 feasible edges that doesn't reach the target_enode without
369 an out-edge that either fails feasibility or gets closer
370 to the TARGET_ENODE: on each iteration we are either:
371 - effectively getting closer to the TARGET_ENODE (which can't
372 continue forever without reaching the target), or
373 - getting monotonically closer to the termination threshold. */
/* NOTE(review): missing from this extract: braces, the declaration of the
   feasible_graph "fg" (used below), "if (logger)" guards, and the final
   return of best_path.  Code lines kept byte-for-byte.  */
375 std::unique_ptr<exploded_path>
376 epath_finder::explore_feasible_paths (const exploded_node *target_enode,
377 const char *desc, unsigned diag_idx)
379 logger *logger = get_logger ();
382 region_model_manager *mgr = m_eg.get_engine ()->get_model_manager ();
384 /* Determine the shortest path to TARGET_ENODE from each node in
385 the exploded graph. */
386 shortest_paths<eg_traits, exploded_path> sep
387 (m_eg, target_enode, SPS_TO_GIVEN_TARGET);
389 /* Construct a trimmed_graph: the subset of nodes/edges that
390 are on a path that eventually reaches TARGET_ENODE.
391 We only need to consider these when considering the shortest
393 trimmed_graph tg (m_eg, target_enode);
395 if (flag_dump_analyzer_feasibility)
396 dump_trimmed_graph (target_enode, desc, diag_idx, tg, sep);
399 feasible_worklist worklist (sep);
401 /* Populate the worklist with the origin node. */
403 feasibility_state init_state (mgr, m_eg.get_supergraph ());
404 feasible_node *origin = fg.add_node (m_eg.get_origin (), init_state, 0);
405 worklist.add_node (origin);
408 /* Iteratively explore the tree of feasible paths in order of shortest
409 path until we either find a feasible path to TARGET_ENODE, or hit
412 /* Set this if we find a feasible path to TARGET_ENODE. */
413 std::unique_ptr<exploded_path> best_path = NULL;
/* RAII: disable complexity-checking while exploring (see
   auto_checking_feasibility above).  */
416 auto_checking_feasibility sentinel (mgr);
418 while (process_worklist_item (&worklist, tg, &fg, target_enode, diag_idx,
421 /* Empty; the work is done within process_worklist_item. */
/* Log stats about the trimmed and feasible graphs for this diagnostic.  */
427 logger->log ("tg for sd: %i:", diag_idx);
428 logger->inc_indent ();
429 tg.log_stats (logger);
430 logger->dec_indent ();
432 logger->log ("fg for sd: %i:", diag_idx);
433 logger->inc_indent ();
434 fg.log_stats (logger);
435 logger->dec_indent ();
438 /* Dump the feasible_graph. */
439 if (flag_dump_analyzer_feasibility)
440 dump_feasible_graph (target_enode, desc, diag_idx, fg);
445 /* Process the next item in WORKLIST, potentially adding new items
446 based on feasible out-edges, and extending FG accordingly.
447 Use TG to ignore out-edges that don't lead to TARGET_ENODE.
448 Return true if the worklist processing should continue.
449 Return false if the processing of the worklist should stop
450 (either due to reaching TARGET_ENODE, or hitting a limit).
451 Write to *OUT_BEST_PATH if stopping due to finding a feasible path
/* NOTE(review): missing from this extract: the "bool" return type line,
   the "feasible_graph *fg" and "unsigned diag_idx" parameters (fg and
   diag_idx are used below), braces, "if (logger)" guards, the returns,
   and the succ_state argument to fg->add_node.  Kept byte-for-byte.  */
456 process_worklist_item (feasible_worklist *worklist,
457 const trimmed_graph &tg,
459 const exploded_node *target_enode,
461 std::unique_ptr<exploded_path> *out_best_path) const
463 logger *logger = get_logger ();
465 feasible_node *fnode = worklist->take_next ();
/* Presumably a NULL fnode means the worklist is drained (stop).  */
469 logger->log ("drained worklist for sd: %i"
470 " without finding feasible path",
475 log_scope s (logger, "fg worklist item",
476 "considering FN: %i (EN: %i) for sd: %i",
477 fnode->get_index (), fnode->get_inner_node ()->m_index,
480 /* Iterate through all out-edges from this item. */
482 exploded_edge *succ_eedge;
483 FOR_EACH_VEC_ELT (fnode->get_inner_node ()->m_succs, i, succ_eedge)
485 log_scope s (logger, "edge", "considering edge: EN:%i -> EN:%i",
486 succ_eedge->m_src->m_index,
487 succ_eedge->m_dest->m_index);
488 /* Reject edges that aren't in the trimmed graph. */
489 if (!tg.contains_p (succ_eedge))
492 logger->log ("rejecting: not in trimmed graph");
/* Copy the feasibility state and try to update it along this edge;
   on success the edge is feasible and a new feasible_node is added.  */
496 feasibility_state succ_state (fnode->get_state ());
497 rejected_constraint *rc = NULL;
498 if (succ_state.maybe_update_for_edge (logger, succ_eedge, &rc))
500 gcc_assert (rc == NULL);
501 feasible_node *succ_fnode
502 = fg->add_node (succ_eedge->m_dest,
504 fnode->get_path_length () + 1);
506 logger->log ("accepting as FN: %i", succ_fnode->get_index ());
507 fg->add_edge (new feasible_edge (fnode, succ_fnode, succ_eedge));
509 /* Have we reached TARGET_ENODE? */
510 if (succ_fnode->get_inner_node () == target_enode)
513 logger->log ("success: got feasible path to EN: %i (sd: %i)"
515 target_enode->m_index, diag_idx,
516 succ_fnode->get_path_length ());
517 *out_best_path = fg->make_epath (succ_fnode);
518 if (flag_dump_analyzer_feasibility)
519 dump_feasible_path (target_enode, diag_idx, *fg, *succ_fnode);
521 /* Success: stop the worklist iteration. */
/* Not the target yet: keep exploring from the new feasible node.  */
525 worklist->add_node (succ_fnode);
/* Infeasible edge: record the feasibility problem in FG.  */
530 logger->log ("infeasible");
532 fg->add_feasibility_problem (fnode,
536 /* Give up if there have been too many infeasible edges. */
537 if (fg->get_num_infeasible ()
538 > (unsigned)param_analyzer_max_infeasible_edges)
541 logger->log ("too many infeasible edges (%i); giving up",
542 fg->get_num_infeasible ());
548 /* Continue the worklist iteration. */
552 /* Helper class for epath_finder::dump_trimmed_graph
553 to dump extra per-node information.
554 Use SEP to add the length of the shortest path from each
555 node to the target node to each node's dump. */
/* NOTE(review): the ctor's member-initializer list and the braces are
   missing from this extract; code lines kept byte-for-byte.  */
557 class dump_eg_with_shortest_path : public eg_traits::dump_args_t
560 dump_eg_with_shortest_path
561 (const exploded_graph &eg,
562 const shortest_paths<eg_traits, exploded_path> &sep)
/* Annotate each node with "sp: N", the shortest-path length from the
   node to the target (SEP was built with SPS_TO_GIVEN_TARGET).  */
568 void dump_extra_info (const exploded_node *enode,
569 pretty_printer *pp) const final override
571 pp_printf (pp, "sp: %i", m_sep.get_shortest_path (enode).length ());
576 const shortest_paths<eg_traits, exploded_path> &m_sep;
579 /* Dump TG to "BASE_NAME.DESC.DIAG_IDX.to-enN.tg.dot",
580 annotating each node with the length of the shortest path
581 from that node to TARGET_ENODE (using SEP). */
/* NOTE(review): the "void"/"epath_finder::" signature lines, the
   pretty_printer declaration for "pp", braces, and the free of
   "filename" are missing from this extract.  Kept byte-for-byte.  */
585 dump_trimmed_graph (const exploded_node *target_enode,
586 const char *desc, unsigned diag_idx,
587 const trimmed_graph &tg,
588 const shortest_paths<eg_traits, exploded_path> &sep)
590 auto_timevar tv (TV_ANALYZER_DUMP);
591 dump_eg_with_shortest_path inner_args (m_eg, sep);
592 trimmed_graph::dump_args_t args (inner_args);
/* Build the dump filename from the global dump_base_name.  */
594 pp_printf (&pp, "%s.%s.%i.to-en%i.tg.dot",
595 dump_base_name, desc, diag_idx, target_enode->m_index);
596 char *filename = xstrdup (pp_formatted_text (&pp));
597 tg.dump_dot (filename, NULL, args);
601 /* Dump FG to "BASE_NAME.DESC.DIAG_IDX.to-enN.fg.dot". */
/* NOTE(review): the "void" return-type line, the pretty_printer
   declaration for "pp", braces and the free of "filename" are missing
   from this extract.  Kept byte-for-byte.  */
604 epath_finder::dump_feasible_graph (const exploded_node *target_enode,
605 const char *desc, unsigned diag_idx,
606 const feasible_graph &fg)
608 auto_timevar tv (TV_ANALYZER_DUMP);
609 exploded_graph::dump_args_t inner_args (m_eg);
610 feasible_graph::dump_args_t args (inner_args);
/* Build the dump filename from the global dump_base_name.  */
612 pp_printf (&pp, "%s.%s.%i.to-en%i.fg.dot",
613 dump_base_name, desc, diag_idx, target_enode->m_index);
614 char *filename = xstrdup (pp_formatted_text (&pp));
615 fg.dump_dot (filename, NULL, args);
619 /* Dump the path to FNODE to "BASE_NAME.DIAG_IDX.to-enN.fpath.txt". */
/* NOTE(review): the "void" line, the "unsigned diag_idx" parameter
   (used below), the pretty_printer declaration for "pp", braces and
   the free of "filename" are missing from this extract.  */
622 epath_finder::dump_feasible_path (const exploded_node *target_enode,
624 const feasible_graph &fg,
625 const feasible_node &fnode) const
627 auto_timevar tv (TV_ANALYZER_DUMP);
629 pp_printf (&pp, "%s.%i.to-en%i.fpath.txt",
630 dump_base_name, diag_idx, target_enode->m_index);
631 char *filename = xstrdup (pp_formatted_text (&pp));
632 fg.dump_feasible_path (fnode, filename);
636 /* class saved_diagnostic. */
638 /* saved_diagnostic's ctor.
639 Take ownership of D and STMT_FINDER. */
/* NOTE(review): the "tree var, const svalue *sval" parameters and the
   "unsigned idx" parameter/initializer appear to be among the lines
   missing from this extract (m_var/m_sval are initialized below);
   code lines kept byte-for-byte.  */
641 saved_diagnostic::saved_diagnostic (const state_machine *sm,
642 const exploded_node *enode,
643 const supernode *snode, const gimple *stmt,
644 const stmt_finder *stmt_finder,
647 state_machine::state_t state,
648 std::unique_ptr<pending_diagnostic> d,
650 : m_sm (sm), m_enode (enode), m_snode (snode), m_stmt (stmt),
651 /* stmt_finder could be on-stack; we want our own copy that can
653 m_stmt_finder (stmt_finder ? stmt_finder->clone () : NULL),
654 m_var (var), m_sval (sval), m_state (state),
655 m_d (std::move (d)), m_trailing_eedge (NULL),
657 m_best_epath (NULL), m_problem (NULL),
/* Must have either a stmt or a way of finding one on the epath.  */
660 gcc_assert (m_stmt || m_stmt_finder);
662 /* We must have an enode in order to be able to look for paths
663 through the exploded_graph to this diagnostic. */
664 gcc_assert (m_enode);
668 saved_diagnostic::operator== (const saved_diagnostic &other) const
670 if (m_notes.length () != other.m_notes.length ())
672 for (unsigned i = 0; i < m_notes.length (); i++)
673 if (!m_notes[i]->equal_p (*other.m_notes[i]))
675 return (m_sm == other.m_sm
676 /* We don't compare m_enode. */
677 && m_snode == other.m_snode
678 && m_stmt == other.m_stmt
679 /* We don't compare m_stmt_finder. */
680 && pending_diagnostic::same_tree_p (m_var, other.m_var)
681 && m_state == other.m_state
682 && m_d->equal_p (*other.m_d)
683 && m_trailing_eedge == other.m_trailing_eedge);
686 /* Add PN to this diagnostic, taking ownership of it. */
689 saved_diagnostic::add_note (std::unique_ptr<pending_note> pn)
692 m_notes.safe_push (pn.release ());
695 /* Return a new json::object of the form
699 "sval": optional str,
700 "state": optional str,
701 "path_length": optional int,
702 "pending_diagnostic": str,
/* NOTE(review): the "json::object *" return-type line, braces, and the
   guards for the optional fields (presumably "if (m_sval)", "if (m_state)",
   "if (m_best_epath)" — TODO confirm) are missing from this extract.
   Code lines kept byte-for-byte.  */
706 saved_diagnostic::to_json () const
708 json::object *sd_obj = new json::object ();
711 sd_obj->set ("sm", new json::string (m_sm->get_name ()));
712 sd_obj->set ("enode", new json::integer_number (m_enode->m_index));
713 sd_obj->set ("snode", new json::integer_number (m_snode->m_index));
715 sd_obj->set ("sval", m_sval->to_json ());
717 sd_obj->set ("state", m_state->to_json ());
719 sd_obj->set ("path_length", new json::integer_number (get_epath_length ()));
720 sd_obj->set ("pending_diagnostic", new json::string (m_d->get_kind ()));
721 sd_obj->set ("idx", new json::integer_number (m_idx));
723 /* We're not yet JSONifying the following fields:
724 const gimple *m_stmt;
725 stmt_finder *m_stmt_finder;
727 exploded_edge *m_trailing_eedge;
728 enum status m_status;
729 feasibility_problem *m_problem;
730 auto_delete_vec <pending_note> m_notes;
736 /* Dump this to PP in a form suitable for use as an id in .dot output. */
739 saved_diagnostic::dump_dot_id (pretty_printer *pp) const
741 pp_printf (pp, "sd_%i", m_idx);
744 /* Dump this to PP in a form suitable for use as a node in .dot output. */
/* NOTE(review): the "void" line, braces, the dump_dot_id call emitting the
   node id, pp_newline calls and the conditional guards around the
   optional fields (stmt/var/sval) are missing from this extract.
   Code lines kept byte-for-byte.  */
747 saved_diagnostic::dump_as_dot_node (pretty_printer *pp) const
751 " [shape=none,margin=0,style=filled,fillcolor=\"red\",label=\"");
752 pp_write_text_to_stream (pp);
755 pp_printf (pp, "DIAGNOSTIC: %s (sd: %i)\n",
756 m_d->get_kind (), m_idx);
759 pp_printf (pp, "sm: %s", m_sm->get_name ());
762 pp_printf (pp, "; state: ");
763 m_state->dump_to_pp (pp);
769 pp_string (pp, "stmt: ");
770 pp_gimple_stmt_1 (pp, m_stmt, 0, (dump_flags_t)0);
774 pp_printf (pp, "var: %qE\n", m_var);
777 pp_string (pp, "sval: ");
778 m_sval->dump_to_pp (pp, true);
782 pp_printf (pp, "path length: %i\n", get_epath_length ());
/* Escape the accumulated text for use inside a .dot label.  */
784 pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/true);
785 pp_string (pp, "\"];\n\n");
787 /* Show links to duplicates. */
788 for (auto iter : m_duplicates)
791 pp_string (pp, " -> ");
792 iter->dump_dot_id (pp);
793 pp_string (pp, " [style=\"dotted\" arrowhead=\"none\"];");
798 /* Use PF to find the best exploded_path for this saved_diagnostic,
799 and store it in m_best_epath.
800 If m_stmt is still NULL, use m_stmt_finder on the epath to populate
802 Return true if a best path was found. */
/* NOTE(review): the "bool" line, braces, the "return false;" for the
   failure case, the "if (m_stmt == NULL)" guard before using the
   stmt_finder, and the final "return true;" are missing from this
   extract.  Code lines kept byte-for-byte.  */
805 saved_diagnostic::calc_best_epath (epath_finder *pf)
807 logger *logger = pf->get_logger ();
811 m_best_epath = pf->get_best_epath (m_enode, m_d->get_kind (), m_idx,
814 /* Handle failure to find a feasible path. */
815 if (m_best_epath == NULL)
818 gcc_assert (m_best_epath);
/* Lazily find the stmt via the finder when the ctor didn't get one.  */
821 gcc_assert (m_stmt_finder);
822 m_stmt = m_stmt_finder->find_stmt (*m_best_epath);
830 saved_diagnostic::get_epath_length () const
832 gcc_assert (m_best_epath);
833 return m_best_epath->length ();
836 /* Record that OTHER (and its duplicates) are duplicates
837 of this saved_diagnostic. */
840 saved_diagnostic::add_duplicate (saved_diagnostic *other)
843 m_duplicates.reserve (m_duplicates.length ()
844 + other->m_duplicates.length ()
846 m_duplicates.splice (other->m_duplicates);
847 other->m_duplicates.truncate (0);
848 m_duplicates.safe_push (other);
851 /* Return true if this diagnostic supercedes OTHER, and that OTHER should
852 therefore not be emitted. */
855 saved_diagnostic::supercedes_p (const saved_diagnostic &other) const
857 /* They should be at the same stmt. */
858 if (m_stmt != other.m_stmt)
860 return m_d->supercedes_p (*other.m_d);
863 /* Emit any pending notes owned by this diagnostic. */
866 saved_diagnostic::emit_any_notes () const
868 for (auto pn : m_notes)
872 /* State for building a checker_path from a particular exploded_path.
873 In particular, this precomputes reachability information: the set of
874 source enodes for which a path be found to the diagnostic enode. */
/* NOTE(review): the "class path_builder" header, the ctor's init of
   m_eg and m_sd, access specifiers and braces are missing from this
   extract; code lines kept byte-for-byte.  */
879 path_builder (const exploded_graph &eg,
880 const exploded_path &epath,
881 const feasibility_problem *problem,
882 const saved_diagnostic &sd)
884 m_diag_enode (epath.get_final_enode ()),
886 m_reachability (eg, m_diag_enode),
887 m_feasibility_problem (problem)
890 const exploded_node *get_diag_node () const { return m_diag_enode; }
892 pending_diagnostic *get_pending_diagnostic () const
894 return m_sd.m_d.get ();
/* True if the diagnostic enode is reachable from SRC_ENODE (precomputed).  */
897 bool reachable_from_p (const exploded_node *src_enode) const
899 return m_reachability.reachable_from_p (src_enode);
902 const extrinsic_state &get_ext_state () const { return m_eg.get_ext_state (); }
904 const feasibility_problem *get_feasibility_problem () const
906 return m_feasibility_problem;
909 const state_machine *get_sm () const { return m_sd.m_sm; }
912 typedef reachability<eg_traits> enode_reachability;
914 const exploded_graph &m_eg;
916 /* The enode where the diagnostic occurs. */
917 const exploded_node *m_diag_enode;
919 const saved_diagnostic &m_sd;
921 /* Precompute all enodes from which the diagnostic is reachable. */
922 enode_reachability m_reachability;
924 const feasibility_problem *m_feasibility_problem;
927 /* Determine the emission location for PD at STMT in FUN. */
930 get_emission_location (const gimple *stmt, function *fun,
931 const pending_diagnostic &pd)
933 location_t loc = get_stmt_location (stmt, fun);
935 /* Allow the pending_diagnostic to fix up the location. */
936 loc = pd.fixup_location (loc, true);
941 /* class diagnostic_manager. */
943 /* diagnostic_manager's ctor. */
945 diagnostic_manager::diagnostic_manager (logger *logger, engine *eng,
947 : log_user (logger), m_eng (eng), m_verbosity (verbosity),
948 m_num_disabled_diagnostics (0)
952 /* Queue pending_diagnostic D at ENODE for later emission.
953 Return true/false signifying if the diagnostic was actually added. */
/* NOTE(review): the "bool" line, the "tree var, const svalue *sval"
   parameters (used below), braces, the "if (stmt)" guard around the
   early-rejection check, the "return false;"/"return true;" statements
   and the "saved_diagnostic *sd" declaration are missing from this
   extract.  Code lines kept byte-for-byte.  */
956 diagnostic_manager::add_diagnostic (const state_machine *sm,
957 exploded_node *enode,
958 const supernode *snode, const gimple *stmt,
959 const stmt_finder *finder,
962 state_machine::state_t state,
963 std::unique_ptr<pending_diagnostic> d)
965 LOG_FUNC (get_logger ());
967 /* We must have an enode in order to be able to look for paths
968 through the exploded_graph to the diagnostic. */
971 /* If this warning is ultimately going to be rejected by a -Wno-analyzer-*
973 We can only do this for diagnostics where we already know the stmt,
974 and thus can determine the emission location. */
977 location_t loc = get_emission_location (stmt, snode->m_fun, *d);
978 int option = d->get_controlling_option ();
979 if (!warning_enabled_at (loc, option))
982 get_logger ()->log ("rejecting disabled warning %qs",
984 m_num_disabled_diagnostics++;
/* Otherwise save the diagnostic, index it by position in the vec, and
   register it with the enode for later path-finding.  */
990 = new saved_diagnostic (sm, enode, snode, stmt, finder, var, sval,
991 state, std::move (d), m_saved_diagnostics.length ());
992 m_saved_diagnostics.safe_push (sd);
993 enode->add_diagnostic (sd);
995 log ("adding saved diagnostic %i at SN %i to EN %i: %qs",
997 snode->m_index, enode->m_index, sd->m_d->get_kind ());
1001 /* Queue pending_diagnostic D at ENODE for later emission.
1002 Return true/false signifying if the diagnostic was actually added.
1003 Take ownership of D (or delete it). */
1006 diagnostic_manager::add_diagnostic (exploded_node *enode,
1007 const supernode *snode, const gimple *stmt,
1008 const stmt_finder *finder,
1009 std::unique_ptr<pending_diagnostic> d)
1012 return add_diagnostic (NULL, enode, snode, stmt, finder, NULL_TREE,
1013 NULL, 0, std::move (d));
1016 /* Add PN to the most recent saved_diagnostic. */
1019 diagnostic_manager::add_note (std::unique_ptr<pending_note> pn)
1021 LOG_FUNC (get_logger ());
1024 /* Get most recent saved_diagnostic. */
1025 gcc_assert (m_saved_diagnostics.length () > 0);
1026 saved_diagnostic *sd = m_saved_diagnostics[m_saved_diagnostics.length () - 1];
1027 sd->add_note (std::move (pn));
1030 /* Return a new json::object of the form
1031 {"diagnostics" : [obj for saved_diagnostic]}. */
/* NOTE(review): the "json::object *" return-type line, braces, the
   "unsigned i" loop index declaration, and the "return dm_obj;" are
   missing from this extract.  Code lines kept byte-for-byte.  */
1034 diagnostic_manager::to_json () const
1036 json::object *dm_obj = new json::object ();
1039 json::array *sd_arr = new json::array ();
1041 saved_diagnostic *sd;
1042 FOR_EACH_VEC_ELT (m_saved_diagnostics, i, sd)
1043 sd_arr->append (sd->to_json ());
1044 dm_obj->set ("diagnostics", sd_arr);
1050 /* A class for identifying sets of duplicated pending_diagnostic.
1052 We want to find the simplest saved_diagnostic amongst those that share a
/* NOTE(review): the "class dedupe_key" header, access specifiers, braces
   and the hash contribution from the pending_diagnostic kind (between
   add_ptr and end) are missing from this extract.  Kept byte-for-byte.  */
1058 dedupe_key (const saved_diagnostic &sd)
1059 : m_sd (sd), m_stmt (sd.m_stmt)
1061 gcc_assert (m_stmt);
1064 hashval_t hash () const
1066 inchash::hash hstate;
1067 hstate.add_ptr (m_stmt);
1069 return hstate.end ();
/* Key equality relies on saved_diagnostic::operator== plus the stmt.  */
1071 bool operator== (const dedupe_key &other) const
1073 return (m_sd == other.m_sd
1074 && m_stmt == other.m_stmt);
1077 location_t get_location () const
1079 return m_stmt->location;
1082 /* A qsort comparator for use by dedupe_winners::emit_best
1083 to sort them into location_t order. */
/* NOTE(review): the "static int" line, braces, the "return cmp;"
   statements after each tiebreak, and the final index-based tiebreak
   are missing from this extract.  */
1086 comparator (const void *p1, const void *p2)
1088 const dedupe_key *pk1 = *(const dedupe_key * const *)p1;
1089 const dedupe_key *pk2 = *(const dedupe_key * const *)p2;
1091 location_t loc1 = pk1->get_location ();
1092 location_t loc2 = pk2->get_location ();
1094 if (int cmp = linemap_compare_locations (line_table, loc2, loc1))
1096 if (int cmp = ((int)pk1->m_sd.get_epath_length ()
1097 - (int)pk2->m_sd.get_epath_length ()))
1099 if (int cmp = strcmp (pk1->m_sd.m_d->get_kind (),
1100 pk2->m_sd.m_d->get_kind ()))
1105 const saved_diagnostic &m_sd;
1106 const gimple *m_stmt;
1109 /* Traits for use by dedupe_winners. */
/* NOTE(review): braces, the bodies of hash/equal_keys (which presumably
   delegate to dedupe_key::hash and operator== — TODO confirm), and the
   body of mark_empty are missing from this extract.  Kept byte-for-byte.  */
1111 class dedupe_hash_map_traits
1114 typedef const dedupe_key *key_type;
1115 typedef saved_diagnostic *value_type;
1116 typedef saved_diagnostic *compare_type;
1118 static inline hashval_t hash (const key_type &v)
1122 static inline bool equal_keys (const key_type &k1, const key_type &k2)
1126 template <typename T>
1127 static inline void remove (T &)
/* Deleted entries are tagged with the sentinel pointer value 1;
   empty entries are NULL (empty_zero_p below).  */
1131 template <typename T>
1132 static inline void mark_deleted (T &entry)
1134 entry.m_key = reinterpret_cast<key_type> (1);
1136 template <typename T>
1137 static inline void mark_empty (T &entry)
1141 template <typename T>
1142 static inline bool is_deleted (const T &entry)
1144 return entry.m_key == reinterpret_cast<key_type> (1);
1146 template <typename T>
1147 static inline bool is_empty (const T &entry)
1149 return entry.m_key == NULL;
1151 static const bool empty_zero_p = true;
1154 /* A class for deduplicating diagnostics and finding (and emitting) the
1155 best saved_diagnostic within each partition. */
1157 class dedupe_winners
/* NOTE(review): this listing is elided (the embedded line numbers are
   discontinuous), so access specifiers, braces and some statements are
   not visible; the comments below describe only the visible lines.  */
1162 /* Delete all keys, but not the saved_diagnostics. */
/* Presumably the destructor: the dedupe_key instances are owned by
   m_map (they are heap-allocated in add () below), whereas the
   saved_diagnostics are owned elsewhere — TODO confirm against the
   full source.  */
1163 for (map_t::iterator iter = m_map.begin ();
1164 iter != m_map.end ();
1166 delete (*iter).first;
1169 /* Determine an exploded_path for SD using PF and, if it's feasible,
1170 determine if SD is the best seen so far for its dedupe_key.
1171 Record the winning SD for each dedupe_key. */
1173 void add (logger *logger,
1175 saved_diagnostic *sd)
1177 /* Determine best epath for SD. */
/* If no feasible path exists SD can't be reported; the (elided)
   code presumably bails out here.  */
1178 if (!sd->calc_best_epath (pf))
1181 dedupe_key *key = new dedupe_key (*sd);
1182 if (saved_diagnostic **slot = m_map.get (key))
/* There is already a winner for this key: keep whichever of the
   two has the shorter feasible path, recording the loser as a
   duplicate of the winner.  */
1185 logger->log ("already have this dedupe_key");
1187 saved_diagnostic *cur_best_sd = *slot;
1189 if (sd->get_epath_length () < cur_best_sd->get_epath_length ())
1191 /* We've got a shorter path for the key; replace
1192 the current candidate, marking it as a duplicate of SD. */
1194 logger->log ("length %i is better than existing length %i;"
1195 " taking over this dedupe_key",
1196 sd->get_epath_length (),
1197 cur_best_sd->get_epath_length ());
1198 sd->add_duplicate (cur_best_sd);
1202 /* We haven't beaten the current best candidate; add SD
1203 as a duplicate of it. */
1206 logger->log ("length %i isn't better than existing length %i;"
1207 " dropping this candidate",
1208 sd->get_epath_length (),
1209 cur_best_sd->get_epath_length ());
1210 cur_best_sd->add_duplicate (sd);
1216 /* This is the first candidate for this key. */
1217 m_map.put (key, sd);
1219 logger->log ("first candidate for this dedupe_key");
1223 /* Handle interactions between the dedupe winners, so that some
1224 diagnostics can supercede others (of different kinds).
1226 We want use-after-free to supercede use-of-uninitialized-value,
1227 so that if we have these at the same stmt, we don't emit
1228 a use-of-uninitialized, just the use-after-free. */
1230 void handle_interactions (diagnostic_manager *dm)
1232 LOG_SCOPE (dm->get_logger ());
/* O(N^2) pairwise comparison of the winners; presumably acceptable
   because the number of deduplicated diagnostics is small.  */
1233 auto_vec<const dedupe_key *> superceded;
1234 for (auto outer : m_map)
1236 const saved_diagnostic *outer_sd = outer.second;
1237 for (auto inner : m_map)
1239 const saved_diagnostic *inner_sd = inner.second;
1240 if (inner_sd->supercedes_p (*outer_sd))
1242 superceded.safe_push (outer.first);
1243 if (dm->get_logger ())
1244 dm->log ("sd[%i] \"%s\" superceded by sd[%i] \"%s\"",
1245 outer_sd->get_index (), outer_sd->m_d->get_kind (),
1246 inner_sd->get_index (), inner_sd->m_d->get_kind ());
/* Removal is deferred until after iteration so that m_map is not
   mutated while being ranged over.  */
1251 for (auto iter : superceded)
1252 m_map.remove (iter);
1255 /* Emit the simplest diagnostic within each set. */
1257 void emit_best (diagnostic_manager *dm,
1258 const exploded_graph &eg)
1260 LOG_SCOPE (dm->get_logger ());
1262 /* Get keys into a vec for sorting. */
1263 auto_vec<const dedupe_key *> keys (m_map.elements ());
1264 for (map_t::iterator iter = m_map.begin ();
1265 iter != m_map.end ();
1267 keys.quick_push ((*iter).first);
1269 dm->log ("# keys after de-duplication: %i", keys.length ());
1271 /* Sort into a good emission order. */
1272 keys.qsort (dedupe_key::comparator);
1274 /* Emit the best saved_diagnostics for each key. */
1276 const dedupe_key *key;
1277 FOR_EACH_VEC_ELT (keys, i, key)
1279 saved_diagnostic **slot = m_map.get (key);
1281 const saved_diagnostic *sd = *slot;
1282 dm->emit_saved_diagnostic (eg, *sd);
1287 /* This maps from each dedupe_key to a current best saved_diagnostic. */
1289 typedef hash_map<const dedupe_key *, saved_diagnostic *,
1290 dedupe_hash_map_traits> map_t;
1294 /* Emit all saved diagnostics. */
/* Top-level driver: computes feasible exploded_paths (sharing one
   epath_finder across all diagnostics), partitions the saved
   diagnostics by dedupe_key via a dedupe_winners instance, resolves
   superceding interactions, then emits the best candidate per key.
   NOTE(review): some lines of this function are elided in this
   listing (embedded numbering is discontinuous).  */
1297 diagnostic_manager::emit_saved_diagnostics (const exploded_graph &eg)
1299 LOG_SCOPE (get_logger ());
1300 auto_timevar tv (TV_ANALYZER_DIAGNOSTICS);
1301 log ("# saved diagnostics: %i", m_saved_diagnostics.length ());
1302 log ("# disabled diagnostics: %i", m_num_disabled_diagnostics);
1306 saved_diagnostic *sd;
1307 FOR_EACH_VEC_ELT (m_saved_diagnostics, i, sd)
1308 log ("[%i] sd: %qs at EN: %i, SN: %i",
1309 i, sd->m_d->get_kind (), sd->m_enode->m_index,
1310 sd->m_snode->m_index);
/* Nothing to do if no diagnostics were saved (early-out body elided).  */
1313 if (m_saved_diagnostics.length () == 0)
1316 /* Compute the shortest_paths once, sharing it between all diagnostics. */
1317 epath_finder pf (eg);
1319 /* Iterate through all saved diagnostics, adding them to a dedupe_winners
1320 instance. This partitions the saved diagnostics by dedupe_key,
1321 generating exploded_paths for them, and retaining the best one in each
1323 dedupe_winners best_candidates;
1326 saved_diagnostic *sd;
1327 FOR_EACH_VEC_ELT (m_saved_diagnostics, i, sd)
1328 best_candidates.add (get_logger (), &pf, sd);
1330 best_candidates.handle_interactions (this);
1332 /* For each dedupe-key, call emit_saved_diagnostic on the "best"
1333 saved_diagnostic. */
1334 best_candidates.emit_best (this, eg);
1337 /* Given a saved_diagnostic SD with m_best_epath through EG,
1338 create a checker_path of suitable events and use it to call
1339 SD's underlying pending_diagnostic "emit" vfunc to emit a diagnostic. */
1342 diagnostic_manager::emit_saved_diagnostic (const exploded_graph &eg,
1343 const saved_diagnostic &sd)
1345 LOG_SCOPE (get_logger ());
1346 log ("sd: %qs at SN: %i", sd.m_d->get_kind (), sd.m_snode->m_index);
1347 log ("num dupes: %i", sd.get_num_dupes ());
/* NOTE(review): this PP is cloned from the global diagnostic context's
   printer; the (elided) lines presumably manage its lifetime — confirm
   there is a matching delete in the full source.  */
1349 pretty_printer *pp = global_dc->printer->clone ();
1351 const exploded_path *epath = sd.get_best_epath ();
1354 /* Precompute all enodes from which the diagnostic is reachable. */
1355 path_builder pb (eg, *epath, sd.get_feasibility_problem (), sd);
1357 /* This is the diagnostic_path subclass that will be built for
1359 checker_path emission_path (get_logger ());
1361 /* Populate emission_path with a full description of EPATH. */
1362 build_emission_path (pb, *epath, &emission_path);
1364 /* Now prune it to just cover the most pertinent events. */
1365 prune_path (&emission_path, sd.m_sm, sd.m_sval, sd.m_state);
1367 /* Add a final event to the path, covering the diagnostic itself.
1368 We use the final enode from the epath, which might be different from
1369 the sd.m_enode, as the dedupe code doesn't care about enodes, just
1371 sd.m_d->add_final_event (sd.m_sm, epath->get_final_enode (), sd.m_stmt,
1372 sd.m_var, sd.m_state, &emission_path);
1374 /* The "final" event might not be final; if the saved_diagnostic has a
1375 trailing eedge stashed, add any events for it. This is for use
1376 in handling longjmp, to show where a longjmp is rewinding to. */
1377 if (sd.m_trailing_eedge)
1378 add_events_for_eedge (pb, *sd.m_trailing_eedge, &emission_path, NULL);
1380 emission_path.inject_any_inlined_call_events (get_logger ());
1382 emission_path.prepare_for_emission (sd.m_d.get ());
/* Pick the primary location for the warning (declaration elided).  */
1385 = get_emission_location (sd.m_stmt, sd.m_snode->m_fun, *sd.m_d);
1387 /* Allow the pending_diagnostic to fix up the locations of events. */
1388 emission_path.fixup_locations (sd.m_d.get ());
1390 gcc_rich_location rich_loc (loc);
1391 rich_loc.set_path (&emission_path);
/* Group the warning with any follow-on notes, and make cfun match the
   function containing the diagnostic while emitting.  */
1393 auto_diagnostic_group d;
1394 auto_cfun sentinel (sd.m_snode->m_fun);
/* emit () returns true if the warning was actually issued; only then
   do we add notes, the duplicate count, and the optional epath dump.  */
1395 if (sd.m_d->emit (&rich_loc))
1397 sd.emit_any_notes ();
1399 unsigned num_dupes = sd.get_num_dupes ();
1400 if (flag_analyzer_show_duplicate_count && num_dupes > 0)
1401 inform_n (loc, num_dupes,
1402 "%i duplicate", "%i duplicates",
1404 if (flag_dump_analyzer_exploded_paths)
1406 auto_timevar tv (TV_ANALYZER_DUMP);
/* NOTE(review): this "pp" appears to be a different, local
   pretty_printer (its declaration is elided) — it is passed by
   address, unlike the pointer cloned above.  */
1408 pp_printf (&pp, "%s.%i.%s.epath.txt",
1409 dump_base_name, sd.get_index (), sd.m_d->get_kind ());
1410 char *filename = xstrdup (pp_formatted_text (&pp));
1411 epath->dump_to_file (filename, eg.get_ext_state ());
1412 inform (loc, "exploded path written to %qs", filename);
1419 /* Emit a "path" of events to EMISSION_PATH describing the exploded path
/* Builds the full (unpruned) event sequence for EPATH:
   first region-creation events for interesting globals, then per-edge
   events, then any event needed on the final node.  */
1423 diagnostic_manager::build_emission_path (const path_builder &pb,
1424 const exploded_path &epath,
1425 checker_path *emission_path) const
1427 LOG_SCOPE (get_logger ());
/* Ask the pending_diagnostic which svalues/regions matter to it.  */
1429 interesting_t interest;
1430 pb.get_pending_diagnostic ()->mark_interesting_stuff (&interest);
1432 /* Add region creation events for any globals of interest, at the
1433 beginning of the path. */
1435 for (auto reg : interest.m_region_creation)
1436 switch (reg->get_memory_space ())
1441 case MEMSPACE_GLOBALS:
1442 case MEMSPACE_READONLY_DATA:
1444 const region *base_reg = reg->get_base_region ();
/* Only emit an event if we can point at the decl's source location.  */
1445 if (tree decl = base_reg->maybe_get_decl ())
1447 && DECL_SOURCE_LOCATION (decl) != UNKNOWN_LOCATION)
1449 emission_path->add_region_creation_events
1450 (pb.get_pending_diagnostic (),
1452 DECL_SOURCE_LOCATION (decl),
1461 /* Walk EPATH, adding events as appropriate. */
1462 for (unsigned i = 0; i < epath.m_edges.length (); i++)
1464 const exploded_edge *eedge = epath.m_edges[i];
1465 add_events_for_eedge (pb, *eedge, emission_path, &interest);
1467 add_event_on_final_node (pb, epath.get_final_enode (),
1468 emission_path, &interest);
1471 /* Emit a region_creation_event when requested on the last statement in
1474 If a region_creation_event should be emitted on the last statement of the
1475 path, we need to peek to the successors to get whether the final enode
1480 diagnostic_manager::add_event_on_final_node (const path_builder &pb,
1481 const exploded_node *final_enode,
1482 checker_path *emission_path,
1483 interesting_t *interest) const
1485 const program_point &src_point = final_enode->get_point ();
1486 const int src_stack_depth = src_point.get_stack_depth ();
1487 const program_state &src_state = final_enode->get_state ();
1488 const region_model *src_model = src_state.m_region_model;
/* Peek at each successor edge of the final enode; if a successor's
   dynamic extents differ, a region of interest was created on the
   final statement, so emit its creation event here.  */
1492 FOR_EACH_VEC_ELT (final_enode->m_succs, j, e)
1494 exploded_node *dst = e->m_dest;
1495 const program_state &dst_state = dst->get_state ();
1496 const region_model *dst_model = dst_state.m_region_model;
1497 if (src_model->get_dynamic_extents ()
1498 != dst_model->get_dynamic_extents ())
1502 bool emitted = false;
1503 FOR_EACH_VEC_ELT (interest->m_region_creation, i, reg)
1505 const region *base_reg = reg->get_base_region ();
1506 const svalue *old_extents
1507 = src_model->get_dynamic_extents (base_reg);
1508 const svalue *new_extents
1509 = dst_model->get_dynamic_extents (base_reg);
/* Extents going from absent to present indicates the region came
   into existence along this edge.  */
1510 if (old_extents == NULL && new_extents != NULL)
1511 switch (base_reg->get_kind ())
1515 case RK_HEAP_ALLOCATED:
1517 emission_path->add_region_creation_events
1518 (pb.get_pending_diagnostic (),
1521 src_point.get_location (),
1522 src_point.get_fndecl (),
1535 /* Subclass of state_change_visitor that creates state_change_event
/* Visitor used by add_events_for_eedge: for each sm-state change along
   EEDGE that belongs to the state machine of interest (m_pb.get_sm ()),
   append a state_change_event to m_emission_path.
   NOTE(review): several lines of this class are elided in this listing
   (constructor arguments to the events, early returns, etc.).  */
1538 class state_change_event_creator : public state_change_visitor
1541 state_change_event_creator (const path_builder &pb,
1542 const exploded_edge &eedge,
1543 checker_path *emission_path)
1546 m_emission_path (emission_path)
/* Called for changes to a state machine's global state.  */
1549 bool on_global_state_change (const state_machine &sm,
1550 state_machine::state_t src_sm_val,
1551 state_machine::state_t dst_sm_val)
/* Ignore state machines other than the diagnostic's own.  */
1554 if (&sm != m_pb.get_sm ())
1556 const exploded_node *src_node = m_eedge.m_src;
1557 const program_point &src_point = src_node->get_point ();
1558 const int src_stack_depth = src_point.get_stack_depth ();
1559 const exploded_node *dst_node = m_eedge.m_dest;
1560 const gimple *stmt = src_point.get_stmt ();
1561 const supernode *supernode = src_point.get_supernode ();
1562 const program_state &dst_state = dst_node->get_state ();
1564 int stack_depth = src_stack_depth;
1566 m_emission_path->add_event
1567 (make_unique<state_change_event> (supernode,
/* Called for changes to the sm-state of a particular svalue.  */
1579 bool on_state_change (const state_machine &sm,
1580 state_machine::state_t src_sm_val,
1581 state_machine::state_t dst_sm_val,
1583 const svalue *dst_origin_sval) final override
1585 if (&sm != m_pb.get_sm ())
1587 const exploded_node *src_node = m_eedge.m_src;
1588 const program_point &src_point = src_node->get_point ();
1589 const int src_stack_depth = src_point.get_stack_depth ();
1590 const exploded_node *dst_node = m_eedge.m_dest;
1591 const gimple *stmt = src_point.get_stmt ();
1592 const supernode *supernode = src_point.get_supernode ();
1593 const program_state &dst_state = dst_node->get_state ();
1595 int stack_depth = src_stack_depth;
/* For CFG edges, attribute the change to the last stmt of the source
   supernode (condition elided).  */
1598 && m_eedge.m_sedge->m_kind == SUPEREDGE_CFG_EDGE)
1600 supernode = src_point.get_supernode ();
1601 stmt = supernode->get_last_stmt ();
1602 stack_depth = src_stack_depth;
1605 /* Bulletproofing for state changes at calls/returns;
1606 TODO: is there a better way? */
1610 m_emission_path->add_event
1611 (make_unique<state_change_event> (supernode,
/* The path_builder, edge being visited, and output path (borrowed).  */
1623 const path_builder &m_pb;
1624 const exploded_edge &m_eedge;
1625 checker_path *m_emission_path;
1628 /* Compare SRC_STATE and DST_STATE (which use EXT_STATE), and call
1629 VISITOR's on_state_change for every sm-state change that occurs
1630 to a tree, and on_global_state_change for every global state change
1633 This determines the state changes that ought to be reported to
1634 the user: a combination of the effects of changes to sm_state_map
1635 (which maps svalues to sm-states), and of region_model changes
1636 (which map trees to svalues).
1638 Bail out early and return true if any call to on_global_state_change
1639 or on_state_change returns true, otherwise return false.
1641 This is split out to make it easier to experiment with changes to
1642 exploded_node granularity (so that we can observe what state changes
1643 lead to state_change_events being emitted). */
1646 for_each_state_change (const program_state &src_state,
1647 const program_state &dst_state,
1648 const extrinsic_state &ext_state,
1649 state_change_visitor *visitor)
/* Both states must have one sm_state_map per checker.  */
1651 gcc_assert (src_state.m_checker_states.length ()
1652 == ext_state.get_num_checkers ());
1653 gcc_assert (dst_state.m_checker_states.length ()
1654 == ext_state.get_num_checkers ());
1655 for (unsigned i = 0; i < ext_state.get_num_checkers (); i++)
1657 const state_machine &sm = ext_state.get_sm (i);
1658 const sm_state_map &src_smap = *src_state.m_checker_states[i];
1659 const sm_state_map &dst_smap = *dst_state.m_checker_states[i];
1661 /* Add events for any global state changes. */
1662 if (src_smap.get_global_state () != dst_smap.get_global_state ())
1663 if (visitor->on_global_state_change (sm,
1664 src_smap.get_global_state (),
1665 dst_smap.get_global_state ()))
1668 /* Add events for per-svalue state changes. */
/* Iterate over the destination map: any svalue whose state differs
   from its state in the source map has changed along this edge.  */
1669 for (sm_state_map::iterator_t iter = dst_smap.begin ();
1670 iter != dst_smap.end ();
1673 const svalue *sval = (*iter).first;
1674 state_machine::state_t dst_sm_val = (*iter).second.m_state;
1675 state_machine::state_t src_sm_val
1676 = src_smap.get_state (sval, ext_state);
1677 if (dst_sm_val != src_sm_val)
1679 const svalue *origin_sval = (*iter).second.m_origin;
1680 if (visitor->on_state_change (sm, src_sm_val, dst_sm_val,
1689 /* An sm_context for adding state_change_event on assignments to NULL,
1690 where the default state isn't m_start. Storing such state in the
1691 sm_state_map would lead to bloat of the exploded_graph, so we want
1692 to leave it as a default state, and inject state change events here
1693 when we have a diagnostic.
1694 Find transitions of constants, for handling on_zero_assignment. */
1696 struct null_assignment_sm_context : public sm_context
/* All pointer/reference members are borrowed; this context is
   stack-allocated per-statement in add_events_for_eedge.  */
1698 null_assignment_sm_context (int sm_idx,
1699 const state_machine &sm,
1700 const program_state *old_state,
1701 const program_state *new_state,
1703 const program_point *point,
1704 checker_path *emission_path,
1705 const extrinsic_state &ext_state)
1706 : sm_context (sm_idx, sm), m_old_state (old_state), m_new_state (new_state),
1707 m_stmt (stmt), m_point (point), m_emission_path (emission_path),
1708 m_ext_state (ext_state)
/* Not needed for this context (body elided).  */
1712 tree get_fndecl_for_call (const gcall */*call*/) final override
/* Look up VAR's sm-state in the pre-assignment state.  */
1717 state_machine::state_t get_state (const gimple *stmt ATTRIBUTE_UNUSED,
1718 tree var) final override
1720 const svalue *var_old_sval
1721 = m_old_state->m_region_model->get_rvalue (var, NULL);
1722 const sm_state_map *old_smap = m_old_state->m_checker_states[m_sm_idx];
1724 state_machine::state_t current
1725 = old_smap->get_state (var_old_sval, m_ext_state);
/* As above, but keyed directly by svalue.  */
1730 state_machine::state_t get_state (const gimple *stmt ATTRIBUTE_UNUSED,
1731 const svalue *sval) final override
1733 const sm_state_map *old_smap = m_old_state->m_checker_states[m_sm_idx];
1734 state_machine::state_t current = old_smap->get_state (sval, m_ext_state);
/* Record a synthetic state_change_event for VAR, but only for
   transitions from the start state to the "null" state (other
   transitions are already captured in the exploded graph).  */
1738 void set_next_state (const gimple *stmt,
1740 state_machine::state_t to,
1741 tree origin ATTRIBUTE_UNUSED) final override
1743 state_machine::state_t from = get_state (stmt, var);
1744 if (from != m_sm.get_start_state ())
1746 if (!is_transition_to_null (to))
1749 const svalue *var_new_sval
1750 = m_new_state->m_region_model->get_rvalue (var, NULL);
1752 const supernode *supernode = m_point->get_supernode ();
1753 int stack_depth = m_point->get_stack_depth ();
1755 m_emission_path->add_event
1756 (make_unique<state_change_event> (supernode,
/* svalue overload of the above.  */
1766 void set_next_state (const gimple *stmt,
1768 state_machine::state_t to,
1769 tree origin ATTRIBUTE_UNUSED) final override
1771 state_machine::state_t from = get_state (stmt, sval);
1772 if (from != m_sm.get_start_state ())
1774 if (!is_transition_to_null (to))
1777 const supernode *supernode = m_point->get_supernode ();
1778 int stack_depth = m_point->get_stack_depth ();
1780 m_emission_path->add_event
1781 (make_unique<state_change_event> (supernode,
/* This context only injects events; it never emits warnings.  */
1791 void warn (const supernode *, const gimple *,
1792 tree, std::unique_ptr<pending_diagnostic>) final override
1795 void warn (const supernode *, const gimple *,
1796 const svalue *, std::unique_ptr<pending_diagnostic>) final override
1800 tree get_diagnostic_tree (tree expr) final override
1805 tree get_diagnostic_tree (const svalue *sval) final override
1807 return m_new_state->m_region_model->get_representative_tree (sval);
/* Global state is irrelevant for this context (bodies elided).  */
1810 state_machine::state_t get_global_state () const final override
1815 void set_global_state (state_machine::state_t) final override
1820 void on_custom_transition (custom_transition *) final override
/* If STMT assigns a constant, return its LHS; used by state machines
   to detect assignments of zero/NULL.  */
1824 tree is_zero_assignment (const gimple *stmt) final override
1826 const gassign *assign_stmt = dyn_cast <const gassign *> (stmt);
1829 if (const svalue *sval
1830 = m_new_state->m_region_model->get_gassign_result (assign_stmt, NULL))
1831 if (tree cst = sval->maybe_get_constant ())
1833 return gimple_assign_lhs (assign_stmt);
1837 const program_state *get_old_program_state () const final override
1841 const program_state *get_new_program_state () const final override
1846 /* We only care about transitions to the "null" state
1847 within sm-malloc. Special-case this. */
1848 static bool is_transition_to_null (state_machine::state_t s)
1850 return !strcmp (s->get_name (), "null");
/* Borrowed state before/after the assignment, the stmt and point being
   processed, the path being built, and the extrinsic state.  */
1853 const program_state *m_old_state;
1854 const program_state *m_new_state;
1855 const gimple *m_stmt;
1856 const program_point *m_point;
1857 checker_path *m_emission_path;
1858 const extrinsic_state &m_ext_state;
1861 /* Subroutine of diagnostic_manager::build_emission_path.
1862 Add any events for EEDGE to EMISSION_PATH. */
/* NOTE(review): parts of this function are elided in this listing
   (embedded numbering is discontinuous); comments below describe the
   visible lines only.  */
1865 diagnostic_manager::add_events_for_eedge (const path_builder &pb,
1866 const exploded_edge &eedge,
1867 checker_path *emission_path,
1868 interesting_t *interest) const
1870 const exploded_node *src_node = eedge.m_src;
1871 const program_point &src_point = src_node->get_point ();
1872 const int src_stack_depth = src_point.get_stack_depth ();
1873 const exploded_node *dst_node = eedge.m_dest;
1874 const program_point &dst_point = dst_node->get_point ();
1875 const int dst_stack_depth = dst_point.get_stack_depth ();
/* Log the edge being processed (guard on get_logger () elided).  */
1878 get_logger ()->start_log_line ();
1879 pretty_printer *pp = get_logger ()->get_printer ();
1880 pp_printf (pp, "EN %i -> EN %i: ",
1881 eedge.m_src->m_index,
1882 eedge.m_dest->m_index);
1883 src_point.print (pp, format (false));
1884 pp_string (pp, "-> ");
1885 dst_point.print (pp, format (false));
1886 get_logger ()->end_log_line ();
1888 const program_state &src_state = src_node->get_state ();
1889 const program_state &dst_state = dst_node->get_state ();
1891 /* Add state change events for the states that have changed.
1892 We add these before events for superedges, so that if we have a
1893 state_change_event due to following an edge, we'll get this sequence
1899 | (1) assuming 'ptr' is non-NULL (state_change_event)
1900 | (2) following 'false' branch... (start_cfg_edge_event)
1902 | do_something (ptr);
1903 | ~~~~~~~~~~~~~^~~~~
1905 | (3) ...to here (end_cfg_edge_event). */
1906 state_change_event_creator visitor (pb, eedge, emission_path);
1907 for_each_state_change (src_state, dst_state, pb.get_ext_state (),
1910 /* Allow non-standard edges to add events, e.g. when rewinding from
1911 longjmp to a setjmp. */
1912 if (eedge.m_custom_info)
1913 eedge.m_custom_info->add_events_to_path (emission_path, eedge);
1915 /* Add events for superedges, function entries, and for statements. */
1916 switch (dst_point.get_kind ())
1920 case PK_BEFORE_SUPERNODE:
1921 if (src_point.get_kind () == PK_AFTER_SUPERNODE)
1924 add_events_for_superedge (pb, eedge, emission_path);
1926 /* Add function entry events. */
1927 if (dst_point.get_supernode ()->entry_p ())
1929 pb.get_pending_diagnostic ()->add_function_entry_event
1930 (eedge, emission_path);
1931 /* Create region_creation_events for on-stack regions within
/* Only locals of the function just entered, with a known decl and a
   usable source location, get creation events here.  */
1937 FOR_EACH_VEC_ELT (interest->m_region_creation, i, reg)
1938 if (const frame_region *frame = reg->maybe_get_frame_region ())
1939 if (frame->get_fndecl () == dst_point.get_fndecl ())
1941 const region *base_reg = reg->get_base_region ();
1942 if (tree decl = base_reg->maybe_get_decl ())
1944 && DECL_SOURCE_LOCATION (decl) != UNKNOWN_LOCATION)
1946 emission_path->add_region_creation_events
1947 (pb.get_pending_diagnostic (),
1948 reg, dst_state.m_region_model,
1949 DECL_SOURCE_LOCATION (decl),
1950 dst_point.get_fndecl (),
1958 case PK_BEFORE_STMT:
1960 const gimple *stmt = dst_point.get_stmt ();
1961 const gcall *call = dyn_cast <const gcall *> (stmt);
/* setjmp-like calls get a dedicated event kind; other statements get
   a plain statement_event (else-branch structure elided).  */
1962 if (call && is_setjmp_call_p (call))
1963 emission_path->add_event
1964 (make_unique<setjmp_event> (stmt->location,
1966 dst_point.get_fndecl (),
1970 emission_path->add_event
1971 (make_unique<statement_event> (stmt,
1972 dst_point.get_fndecl (),
1973 dst_stack_depth, dst_state));
1975 /* Create state change events for assignment to NULL.
1976 Iterate through the stmts in dst_enode, adding state change
1978 if (dst_state.m_region_model)
1980 log_scope s (get_logger (), "processing run of stmts");
/* Re-simulate the run of stmts at this enode so that synthetic
   NULL-assignment events can be injected per-stmt.  */
1981 program_state iter_state (dst_state);
1982 program_point iter_point (dst_point);
1985 const gimple *stmt = iter_point.get_stmt ();
1986 if (const gassign *assign = dyn_cast<const gassign *> (stmt))
1988 const extrinsic_state &ext_state = pb.get_ext_state ();
1989 program_state old_state (iter_state);
1990 iter_state.m_region_model->on_assignment (assign, NULL);
1991 for (unsigned i = 0; i < ext_state.get_num_checkers (); i++)
1993 const state_machine &sm = ext_state.get_sm (i);
1994 null_assignment_sm_context sm_ctxt (i, sm,
2000 pb.get_ext_state ());
2001 sm.on_stmt (&sm_ctxt, dst_point.get_supernode (), stmt);
2002 // TODO: what about phi nodes?
/* Stop when we fall off the end of the supernode, or when the next
   point coincides with a successor enode (loop exit elided).  */
2005 iter_point.next_stmt ();
2006 if (iter_point.get_kind () == PK_AFTER_SUPERNODE
2007 || (dst_node->m_succs.length () > 1
2009 == dst_node->m_succs[0]->m_dest->get_point ())))
2018 /* Look for changes in dynamic extents, which will identify
2019 the creation of heap-based regions and alloca regions. */
2022 const region_model *src_model = src_state.m_region_model;
2023 const region_model *dst_model = dst_state.m_region_model;
2024 if (src_model->get_dynamic_extents ()
2025 != dst_model->get_dynamic_extents ())
2029 FOR_EACH_VEC_ELT (interest->m_region_creation, i, reg)
2031 const region *base_reg = reg->get_base_region ();
2032 const svalue *old_extents
2033 = src_model->get_dynamic_extents (base_reg);
2034 const svalue *new_extents
2035 = dst_model->get_dynamic_extents (base_reg);
/* Extents appearing along this edge indicates region creation.  */
2036 if (old_extents == NULL && new_extents != NULL)
2037 switch (base_reg->get_kind ())
2041 case RK_HEAP_ALLOCATED:
2043 emission_path->add_region_creation_events
2044 (pb.get_pending_diagnostic (),
2046 src_point.get_location (),
2047 src_point.get_fndecl (),
/* If this edge is the one a feasibility problem was recorded against,
   add a diagnostic event explaining the infeasibility.  */
2056 if (pb.get_feasibility_problem ()
2057 && &pb.get_feasibility_problem ()->m_eedge == &eedge)
2060 pp_format_decoder (&pp) = default_tree_printer;
2062 "this path would have been rejected as infeasible"
2064 pb.get_feasibility_problem ()->dump_to_pp (&pp);
2065 emission_path->add_event
2066 (make_unique<precanned_custom_event>
2067 (dst_point.get_location (),
2068 dst_point.get_fndecl (),
2070 pp_formatted_text (&pp)));
2074 /* Return true if EEDGE is a significant edge in the path to the diagnostic
2077 Consider all of the sibling out-eedges from the same source enode
2079 If there's no path from the destinations of those eedges to the
2080 diagnostic enode, then we have to take this eedge and thus it's
2083 Conversely if there is a path from the destination of any other sibling
2084 eedge to the diagnostic enode, then this edge is insignificant.
2086 Example 1: redundant if-else:
2094 D is reachable by either B or C, so neither of these edges
2097 Example 2: pertinent if-else:
2102 (C) [NECESSARY CONDITION] | |
2103 (D) [POSSIBLE DIAGNOSTIC] D1 D2
2105 D becomes D1 and D2 in the exploded graph, where the diagnostic occurs
2106 at D2. D2 is only reachable via C, so the A -> C edge is significant.
2108 Example 3: redundant loop:
2110 (A) while (...) +-->A
2116 D is reachable from both B and C, so the A->C edge is not significant. */
2119 diagnostic_manager::significant_edge_p (const path_builder &pb,
2120 const exploded_edge &eedge) const
2123 exploded_edge *sibling;
2124 FOR_EACH_VEC_ELT (eedge.m_src->m_succs, i, sibling)
/* Skip EEDGE itself; any *other* sibling whose destination still
   reaches the diagnostic node makes EEDGE insignificant.  */
2126 if (sibling == &eedge)
2128 if (pb.reachable_from_p (sibling->m_dest))
2131 get_logger ()->log (" edge EN: %i -> EN: %i is insignificant as"
2132 " EN: %i is also reachable via"
2133 " EN: %i -> EN: %i",
2134 eedge.m_src->m_index, eedge.m_dest->m_index,
2135 pb.get_diag_node ()->m_index,
2136 sibling->m_src->m_index,
2137 sibling->m_dest->m_index);
2145 /* Subroutine of diagnostic_manager::add_events_for_eedge
2146 where EEDGE has an underlying superedge i.e. a CFG edge,
2147 or an interprocedural call/return.
2148 Add any events for the superedge to EMISSION_PATH. */
2151 diagnostic_manager::add_events_for_superedge (const path_builder &pb,
2152 const exploded_edge &eedge,
2153 checker_path *emission_path)
2156 gcc_assert (eedge.m_sedge);
2158 /* Give diagnostics an opportunity to override this function. */
2159 pending_diagnostic *pd = pb.get_pending_diagnostic ();
/* If the diagnostic added its own events, we're done (return elided).  */
2160 if (pd->maybe_add_custom_events_for_superedge (eedge, emission_path))
2163 /* Don't add events for insignificant edges at verbosity levels below 3. */
2164 if (m_verbosity < 3)
2165 if (!significant_edge_p (pb, eedge))
2168 const exploded_node *src_node = eedge.m_src;
2169 const program_point &src_point = src_node->get_point ();
2170 const exploded_node *dst_node = eedge.m_dest;
2171 const program_point &dst_point = dst_node->get_point ();
2172 const int src_stack_depth = src_point.get_stack_depth ();
2173 const int dst_stack_depth = dst_point.get_stack_depth ();
/* May be NULL for an empty supernode; guarded below at each use.  */
2174 const gimple *last_stmt = src_point.get_supernode ()->get_last_stmt ();
2176 switch (eedge.m_sedge->m_kind)
2178 case SUPEREDGE_CFG_EDGE:
/* CFG edges are described by a start/end event pair
   ("following 'false' branch..." / "...to here").  */
2180 emission_path->add_event
2181 (make_unique<start_cfg_edge_event> (eedge,
2183 ? last_stmt->location
2184 : UNKNOWN_LOCATION),
2185 src_point.get_fndecl (),
2187 emission_path->add_event
2188 (make_unique<end_cfg_edge_event>
2190 dst_point.get_supernode ()->get_start_location (),
2191 dst_point.get_fndecl (),
2196 case SUPEREDGE_CALL:
2197 pd->add_call_event (eedge, emission_path);
2200 case SUPEREDGE_INTRAPROCEDURAL_CALL:
2202 /* TODO: add a subclass for this, or generate events for the
2204 emission_path->add_event
2205 (make_unique<debug_event> ((last_stmt
2206 ? last_stmt->location
2207 : UNKNOWN_LOCATION),
2208 src_point.get_fndecl (),
2214 case SUPEREDGE_RETURN:
2216 const return_superedge *return_edge
2217 = as_a <const return_superedge *> (eedge.m_sedge);
2219 const gcall *call_stmt = return_edge->get_call_stmt ();
2220 emission_path->add_event
2221 (make_unique<return_event> (eedge,
2223 ? call_stmt->location
2224 : UNKNOWN_LOCATION),
2225 dst_point.get_fndecl (),
2232 /* Prune PATH, based on the verbosity level, to the most pertinent
2233 events for a diagnostic that involves VAR ending in state STATE
2234 (for state machine SM).
2236 PATH is updated in place, and the redundant checker_events are deleted.
2238 As well as deleting events, call record_critical_state on events in
2239 which state critical to the pending_diagnostic is being handled; see
2240 the comment for diagnostic_manager::prune_for_sm_diagnostic. */
2243 diagnostic_manager::prune_path (checker_path *path,
2244 const state_machine *sm,
2246 state_machine::state_t state) const
2248 LOG_FUNC (get_logger ());
/* Log the path before and after so the effect of each pruning pass
   can be inspected with -fdump-analyzer.  */
2249 path->maybe_log (get_logger (), "path");
/* Passes run in order: drop sm-irrelevant events, drop interprocedural
   noise, merge chains of conditions, then final cleanup.  */
2250 prune_for_sm_diagnostic (path, sm, sval, state);
2251 prune_interproc_events (path);
2252 consolidate_conditions (path);
2253 finish_pruning (path);
2254 path->maybe_log (get_logger (), "pruned");
2257 /* A cheap test to determine if EXPR can be the expression of interest in
2258 an sm-diagnostic, so that we can reject cases where we have a non-lvalue.
2259 We don't always have a model when calling this, so we can't use
2260 tentative_region_model_context, so there can be false positives. */
2263 can_be_expr_of_interest_p (tree expr)
/* Null-EXPR handling is elided in this listing; presumably rejected.  */
2268 /* Reject constants. */
2269 if (CONSTANT_CLASS_P (expr))
2272 /* Otherwise assume that it can be an lvalue. */
2276 /* First pass of diagnostic_manager::prune_path: apply verbosity level,
2277 pruning unrelated state change events.
2279 Iterate backwards through PATH, skipping state change events that aren't
2280 VAR but update the pertinent VAR when state-copying occurs.
2282 As well as deleting events, call record_critical_state on events in
2283 which state critical to the pending_diagnostic is being handled, so
2284 that the event's get_desc vfunc can potentially supply a more precise
2285 description of the event to the user.
2287 "calling 'foo' from 'bar'"
2289 "passing possibly-NULL pointer 'ptr' to 'foo' from 'bar' as param 1"
2290 when the diagnostic relates to later dereferencing 'ptr'. */
2293 diagnostic_manager::prune_for_sm_diagnostic (checker_path *path,
2294 const state_machine *sm,
2296 state_machine::state_t state) const
2298 int idx = path->num_events () - 1;
2299 while (idx >= 0 && idx < (signed)path->num_events ())
2301 checker_event *base_event = path->get_checker_event (idx);
2308 label_text sval_desc = sval->get_desc ();
2309 log ("considering event %i (%s), with sval: %qs, state: %qs",
2310 idx, event_kind_to_string (base_event->m_kind),
2311 sval_desc.get (), state->get_name ());
2314 log ("considering event %i (%s), with global state: %qs",
2315 idx, event_kind_to_string (base_event->m_kind),
2316 state->get_name ());
2319 log ("considering event %i", idx);
2322 switch (base_event->m_kind)
2328 if (m_verbosity < 4)
2330 log ("filtering event %i: debug event", idx);
2331 path->delete_event (idx);
2336 /* Don't filter custom events. */
2341 if (m_verbosity < 4)
2343 log ("filtering event %i: statement event", idx);
2344 path->delete_event (idx);
2349 case EK_REGION_CREATION:
2350 /* Don't filter these. */
2353 case EK_FUNCTION_ENTRY:
2354 if (m_verbosity < 1)
2356 log ("filtering event %i: function entry", idx);
2357 path->delete_event (idx);
2361 case EK_STATE_CHANGE:
2363 state_change_event *state_change = (state_change_event *)base_event;
2364 gcc_assert (state_change->m_dst_state.m_region_model);
2366 if (state_change->m_sval == sval)
2368 if (state_change->m_origin)
2372 label_text sval_desc = sval->get_desc ();
2373 label_text origin_sval_desc
2374 = state_change->m_origin->get_desc ();
2376 " switching var of interest from %qs to %qs",
2377 idx, sval_desc.get (),
2378 origin_sval_desc.get ());
2380 sval = state_change->m_origin;
2382 log ("event %i: switching state of interest from %qs to %qs",
2383 idx, state_change->m_to->get_name (),
2384 state_change->m_from->get_name ());
2385 state = state_change->m_from;
2387 else if (m_verbosity < 4)
2391 if (state_change->m_sval)
2393 label_text change_sval_desc
2394 = state_change->m_sval->get_desc ();
2397 label_text sval_desc = sval->get_desc ();
2398 log ("filtering event %i:"
2399 " state change to %qs unrelated to %qs",
2400 idx, change_sval_desc.get (),
2404 log ("filtering event %i: state change to %qs",
2405 idx, change_sval_desc.get ());
2408 log ("filtering event %i: global state change", idx);
2410 path->delete_event (idx);
2415 case EK_START_CFG_EDGE:
2417 cfg_edge_event *event = (cfg_edge_event *)base_event;
2419 /* TODO: is this edge significant to var?
2420 See if var can be in other states in the dest, but not
2421 in other states in the src?
2422 Must have multiple sibling edges. */
2424 if (event->should_filter_p (m_verbosity))
2426 log ("filtering events %i and %i: CFG edge", idx, idx + 1);
2427 path->delete_event (idx);
2428 /* Also delete the corresponding EK_END_CFG_EDGE. */
2429 gcc_assert (path->get_checker_event (idx)->m_kind
2430 == EK_END_CFG_EDGE);
2431 path->delete_event (idx);
2436 case EK_END_CFG_EDGE:
2437 /* These come in pairs with EK_START_CFG_EDGE events and are
2438 filtered when their start event is filtered. */
2443 call_event *event = (call_event *)base_event;
2444 const region_model *callee_model
2445 = event->m_eedge.m_dest->get_state ().m_region_model;
2446 const region_model *caller_model
2447 = event->m_eedge.m_src->get_state ().m_region_model;
2448 tree callee_var = callee_model->get_representative_tree (sval);
2454 const callgraph_superedge& cg_superedge
2455 = event->get_callgraph_superedge ();
2456 if (cg_superedge.m_cedge)
2458 = cg_superedge.map_expr_from_callee_to_caller (callee_var,
2461 caller_var = caller_model->get_representative_tree (sval);
2464 caller_var = caller_model->get_representative_tree (sval);
2470 label_text sval_desc = sval->get_desc ();
2472 " recording critical state for %qs at call"
2473 " from %qE in callee to %qE in caller",
2474 idx, sval_desc.get (), callee_var, caller_var);
2476 if (expr.param_p ())
2477 event->record_critical_state (caller_var, state);
2482 case EK_RETURN_EDGE:
2486 return_event *event = (return_event *)base_event;
2487 const region_model *caller_model
2488 = event->m_eedge.m_dest->get_state ().m_region_model;
2489 tree caller_var = caller_model->get_representative_tree (sval);
2490 const region_model *callee_model
2491 = event->m_eedge.m_src->get_state ().m_region_model;
2497 const callgraph_superedge& cg_superedge
2498 = event->get_callgraph_superedge ();
2499 if (cg_superedge.m_cedge)
2501 = cg_superedge.map_expr_from_caller_to_callee (caller_var,
2504 callee_var = callee_model->get_representative_tree (sval);
2507 callee_var = callee_model->get_representative_tree (sval);
2513 label_text sval_desc = sval->get_desc ();
2515 " recording critical state for %qs at return"
2516 " from %qE in caller to %qE in callee",
2517 idx, sval_desc.get (), callee_var, callee_var);
2519 if (expr.return_value_p ())
2520 event->record_critical_state (callee_var, state);
2526 case EK_INLINED_CALL:
2527 /* We don't expect to see these yet, as they're added later.
2528 We'd want to keep them around. */
2532 /* TODO: only show setjmp_events that matter i.e. those for which
2533 there is a later rewind event using them. */
2534 case EK_REWIND_FROM_LONGJMP:
2535 case EK_REWIND_TO_SETJMP:
2539 /* Always show the final "warning" event in the path. */
2546 /* Subroutine of diagnostic_manager::prune_for_sm_diagnostic.
2547 If *EXPR is not suitable to be the expression of interest in
2548 an sm-diagnostic, set *EXPR to NULL and log. */
2551 diagnostic_manager::update_for_unsuitable_sm_exprs (tree *expr) const
2554 if (*expr && !can_be_expr_of_interest_p (*expr))
2556 log ("new var %qE is unsuitable; setting var to NULL", *expr);
2561 /* Second pass of diagnostic_manager::prune_path: remove redundant
2562 interprocedural information.
2565 (1)- calling "f2" from "f1"
2566 (2)--- entry to "f2"
2567 (3)--- calling "f3" from "f2"
2568 (4)----- entry to "f3"
2569 (5)--- returning to "f2" to "f3"
2570 (6)- returning to "f1" to "f2"
2571 with no other intervening events, then none of these events are
2572 likely to be interesting to the user.
2574 Prune [..., call, function-entry, return, ...] triples repeatedly
2575 until nothing has changed. For the example above, this would
2576 remove events (3, 4, 5), and then remove events (1, 2, 6). */
2579 diagnostic_manager::prune_interproc_events (checker_path *path) const
2581 bool changed = false;
2585 int idx = (signed)path->num_events () - 1;
2588 /* Prune [..., call, function-entry, return, ...] triples. */
2589 if (idx + 2 < (signed)path->num_events ()
2590 && path->get_checker_event (idx)->is_call_p ()
2591 && path->get_checker_event (idx + 1)->is_function_entry_p ()
2592 && path->get_checker_event (idx + 2)->is_return_p ())
2597 (path->get_checker_event (idx)->get_desc (false));
2598 log ("filtering events %i-%i:"
2599 " irrelevant call/entry/return: %s",
2600 idx, idx + 2, desc.get ());
2602 path->delete_event (idx + 2);
2603 path->delete_event (idx + 1);
2604 path->delete_event (idx);
2610 /* Prune [..., call, return, ...] pairs
2611 (for -fanalyzer-verbosity=0). */
2612 if (idx + 1 < (signed)path->num_events ()
2613 && path->get_checker_event (idx)->is_call_p ()
2614 && path->get_checker_event (idx + 1)->is_return_p ())
2619 (path->get_checker_event (idx)->get_desc (false));
2620 log ("filtering events %i-%i:"
2621 " irrelevant call/return: %s",
2622 idx, idx + 1, desc.get ());
2624 path->delete_event (idx + 1);
2625 path->delete_event (idx);
2638 /* Return true iff event IDX within PATH is on the same line as REF_EXP_LOC. */
2641 same_line_as_p (const expanded_location &ref_exp_loc,
2642 checker_path *path, unsigned idx)
2644 const checker_event *ev = path->get_checker_event (idx);
2645 expanded_location idx_exp_loc = expand_location (ev->get_location ());
2646 gcc_assert (ref_exp_loc.file);
2647 if (idx_exp_loc.file == NULL)
2649 if (strcmp (ref_exp_loc.file, idx_exp_loc.file))
2651 return ref_exp_loc.line == idx_exp_loc.line;
2654 /* This path-readability optimization reduces the verbosity of compound
2655 conditional statements (without needing to reconstruct the AST, which
2656 has already been lost).
2658 For example, it converts:
2660 | 61 | if (cp[0] != '\0' && cp[0] != '#')
2661 | | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
2663 | | | | (6) ...to here
2664 | | | (7) following ‘true’ branch...
2665 | | (5) following ‘true’ branch...
2667 | 63 | alias = cp++;
2674 | 61 | if (cp[0] != '\0' && cp[0] != '#')
2677 | | (5) following ‘true’ branch...
2679 | 63 | alias = cp++;
2684 by combining events 5-8 into new events 5-6.
2686 Find runs of consecutive (start_cfg_edge_event, end_cfg_edge_event) pairs
2687 in which all events apart from the final end_cfg_edge_event are on the same
2688 line, and for which either all the CFG edges are TRUE edges, or all are
2691 Consolidate each such run into a
2692 (start_consolidated_cfg_edges_event, end_consolidated_cfg_edges_event)
2696 diagnostic_manager::consolidate_conditions (checker_path *path) const
2698 /* Don't simplify edges if we're debugging them. */
2699 if (flag_analyzer_verbose_edges)
2702 for (int start_idx = 0;
2703 start_idx < (signed)path->num_events () - 1;
2706 if (path->cfg_edge_pair_at_p (start_idx))
2708 const checker_event *old_start_ev
2709 = path->get_checker_event (start_idx);
2710 expanded_location start_exp_loc
2711 = expand_location (old_start_ev->get_location ());
2712 if (start_exp_loc.file == NULL)
2714 if (!same_line_as_p (start_exp_loc, path, start_idx + 1))
2717 /* Are we looking for a run of all TRUE edges, or all FALSE edges? */
2718 gcc_assert (old_start_ev->m_kind == EK_START_CFG_EDGE);
2719 const start_cfg_edge_event *old_start_cfg_ev
2720 = (const start_cfg_edge_event *)old_start_ev;
2721 const cfg_superedge& first_cfg_sedge
2722 = old_start_cfg_ev->get_cfg_superedge ();
2724 if (first_cfg_sedge.true_value_p ())
2726 else if (first_cfg_sedge.false_value_p ())
2731 /* Find a run of CFG start/end event pairs from
2732 [start_idx, next_idx)
2733 where all apart from the final event are on the same line,
2734 and all are either TRUE or FALSE edges, matching the initial. */
2735 int next_idx = start_idx + 2;
2736 while (path->cfg_edge_pair_at_p (next_idx)
2737 && same_line_as_p (start_exp_loc, path, next_idx))
2739 const checker_event *iter_ev
2740 = path->get_checker_event (next_idx);
2741 gcc_assert (iter_ev->m_kind == EK_START_CFG_EDGE);
2742 const start_cfg_edge_event *iter_cfg_ev
2743 = (const start_cfg_edge_event *)iter_ev;
2744 const cfg_superedge& iter_cfg_sedge
2745 = iter_cfg_ev->get_cfg_superedge ();
2748 if (!iter_cfg_sedge.true_value_p ())
2753 if (!iter_cfg_sedge.false_value_p ())
2759 /* If we have more than one pair in the run, consolidate. */
2760 if (next_idx > start_idx + 2)
2762 const checker_event *old_end_ev
2763 = path->get_checker_event (next_idx - 1);
2764 log ("consolidating CFG edge events %i-%i into %i-%i",
2765 start_idx, next_idx - 1, start_idx, start_idx +1);
2766 start_consolidated_cfg_edges_event *new_start_ev
2767 = new start_consolidated_cfg_edges_event
2768 (old_start_ev->get_location (),
2769 old_start_ev->get_fndecl (),
2770 old_start_ev->get_stack_depth (),
2772 checker_event *new_end_ev
2773 = new end_consolidated_cfg_edges_event
2774 (old_end_ev->get_location (),
2775 old_end_ev->get_fndecl (),
2776 old_end_ev->get_stack_depth ());
2777 path->replace_event (start_idx, new_start_ev);
2778 path->replace_event (start_idx + 1, new_end_ev);
2779 path->delete_events (start_idx + 2, next_idx - (start_idx + 2));
2785 /* Final pass of diagnostic_manager::prune_path.
2787 If all we're left with is in one function, then filter function entry
2791 diagnostic_manager::finish_pruning (checker_path *path) const
2793 if (!path->interprocedural_p ())
2795 int idx = path->num_events () - 1;
2796 while (idx >= 0 && idx < (signed)path->num_events ())
2798 checker_event *base_event = path->get_checker_event (idx);
2799 if (base_event->m_kind == EK_FUNCTION_ENTRY)
2801 log ("filtering event %i:"
2802 " function entry for purely intraprocedural path", idx);
2803 path->delete_event (idx);
2812 #endif /* #if ENABLE_ANALYZER */