gcc/analyzer/diagnostic-manager.cc
1 /* Classes for saving, deduplicating, and emitting analyzer diagnostics.
2 Copyright (C) 2019-2021 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "pretty-print.h"
26 #include "gcc-rich-location.h"
27 #include "gimple-pretty-print.h"
28 #include "function.h"
29 #include "diagnostic-core.h"
30 #include "diagnostic-event-id.h"
31 #include "diagnostic-path.h"
32 #include "alloc-pool.h"
33 #include "fibonacci_heap.h"
34 #include "shortest-paths.h"
35 #include "sbitmap.h"
36 #include "bitmap.h"
37 #include "tristate.h"
38 #include "selftest.h"
39 #include "ordered-hash-map.h"
40 #include "json.h"
41 #include "analyzer/analyzer.h"
42 #include "analyzer/analyzer-logging.h"
43 #include "analyzer/sm.h"
44 #include "analyzer/pending-diagnostic.h"
45 #include "analyzer/diagnostic-manager.h"
46 #include "analyzer/call-string.h"
47 #include "analyzer/program-point.h"
48 #include "analyzer/store.h"
49 #include "analyzer/region-model.h"
50 #include "analyzer/constraint-manager.h"
51 #include "cfg.h"
52 #include "basic-block.h"
53 #include "gimple.h"
54 #include "gimple-iterator.h"
55 #include "cgraph.h"
56 #include "digraph.h"
57 #include "analyzer/supergraph.h"
58 #include "analyzer/program-state.h"
59 #include "analyzer/exploded-graph.h"
60 #include "analyzer/trimmed-graph.h"
61 #include "analyzer/feasible-graph.h"
62 #include "analyzer/checker-path.h"
63 #include "analyzer/reachability.h"
65 #if ENABLE_ANALYZER
67 namespace ana {
69 class feasible_worklist;
71 /* State for finding the shortest feasible exploded_path for a
72 saved_diagnostic.
73 This is shared between all diagnostics, so that we avoid repeating work. */
75 class epath_finder
77 public:
78 epath_finder (const exploded_graph &eg)
79 : m_eg (eg),
80 m_sep (NULL)
82 /* This is shared by all diagnostics, but only needed if
83 !flag_analyzer_feasibility. */
84 if (!flag_analyzer_feasibility)
85 m_sep = new shortest_exploded_paths (eg, eg.get_origin (),
86 SPS_FROM_GIVEN_ORIGIN);
89 ~epath_finder () { delete m_sep; }
91 logger *get_logger () const { return m_eg.get_logger (); }
93 exploded_path *get_best_epath (const exploded_node *target_enode,
94 const char *desc, unsigned diag_idx,
95 feasibility_problem **out_problem);
97 private:
98 DISABLE_COPY_AND_ASSIGN(epath_finder);
100 exploded_path *explore_feasible_paths (const exploded_node *target_enode,
101 const char *desc, unsigned diag_idx);
102 bool process_worklist_item (feasible_worklist *worklist,
103 const trimmed_graph &tg,
104 feasible_graph *fg,
105 const exploded_node *target_enode,
106 unsigned diag_idx,
107 exploded_path **out_best_path) const;
108 void dump_trimmed_graph (const exploded_node *target_enode,
109 const char *desc, unsigned diag_idx,
110 const trimmed_graph &tg,
111 const shortest_paths<eg_traits, exploded_path> &sep);
112 void dump_feasible_graph (const exploded_node *target_enode,
113 const char *desc, unsigned diag_idx,
114 const feasible_graph &fg);
116 const exploded_graph &m_eg;
117 shortest_exploded_paths *m_sep;
120 /* class epath_finder. */
122 /* Get the "best" exploded_path for reaching ENODE from the origin,
123 returning ownership of it to the caller.
125 Ideally we want to report the shortest feasible path.
126 Return NULL if we could not find a feasible path
127 (when flag_analyzer_feasibility is true).
129 If flag_analyzer_feasibility is false, then simply return the
130 shortest path.
132 Use DESC and DIAG_IDX when logging.
134 Write any feasibility_problem to *OUT_PROBLEM. */
136 exploded_path *
137 epath_finder::get_best_epath (const exploded_node *enode,
138 const char *desc, unsigned diag_idx,
139 feasibility_problem **out_problem)
141 logger *logger = get_logger ();
142 LOG_SCOPE (logger);
144 unsigned snode_idx = enode->get_supernode ()->m_index;
145 if (logger)
146 logger->log ("considering %qs at EN: %i, SN: %i (sd: %i)",
147 desc, enode->m_index, snode_idx, diag_idx);
149 /* State-merging means that not every path in the egraph corresponds
150 to a feasible one w.r.t. states.
152 We want to find the shortest feasible path from the origin to ENODE
153 in the egraph. */
155 if (flag_analyzer_feasibility)
157 /* Attempt to find the shortest feasible path using feasible_graph. */
158 if (logger)
159 logger->log ("trying to find shortest feasible path");
160 if (exploded_path *epath = explore_feasible_paths (enode, desc, diag_idx))
162 if (logger)
163 logger->log ("accepting %qs at EN: %i, SN: %i (sd: %i)"
164 " with feasible path (length: %i)",
165 desc, enode->m_index, snode_idx, diag_idx,
166 epath->length ());
167 return epath;
169 else
171 if (logger)
172 logger->log ("rejecting %qs at EN: %i, SN: %i (sd: %i)"
173 " due to not finding feasible path",
174 desc, enode->m_index, snode_idx, diag_idx);
175 return NULL;
178 else
180 /* As a crude approximation to shortest feasible path, simply find
181 the shortest path, and note whether it is feasible.
182 There could be longer feasible paths within the egraph, so this
183 approach would lead to diagnostics being falsely rejected
184 (PR analyzer/96374). */
185 if (logger)
186 logger->log ("trying to find shortest path ignoring feasibility");
187 gcc_assert (m_sep);
188 exploded_path *epath
189 = new exploded_path (m_sep->get_shortest_path (enode));
190 if (epath->feasible_p (logger, out_problem, m_eg.get_engine (), &m_eg))
192 if (logger)
193 logger->log ("accepting %qs at EN: %i, SN: %i (sd: %i)"
194 " with feasible path (length: %i)",
195 desc, enode->m_index, snode_idx, diag_idx,
196 epath->length ());
198 else
200 if (logger)
201 logger->log ("accepting %qs at EN: %i, SN: %i (sd: %i) (length: %i)"
202 " despite infeasible path (due to %qs)",
203 desc, enode->m_index, snode_idx, diag_idx,
204 epath->length (),
205 "-fno-analyzer-feasibility");
207 return epath;
211 /* A class for managing the worklist of feasible_nodes in
212 epath_finder::explore_feasible_paths, prioritizing them
213 so that shorter paths appear earlier in the queue. */
215 class feasible_worklist
217 public:
218 feasible_worklist (const shortest_paths<eg_traits, exploded_path> &sep)
219 : m_queue (key_t (*this, NULL)),
220 m_sep (sep)
224 feasible_node *take_next () { return m_queue.extract_min (); }
226 void add_node (feasible_node *fnode)
228 m_queue.insert (key_t (*this, fnode), fnode);
231 private:
232 struct key_t
234 key_t (const feasible_worklist &w, feasible_node *fnode)
235 : m_worklist (w), m_fnode (fnode)
238 bool operator< (const key_t &other) const
240 return cmp (*this, other) < 0;
243 bool operator== (const key_t &other) const
245 return cmp (*this, other) == 0;
248 bool operator> (const key_t &other) const
250 return !(*this == other || *this < other);
253 private:
254 static int cmp (const key_t &ka, const key_t &kb)
256 /* Choose the node for which if the remaining path were feasible,
257 it would be the shortest path (summing the length of the
258 known-feasible path so far with that of the remaining
259 possibly-feasible path). */
260 int ca = ka.m_worklist.get_estimated_cost (ka.m_fnode);
261 int cb = kb.m_worklist.get_estimated_cost (kb.m_fnode);
262 return ca - cb;
265 const feasible_worklist &m_worklist;
266 feasible_node *m_fnode;
269 /* Get the estimated length of a path involving FNODE from
270 the origin to the target enode.
271 Sum the length of the known-feasible path so far with
272 that of the remaining possibly-feasible path. */
274 int get_estimated_cost (const feasible_node *fnode) const
276 unsigned length_so_far = fnode->get_path_length ();
277 int shortest_remaining_path
278 = m_sep.get_shortest_distance (fnode->get_inner_node ());
280 gcc_assert (shortest_remaining_path >= 0);
281 /* This should be true since we're only exploring nodes within
282 the trimmed graph (and we anticipate it being much smaller
283 than this, and thus not overflowing the sum). */
284 gcc_assert (shortest_remaining_path < INT_MAX);
286 return length_so_far + shortest_remaining_path;
289 /* Priority queue, backed by a fibonacci_heap. */
290 typedef fibonacci_heap<key_t, feasible_node> queue_t;
291 queue_t m_queue;
292 const shortest_paths<eg_traits, exploded_path> &m_sep;
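/* Illustrative sketch (not part of this file; a standalone analogue using
   only the C++ standard library, with hypothetical names).  The worklist
   above pops the node with the smallest estimated total cost, i.e. the
   length of the known-feasible prefix plus the precomputed shortest
   remaining distance to the target, so the first feasible path reaching
   the target is the shortest one.  */
#if 0
#include <queue>
#include <vector>

struct candidate
{
  int node_id;              /* hypothetical node identifier */
  int feasible_prefix_len;  /* length of the known-feasible path so far */
  int shortest_remaining;   /* lower bound on the remaining path length */

  int estimated_cost () const
  {
    return feasible_prefix_len + shortest_remaining;
  }
};

struct worse_estimate
{
  bool operator() (const candidate &a, const candidate &b) const
  {
    /* std::priority_queue is a max-heap, so invert the comparison to
       pop the smallest estimated cost first.  */
    return a.estimated_cost () > b.estimated_cost ();
  }
};

static int
demo_take_next ()
{
  std::priority_queue<candidate, std::vector<candidate>, worse_estimate> q;
  q.push ({1, 3, 4}); /* estimated cost 7.  */
  q.push ({2, 2, 2}); /* estimated cost 4; popped first.  */
  return q.top ().node_id; /* 2.  */
}
#endif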
295 /* When we're building the exploded graph we want to simplify
296 overly-complicated symbolic values down to "UNKNOWN" to try to avoid
297 state explosions and unbounded chains of exploration.
299 However, when we're building the feasibility graph for a diagnostic
300 (actually a tree), we don't want UNKNOWN values, as conditions on them
301 are also unknown: we don't want to have a contradiction such as a path
302 where (VAL != 0) and then (VAL == 0) along the same path.
304 Hence this is an RAII class for temporarily disabling complexity-checking
305 in the region_model_manager, for use within
306 epath_finder::explore_feasible_paths. */
308 class auto_disable_complexity_checks
310 public:
311 auto_disable_complexity_checks (region_model_manager *mgr) : m_mgr (mgr)
313 m_mgr->disable_complexity_check ();
315 ~auto_disable_complexity_checks ()
317 m_mgr->enable_complexity_check ();
319 private:
320 region_model_manager *m_mgr;
323 /* Attempt to find the shortest feasible path from the origin to
324 TARGET_ENODE by iteratively building a feasible_graph, in which
325 every path to a feasible_node is feasible by construction.
327 We effectively explore the tree of feasible paths in order of shortest
328 path until we either find a feasible path to TARGET_ENODE, or hit
329 a limit and give up.
331 Preliminaries:
332 - Find the shortest path from each node to the TARGET_ENODE (without
333 checking feasibility), so that we can prioritize our worklist.
334 - Construct a trimmed_graph: the subset of nodes/edges that
335 are on a path that eventually reaches TARGET_ENODE. We will only need
336 to consider these when considering the shortest feasible path.
338 Build a feasible_graph, in which every path to a feasible_node
339 is feasible by construction.
340 We use a worklist to flatten the exploration into an iteration.
341 Starting at the origin, find feasible out-edges within the trimmed graph.
342 At each stage, choose the node for which if the remaining path were feasible,
343 it would be the shortest path (summing the length of the known-feasible path
344 so far with that of the remaining possibly-feasible path).
345 This way, the first feasible path we find to TARGET_ENODE is the shortest.
346 We start by trying the shortest possible path, but if that fails,
347 we explore progressively longer paths, eventually trying iterations through
348 loops. The exploration is captured in the feasible_graph, which can be
349 dumped as a .dot file to visualize the exploration. The indices of the
350 feasible_nodes show the order in which they were created.
352 This is something of a brute-force approach, but the trimmed_graph
353 hopefully keeps the complexity manageable.
355 Terminate with failure when the number of infeasible edges exceeds
356 a threshold (--param=analyzer-max-infeasible-edges=).
357 This is guaranteed to eventually lead to termination, as
358 we can't keep creating feasible nodes without eventually
359 either reaching an infeasible edge, or reaching the
360 TARGET_ENODE. Specifically, there can't be a cycle of
361 feasible edges that doesn't reach the target_enode without
362 an out-edge that either fails feasibility or gets closer
363 to the TARGET_ENODE: on each iteration we are either:
364 - effectively getting closer to the TARGET_ENODE (which can't
365 continue forever without reaching the target), or
366 - getting monotonically closer to the termination threshold. */
368 exploded_path *
369 epath_finder::explore_feasible_paths (const exploded_node *target_enode,
370 const char *desc, unsigned diag_idx)
372 logger *logger = get_logger ();
373 LOG_SCOPE (logger);
375 region_model_manager *mgr = m_eg.get_engine ()->get_model_manager ();
377 /* Determine the shortest path to TARGET_ENODE from each node in
378 the exploded graph. */
379 shortest_paths<eg_traits, exploded_path> sep
380 (m_eg, target_enode, SPS_TO_GIVEN_TARGET);
382 /* Construct a trimmed_graph: the subset of nodes/edges that
383 are on a path that eventually reaches TARGET_ENODE.
384 We only need to consider these when considering the shortest
385 feasible path. */
386 trimmed_graph tg (m_eg, target_enode);
388 if (flag_dump_analyzer_feasibility)
389 dump_trimmed_graph (target_enode, desc, diag_idx, tg, sep);
391 feasible_graph fg;
392 feasible_worklist worklist (sep);
394 /* Populate the worklist with the origin node. */
396 feasibility_state init_state (mgr, m_eg.get_supergraph ());
397 feasible_node *origin = fg.add_node (m_eg.get_origin (), init_state, 0);
398 worklist.add_node (origin);
401 /* Iteratively explore the tree of feasible paths in order of shortest
402 path until we either find a feasible path to TARGET_ENODE, or hit
403 a limit. */
405 /* Set this if we find a feasible path to TARGET_ENODE. */
406 exploded_path *best_path = NULL;
409 auto_disable_complexity_checks sentinel (mgr);
411 while (process_worklist_item (&worklist, tg, &fg, target_enode, diag_idx,
412 &best_path))
414 /* Empty; the work is done within process_worklist_item. */
418 if (logger)
420 logger->log ("tg for sd: %i:", diag_idx);
421 logger->inc_indent ();
422 tg.log_stats (logger);
423 logger->dec_indent ();
425 logger->log ("fg for sd: %i:", diag_idx);
426 logger->inc_indent ();
427 fg.log_stats (logger);
428 logger->dec_indent ();
431 /* Dump the feasible_graph. */
432 if (flag_dump_analyzer_feasibility)
433 dump_feasible_graph (target_enode, desc, diag_idx, fg);
435 return best_path;
438 /* Process the next item in WORKLIST, potentially adding new items
439 based on feasible out-edges, and extending FG accordingly.
440 Use TG to ignore out-edges that don't lead to TARGET_ENODE.
441 Return true if the worklist processing should continue.
442 Return false if the processing of the worklist should stop
443 (either due to reaching TARGET_ENODE, or hitting a limit).
444 Write to *OUT_BEST_PATH if stopping due to finding a feasible path
445 to TARGET_ENODE. */
447 bool
448 epath_finder::process_worklist_item (feasible_worklist *worklist,
449 const trimmed_graph &tg,
450 feasible_graph *fg,
451 const exploded_node *target_enode,
452 unsigned diag_idx,
453 exploded_path **out_best_path) const
455 logger *logger = get_logger ();
457 feasible_node *fnode = worklist->take_next ();
458 if (!fnode)
460 if (logger)
461 logger->log ("drained worklist for sd: %i"
462 " without finding feasible path",
463 diag_idx);
464 return false;
467 log_scope s (logger, "fg worklist item",
468 "considering FN: %i (EN: %i) for sd: %i",
469 fnode->get_index (), fnode->get_inner_node ()->m_index,
470 diag_idx);
472 /* Iterate through all out-edges from this item. */
473 unsigned i;
474 exploded_edge *succ_eedge;
475 FOR_EACH_VEC_ELT (fnode->get_inner_node ()->m_succs, i, succ_eedge)
477 log_scope s (logger, "edge", "considering edge: EN:%i -> EN:%i",
478 succ_eedge->m_src->m_index,
479 succ_eedge->m_dest->m_index);
480 /* Reject edges that aren't in the trimmed graph. */
481 if (!tg.contains_p (succ_eedge))
483 if (logger)
484 logger->log ("rejecting: not in trimmed graph");
485 continue;
488 feasibility_state succ_state (fnode->get_state ());
489 rejected_constraint *rc = NULL;
490 if (succ_state.maybe_update_for_edge (logger, succ_eedge, &rc))
492 gcc_assert (rc == NULL);
493 feasible_node *succ_fnode
494 = fg->add_node (succ_eedge->m_dest,
495 succ_state,
496 fnode->get_path_length () + 1);
497 if (logger)
498 logger->log ("accepting as FN: %i", succ_fnode->get_index ());
499 fg->add_edge (new feasible_edge (fnode, succ_fnode, succ_eedge));
501 /* Have we reached TARGET_ENODE? */
502 if (succ_fnode->get_inner_node () == target_enode)
504 if (logger)
505 logger->log ("success: got feasible path to EN: %i (sd: %i)"
506 " (length: %i)",
507 target_enode->m_index, diag_idx,
508 succ_fnode->get_path_length ());
509 *out_best_path = fg->make_epath (succ_fnode);
510 /* Success: stop the worklist iteration. */
511 return false;
513 else
514 worklist->add_node (succ_fnode);
516 else
518 if (logger)
519 logger->log ("infeasible");
520 gcc_assert (rc);
521 fg->add_feasibility_problem (fnode,
522 succ_eedge,
523 rc);
525 /* Give up if there have been too many infeasible edges. */
526 if (fg->get_num_infeasible ()
527 > (unsigned)param_analyzer_max_infeasible_edges)
529 if (logger)
530 logger->log ("too many infeasible edges (%i); giving up",
531 fg->get_num_infeasible ());
532 return false;
537 /* Continue the worklist iteration. */
538 return true;
541 /* Helper class for epath_finder::dump_trimmed_graph
542 to dump extra per-node information.
543 Use SEP to add the length of the shortest path from each
544 node to the target node to each node's dump. */
546 class dump_eg_with_shortest_path : public eg_traits::dump_args_t
548 public:
549 dump_eg_with_shortest_path
550 (const exploded_graph &eg,
551 const shortest_paths<eg_traits, exploded_path> &sep)
552 : dump_args_t (eg),
553 m_sep (sep)
557 void dump_extra_info (const exploded_node *enode,
558 pretty_printer *pp) const FINAL OVERRIDE
560 pp_printf (pp, "sp: %i", m_sep.get_shortest_path (enode).length ());
561 pp_newline (pp);
564 private:
565 const shortest_paths<eg_traits, exploded_path> &m_sep;
568 /* Dump TG to "BASE_NAME.DESC.DIAG_IDX.to-enN.tg.dot",
569 annotating each node with the length of the shortest path
570 from that node to TARGET_ENODE (using SEP). */
572 void
573 epath_finder::
574 dump_trimmed_graph (const exploded_node *target_enode,
575 const char *desc, unsigned diag_idx,
576 const trimmed_graph &tg,
577 const shortest_paths<eg_traits, exploded_path> &sep)
579 auto_timevar tv (TV_ANALYZER_DUMP);
580 dump_eg_with_shortest_path inner_args (m_eg, sep);
581 trimmed_graph::dump_args_t args (inner_args);
582 pretty_printer pp;
583 pp_printf (&pp, "%s.%s.%i.to-en%i.tg.dot",
584 dump_base_name, desc, diag_idx, target_enode->m_index);
585 char *filename = xstrdup (pp_formatted_text (&pp));
586 tg.dump_dot (filename, NULL, args);
587 free (filename);
590 /* Dump FG to "BASE_NAME.DESC.DIAG_IDX.to-enN.fg.dot". */
592 void
593 epath_finder::dump_feasible_graph (const exploded_node *target_enode,
594 const char *desc, unsigned diag_idx,
595 const feasible_graph &fg)
597 auto_timevar tv (TV_ANALYZER_DUMP);
598 exploded_graph::dump_args_t inner_args (m_eg);
599 feasible_graph::dump_args_t args (inner_args);
600 pretty_printer pp;
601 pp_printf (&pp, "%s.%s.%i.to-en%i.fg.dot",
602 dump_base_name, desc, diag_idx, target_enode->m_index);
603 char *filename = xstrdup (pp_formatted_text (&pp));
604 fg.dump_dot (filename, NULL, args);
605 free (filename);
608 /* class saved_diagnostic. */
610 /* saved_diagnostic's ctor.
611 Take ownership of D and STMT_FINDER. */
613 saved_diagnostic::saved_diagnostic (const state_machine *sm,
614 const exploded_node *enode,
615 const supernode *snode, const gimple *stmt,
616 stmt_finder *stmt_finder,
617 tree var,
618 const svalue *sval,
619 state_machine::state_t state,
620 pending_diagnostic *d,
621 unsigned idx)
622 : m_sm (sm), m_enode (enode), m_snode (snode), m_stmt (stmt),
623 /* stmt_finder could be on-stack; we want our own copy that can
624 outlive that. */
625 m_stmt_finder (stmt_finder ? stmt_finder->clone () : NULL),
626 m_var (var), m_sval (sval), m_state (state),
627 m_d (d), m_trailing_eedge (NULL),
628 m_idx (idx),
629 m_best_epath (NULL), m_problem (NULL)
631 gcc_assert (m_stmt || m_stmt_finder);
633 /* We must have an enode in order to be able to look for paths
634 through the exploded_graph to this diagnostic. */
635 gcc_assert (m_enode);
638 /* saved_diagnostic's dtor. */
640 saved_diagnostic::~saved_diagnostic ()
642 delete m_stmt_finder;
643 delete m_d;
644 delete m_best_epath;
645 delete m_problem;
648 bool
649 saved_diagnostic::operator== (const saved_diagnostic &other) const
651 return (m_sm == other.m_sm
652 /* We don't compare m_enode. */
653 && m_snode == other.m_snode
654 && m_stmt == other.m_stmt
655 /* We don't compare m_stmt_finder. */
656 && pending_diagnostic::same_tree_p (m_var, other.m_var)
657 && m_state == other.m_state
658 && m_d->equal_p (*other.m_d)
659 && m_trailing_eedge == other.m_trailing_eedge);
662 /* Return a new json::object of the form
663 {"sm": optional str,
664 "enode": int,
665 "snode": int,
666 "sval": optional str,
667 "state": optional str,
668 "path_length": optional int,
669 "pending_diagnostic": str,
670 "idx": int}. */
672 json::object *
673 saved_diagnostic::to_json () const
675 json::object *sd_obj = new json::object ();
677 if (m_sm)
678 sd_obj->set ("sm", new json::string (m_sm->get_name ()));
679 sd_obj->set ("enode", new json::integer_number (m_enode->m_index));
680 sd_obj->set ("snode", new json::integer_number (m_snode->m_index));
681 if (m_sval)
682 sd_obj->set ("sval", m_sval->to_json ());
683 if (m_state)
684 sd_obj->set ("state", m_state->to_json ());
685 if (m_best_epath)
686 sd_obj->set ("path_length", new json::integer_number (get_epath_length ()));
687 sd_obj->set ("pending_diagnostic", new json::string (m_d->get_kind ()));
688 sd_obj->set ("idx", new json::integer_number (m_idx));
690 /* We're not yet JSONifying the following fields:
691 const gimple *m_stmt;
692 stmt_finder *m_stmt_finder;
693 tree m_var;
694 exploded_edge *m_trailing_eedge;
695 enum status m_status;
696 feasibility_problem *m_problem;
699 return sd_obj;
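/* For illustration only (hypothetical values, not taken from an actual run),
   the object built above has a shape along the lines of:

     {"sm": "malloc",
      "enode": 42,
      "snode": 7,
      "sval": "...",
      "state": "unchecked",
      "path_length": 15,
      "pending_diagnostic": "use_after_free",
      "idx": 0}

   where the optional fields appear only when the corresponding members are
   non-NULL, and "path_length" only once a best epath has been computed.  */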
702 /* Use PF to find the best exploded_path for this saved_diagnostic,
703 and store it in m_best_epath.
704 If m_stmt is still NULL, use m_stmt_finder on the epath to populate
705 m_stmt.
706 Return true if a best path was found. */
708 bool
709 saved_diagnostic::calc_best_epath (epath_finder *pf)
711 logger *logger = pf->get_logger ();
712 LOG_SCOPE (logger);
713 delete m_best_epath;
714 delete m_problem;
715 m_problem = NULL;
717 m_best_epath = pf->get_best_epath (m_enode, m_d->get_kind (), m_idx,
718 &m_problem);
720 /* Handle failure to find a feasible path. */
721 if (m_best_epath == NULL)
722 return false;
724 gcc_assert (m_best_epath);
725 if (m_stmt == NULL)
727 gcc_assert (m_stmt_finder);
728 m_stmt = m_stmt_finder->find_stmt (*m_best_epath);
730 gcc_assert (m_stmt);
732 return true;
735 unsigned
736 saved_diagnostic::get_epath_length () const
738 gcc_assert (m_best_epath);
739 return m_best_epath->length ();
742 /* Record that OTHER (and its duplicates) are duplicates
743 of this saved_diagnostic. */
745 void
746 saved_diagnostic::add_duplicate (saved_diagnostic *other)
748 gcc_assert (other);
749 m_duplicates.reserve (m_duplicates.length ()
750 + other->m_duplicates.length ()
751 + 1);
752 m_duplicates.splice (other->m_duplicates);
753 other->m_duplicates.truncate (0);
754 m_duplicates.safe_push (other);
757 /* Return true if this diagnostic supercedes OTHER, in which case OTHER
758 should not be emitted. */
760 bool
761 saved_diagnostic::supercedes_p (const saved_diagnostic &other) const
763 /* They should be at the same stmt. */
764 if (m_stmt != other.m_stmt)
765 return false;
766 return m_d->supercedes_p (*other.m_d);
769 /* State for building a checker_path from a particular exploded_path.
770 In particular, this precomputes reachability information: the set of
771 source enodes for which a path can be found to the diagnostic enode. */
773 class path_builder
775 public:
776 path_builder (const exploded_graph &eg,
777 const exploded_path &epath,
778 const feasibility_problem *problem,
779 const saved_diagnostic &sd)
780 : m_eg (eg),
781 m_diag_enode (epath.get_final_enode ()),
782 m_sd (sd),
783 m_reachability (eg, m_diag_enode),
784 m_feasibility_problem (problem)
787 const exploded_node *get_diag_node () const { return m_diag_enode; }
789 pending_diagnostic *get_pending_diagnostic () const
791 return m_sd.m_d;
794 bool reachable_from_p (const exploded_node *src_enode) const
796 return m_reachability.reachable_from_p (src_enode);
799 const extrinsic_state &get_ext_state () const { return m_eg.get_ext_state (); }
801 const feasibility_problem *get_feasibility_problem () const
803 return m_feasibility_problem;
806 const state_machine *get_sm () const { return m_sd.m_sm; }
808 private:
809 typedef reachability<eg_traits> enode_reachability;
811 const exploded_graph &m_eg;
813 /* The enode where the diagnostic occurs. */
814 const exploded_node *m_diag_enode;
816 const saved_diagnostic &m_sd;
818 /* Precompute all enodes from which the diagnostic is reachable. */
819 enode_reachability m_reachability;
821 const feasibility_problem *m_feasibility_problem;
824 /* class diagnostic_manager. */
826 /* diagnostic_manager's ctor. */
828 diagnostic_manager::diagnostic_manager (logger *logger, engine *eng,
829 int verbosity)
830 : log_user (logger), m_eng (eng), m_verbosity (verbosity)
834 /* Queue pending_diagnostic D at ENODE for later emission. */
836 void
837 diagnostic_manager::add_diagnostic (const state_machine *sm,
838 exploded_node *enode,
839 const supernode *snode, const gimple *stmt,
840 stmt_finder *finder,
841 tree var,
842 const svalue *sval,
843 state_machine::state_t state,
844 pending_diagnostic *d)
846 LOG_FUNC (get_logger ());
848 /* We must have an enode in order to be able to look for paths
849 through the exploded_graph to the diagnostic. */
850 gcc_assert (enode);
852 saved_diagnostic *sd
853 = new saved_diagnostic (sm, enode, snode, stmt, finder, var, sval,
854 state, d, m_saved_diagnostics.length ());
855 m_saved_diagnostics.safe_push (sd);
856 enode->add_diagnostic (sd);
857 if (get_logger ())
858 log ("adding saved diagnostic %i at SN %i to EN %i: %qs",
859 sd->get_index (),
860 snode->m_index, enode->m_index, d->get_kind ());
863 /* Queue pending_diagnostic D at ENODE for later emission. */
865 void
866 diagnostic_manager::add_diagnostic (exploded_node *enode,
867 const supernode *snode, const gimple *stmt,
868 stmt_finder *finder,
869 pending_diagnostic *d)
871 gcc_assert (enode);
872 add_diagnostic (NULL, enode, snode, stmt, finder, NULL_TREE, NULL, 0, d);
875 /* Return a new json::object of the form
876 {"diagnostics" : [obj for saved_diagnostic]}. */
878 json::object *
879 diagnostic_manager::to_json () const
881 json::object *dm_obj = new json::object ();
884 json::array *sd_arr = new json::array ();
885 int i;
886 saved_diagnostic *sd;
887 FOR_EACH_VEC_ELT (m_saved_diagnostics, i, sd)
888 sd_arr->append (sd->to_json ());
889 dm_obj->set ("diagnostics", sd_arr);
892 return dm_obj;
895 /* A class for identifying sets of duplicated pending_diagnostic.
897 We want to find the simplest saved_diagnostic amongst those that share a
898 dedupe_key. */
900 class dedupe_key
902 public:
903 dedupe_key (const saved_diagnostic &sd)
904 : m_sd (sd), m_stmt (sd.m_stmt)
906 gcc_assert (m_stmt);
909 hashval_t hash () const
911 inchash::hash hstate;
912 hstate.add_ptr (m_stmt);
913 // TODO: m_sd
914 return hstate.end ();
916 bool operator== (const dedupe_key &other) const
918 return (m_sd == other.m_sd
919 && m_stmt == other.m_stmt);
922 location_t get_location () const
924 return m_stmt->location;
927 /* A qsort comparator for use by dedupe_winners::emit_best
928 to sort them into location_t order. */
930 static int
931 comparator (const void *p1, const void *p2)
933 const dedupe_key *pk1 = *(const dedupe_key * const *)p1;
934 const dedupe_key *pk2 = *(const dedupe_key * const *)p2;
936 location_t loc1 = pk1->get_location ();
937 location_t loc2 = pk2->get_location ();
939 if (int cmp = linemap_compare_locations (line_table, loc2, loc1))
940 return cmp;
941 if (int cmp = ((int)pk1->m_sd.get_epath_length ()
942 - (int)pk2->m_sd.get_epath_length ()))
943 return cmp;
944 if (int cmp = strcmp (pk1->m_sd.m_d->get_kind (),
945 pk2->m_sd.m_d->get_kind ()))
946 return cmp;
947 return 0;
950 const saved_diagnostic &m_sd;
951 const gimple *m_stmt;
954 /* Traits for use by dedupe_winners. */
956 class dedupe_hash_map_traits
958 public:
959 typedef const dedupe_key *key_type;
960 typedef saved_diagnostic *value_type;
961 typedef saved_diagnostic *compare_type;
963 static inline hashval_t hash (const key_type &v)
965 return v->hash ();
967 static inline bool equal_keys (const key_type &k1, const key_type &k2)
969 return *k1 == *k2;
971 template <typename T>
972 static inline void remove (T &)
974 // TODO
976 template <typename T>
977 static inline void mark_deleted (T &entry)
979 entry.m_key = reinterpret_cast<key_type> (1);
981 template <typename T>
982 static inline void mark_empty (T &entry)
984 entry.m_key = NULL;
986 template <typename T>
987 static inline bool is_deleted (const T &entry)
989 return entry.m_key == reinterpret_cast<key_type> (1);
991 template <typename T>
992 static inline bool is_empty (const T &entry)
994 return entry.m_key == NULL;
996 static const bool empty_zero_p = true;
999 /* A class for deduplicating diagnostics and finding (and emitting) the
1000 best saved_diagnostic within each partition. */
1002 class dedupe_winners
1004 public:
1005 ~dedupe_winners ()
1007 /* Delete all keys, but not the saved_diagnostics. */
1008 for (map_t::iterator iter = m_map.begin ();
1009 iter != m_map.end ();
1010 ++iter)
1011 delete (*iter).first;
1014 /* Determine an exploded_path for SD using PF and, if it's feasible,
1015 determine if SD is the best seen so far for its dedupe_key.
1016 Record the winning SD for each dedupe_key. */
1018 void add (logger *logger,
1019 epath_finder *pf,
1020 saved_diagnostic *sd)
1022 /* Determine best epath for SD. */
1023 if (!sd->calc_best_epath (pf))
1024 return;
1026 dedupe_key *key = new dedupe_key (*sd);
1027 if (saved_diagnostic **slot = m_map.get (key))
1029 if (logger)
1030 logger->log ("already have this dedupe_key");
1032 saved_diagnostic *cur_best_sd = *slot;
1034 if (sd->get_epath_length () < cur_best_sd->get_epath_length ())
1036 /* We've got a shorter path for the key; replace
1037 the current candidate, marking it as a duplicate of SD. */
1038 if (logger)
1039 logger->log ("length %i is better than existing length %i;"
1040 " taking over this dedupe_key",
1041 sd->get_epath_length (),
1042 cur_best_sd->get_epath_length ());
1043 sd->add_duplicate (cur_best_sd);
1044 *slot = sd;
1046 else
1047 /* We haven't beaten the current best candidate; add SD
1048 as a duplicate of it. */
1050 if (logger)
1051 logger->log ("length %i isn't better than existing length %i;"
1052 " dropping this candidate",
1053 sd->get_epath_length (),
1054 cur_best_sd->get_epath_length ());
1055 cur_best_sd->add_duplicate (sd);
1057 delete key;
1059 else
1061 /* This is the first candidate for this key. */
1062 m_map.put (key, sd);
1063 if (logger)
1064 logger->log ("first candidate for this dedupe_key");
1068 /* Handle interactions between the dedupe winners, so that some
1069 diagnostics can supercede others (of different kinds).
1071 We want use-after-free to supercede use-of-uninitialized-value,
1072 so that if we have these at the same stmt, we don't emit
1073 a use-of-uninitialized, just the use-after-free. */
1075 void handle_interactions (diagnostic_manager *dm)
1077 LOG_SCOPE (dm->get_logger ());
1078 auto_vec<const dedupe_key *> superceded;
1079 for (auto outer : m_map)
1081 const saved_diagnostic *outer_sd = outer.second;
1082 for (auto inner : m_map)
1084 const saved_diagnostic *inner_sd = inner.second;
1085 if (inner_sd->supercedes_p (*outer_sd))
1087 superceded.safe_push (outer.first);
1088 if (dm->get_logger ())
1089 dm->log ("sd[%i] \"%s\" superceded by sd[%i] \"%s\"",
1090 outer_sd->get_index (), outer_sd->m_d->get_kind (),
1091 inner_sd->get_index (), inner_sd->m_d->get_kind ());
1092 break;
1096 for (auto iter : superceded)
1097 m_map.remove (iter);
1100 /* Emit the simplest diagnostic within each set. */
1102 void emit_best (diagnostic_manager *dm,
1103 const exploded_graph &eg)
1105 LOG_SCOPE (dm->get_logger ());
1107 /* Get keys into a vec for sorting. */
1108 auto_vec<const dedupe_key *> keys (m_map.elements ());
1109 for (map_t::iterator iter = m_map.begin ();
1110 iter != m_map.end ();
1111 ++iter)
1112 keys.quick_push ((*iter).first);
1114 dm->log ("# keys after de-duplication: %i", keys.length ());
1116 /* Sort into a good emission order. */
1117 keys.qsort (dedupe_key::comparator);
1119 /* Emit the best saved_diagnostics for each key. */
1120 int i;
1121 const dedupe_key *key;
1122 FOR_EACH_VEC_ELT (keys, i, key)
1124 saved_diagnostic **slot = m_map.get (key);
1125 gcc_assert (*slot);
1126 const saved_diagnostic *sd = *slot;
1127 dm->emit_saved_diagnostic (eg, *sd);
1131 private:
1132 /* This maps from each dedupe_key to a current best saved_diagnostic. */
1134 typedef hash_map<const dedupe_key *, saved_diagnostic *,
1135 dedupe_hash_map_traits> map_t;
1136 map_t m_map;
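/* Illustrative sketch (not part of this file; a standalone analogue using
   only the C++ standard library, with hypothetical types).  The map above
   keeps, per dedupe_key, the candidate with the shortest feasible path;
   other candidates with the same key are recorded as duplicates of the
   winner.  */
#if 0
#include <map>
#include <string>
#include <utility>

struct toy_diagnostic
{
  std::string kind;      /* analogue of the pending_diagnostic kind */
  unsigned location;     /* analogue of the stmt location */
  unsigned path_length;  /* length of the best feasible path */
  unsigned num_dupes;
};

typedef std::map<std::pair<unsigned, std::string>, toy_diagnostic *> toy_map_t;

/* Key on (location, kind); keep the shortest-path winner per key.  */
static void
add_candidate (toy_map_t &best, toy_diagnostic *cand)
{
  std::pair<unsigned, std::string> key (cand->location, cand->kind);
  toy_map_t::iterator it = best.find (key);
  if (it == best.end ())
    best[key] = cand;			/* first candidate for this key */
  else if (cand->path_length < it->second->path_length)
    {
      /* Shorter path: the existing winner becomes a duplicate of CAND.  */
      cand->num_dupes += it->second->num_dupes + 1;
      it->second = cand;
    }
  else
    it->second->num_dupes++;		/* keep the existing winner */
}
#endif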
1139 /* Emit all saved diagnostics. */
1141 void
1142 diagnostic_manager::emit_saved_diagnostics (const exploded_graph &eg)
1144 LOG_SCOPE (get_logger ());
1145 auto_timevar tv (TV_ANALYZER_DIAGNOSTICS);
1146 log ("# saved diagnostics: %i", m_saved_diagnostics.length ());
1147 if (get_logger ())
1149 unsigned i;
1150 saved_diagnostic *sd;
1151 FOR_EACH_VEC_ELT (m_saved_diagnostics, i, sd)
1152 log ("[%i] sd: %qs at EN: %i, SN: %i",
1153 i, sd->m_d->get_kind (), sd->m_enode->m_index,
1154 sd->m_snode->m_index);
1157 if (m_saved_diagnostics.length () == 0)
1158 return;
1160 /* Compute the shortest_paths once, sharing it between all diagnostics. */
1161 epath_finder pf (eg);
1163 /* Iterate through all saved diagnostics, adding them to a dedupe_winners
1164 instance. This partitions the saved diagnostics by dedupe_key,
1165 generating exploded_paths for them, and retaining the best one in each
1166 partition. */
1167 dedupe_winners best_candidates;
1169 int i;
1170 saved_diagnostic *sd;
1171 FOR_EACH_VEC_ELT (m_saved_diagnostics, i, sd)
1172 best_candidates.add (get_logger (), &pf, sd);
1174 best_candidates.handle_interactions (this);
1176 /* For each dedupe-key, call emit_saved_diagnostic on the "best"
1177 saved_diagnostic. */
1178 best_candidates.emit_best (this, eg);
1181 /* Given a saved_diagnostic SD with m_best_epath through EG,
1182 create a checker_path of suitable events and use it to call
1183 SD's underlying pending_diagnostic "emit" vfunc to emit a diagnostic. */
1185 void
1186 diagnostic_manager::emit_saved_diagnostic (const exploded_graph &eg,
1187 const saved_diagnostic &sd)
1189 LOG_SCOPE (get_logger ());
1190 log ("sd: %qs at SN: %i", sd.m_d->get_kind (), sd.m_snode->m_index);
1191 log ("num dupes: %i", sd.get_num_dupes ());
1193 pretty_printer *pp = global_dc->printer->clone ();
1195 const exploded_path *epath = sd.get_best_epath ();
1196 gcc_assert (epath);
1198 /* Precompute all enodes from which the diagnostic is reachable. */
1199 path_builder pb (eg, *epath, sd.get_feasibility_problem (), sd);
1201 /* This is the diagnostic_path subclass that will be built for
1202 the diagnostic. */
1203 checker_path emission_path;
1205 /* Populate emission_path with a full description of EPATH. */
1206 build_emission_path (pb, *epath, &emission_path);
1208 /* Now prune it to just cover the most pertinent events. */
1209 prune_path (&emission_path, sd.m_sm, sd.m_sval, sd.m_state);
1211 /* Add a final event to the path, covering the diagnostic itself.
1212 We use the final enode from the epath, which might be different from
1213 the sd.m_enode, as the dedupe code doesn't care about enodes, just
1214 snodes. */
1215 emission_path.add_final_event (sd.m_sm, epath->get_final_enode (), sd.m_stmt,
1216 sd.m_var, sd.m_state);
1218 /* The "final" event might not be final; if the saved_diagnostic has a
1219 trailing eedge stashed, add any events for it. This is for use
1220 in handling longjmp, to show where a longjmp is rewinding to. */
1221 if (sd.m_trailing_eedge)
1222 add_events_for_eedge (pb, *sd.m_trailing_eedge, &emission_path);
1224 emission_path.prepare_for_emission (sd.m_d);
1226 location_t loc = get_stmt_location (sd.m_stmt, sd.m_snode->m_fun);
1228 /* Allow the pending_diagnostic to fix up the primary location
1229 and any locations for events. */
1230 loc = sd.m_d->fixup_location (loc);
1231 emission_path.fixup_locations (sd.m_d);
1233 gcc_rich_location rich_loc (loc);
1234 rich_loc.set_path (&emission_path);
1236 auto_diagnostic_group d;
1237 auto_cfun sentinel (sd.m_snode->m_fun);
1238 if (sd.m_d->emit (&rich_loc))
1240 unsigned num_dupes = sd.get_num_dupes ();
1241 if (flag_analyzer_show_duplicate_count && num_dupes > 0)
1242 inform_n (loc, num_dupes,
1243 "%i duplicate", "%i duplicates",
1244 num_dupes);
1245 if (flag_dump_analyzer_exploded_paths)
1247 auto_timevar tv (TV_ANALYZER_DUMP);
1248 pretty_printer pp;
1249 pp_printf (&pp, "%s.%i.%s.epath.txt",
1250 dump_base_name, sd.get_index (), sd.m_d->get_kind ());
1251 char *filename = xstrdup (pp_formatted_text (&pp));
1252 epath->dump_to_file (filename, eg.get_ext_state ());
1253 inform (loc, "exploded path written to %qs", filename);
1254 free (filename);
1257 delete pp;
1260 /* Emit a "path" of events to EMISSION_PATH describing the exploded path
1261 EPATH within EG. */
1263 void
1264 diagnostic_manager::build_emission_path (const path_builder &pb,
1265 const exploded_path &epath,
1266 checker_path *emission_path) const
1268 LOG_SCOPE (get_logger ());
1269 for (unsigned i = 0; i < epath.m_edges.length (); i++)
1271 const exploded_edge *eedge = epath.m_edges[i];
1272 add_events_for_eedge (pb, *eedge, emission_path);
1276 /* Subclass of state_change_visitor that creates state_change_event
1277 instances. */
1279 class state_change_event_creator : public state_change_visitor
1281 public:
1282 state_change_event_creator (const path_builder &pb,
1283 const exploded_edge &eedge,
1284 checker_path *emission_path)
1285 : m_pb (pb),
1286 m_eedge (eedge),
1287 m_emission_path (emission_path)
1290 bool on_global_state_change (const state_machine &sm,
1291 state_machine::state_t src_sm_val,
1292 state_machine::state_t dst_sm_val)
1293 FINAL OVERRIDE
1295 if (&sm != m_pb.get_sm ())
1296 return false;
1297 const exploded_node *src_node = m_eedge.m_src;
1298 const program_point &src_point = src_node->get_point ();
1299 const int src_stack_depth = src_point.get_stack_depth ();
1300 const exploded_node *dst_node = m_eedge.m_dest;
1301 const gimple *stmt = src_point.get_stmt ();
1302 const supernode *supernode = src_point.get_supernode ();
1303 const program_state &dst_state = dst_node->get_state ();
1305 int stack_depth = src_stack_depth;
1307 m_emission_path->add_event (new state_change_event (supernode,
1308 stmt,
1309 stack_depth,
1311 NULL,
1312 src_sm_val,
1313 dst_sm_val,
1314 NULL,
1315 dst_state));
1316 return false;
1319 bool on_state_change (const state_machine &sm,
1320 state_machine::state_t src_sm_val,
1321 state_machine::state_t dst_sm_val,
1322 const svalue *sval,
1323 const svalue *dst_origin_sval) FINAL OVERRIDE
1325 if (&sm != m_pb.get_sm ())
1326 return false;
1327 const exploded_node *src_node = m_eedge.m_src;
1328 const program_point &src_point = src_node->get_point ();
1329 const int src_stack_depth = src_point.get_stack_depth ();
1330 const exploded_node *dst_node = m_eedge.m_dest;
1331 const gimple *stmt = src_point.get_stmt ();
1332 const supernode *supernode = src_point.get_supernode ();
1333 const program_state &dst_state = dst_node->get_state ();
1335 int stack_depth = src_stack_depth;
1337 if (m_eedge.m_sedge
1338 && m_eedge.m_sedge->m_kind == SUPEREDGE_CFG_EDGE)
1340 supernode = src_point.get_supernode ();
1341 stmt = supernode->get_last_stmt ();
1342 stack_depth = src_stack_depth;
1345 /* Bulletproofing for state changes at calls/returns;
1346 TODO: is there a better way? */
1347 if (!stmt)
1348 return false;
1350 m_emission_path->add_event (new state_change_event (supernode,
1351 stmt,
1352 stack_depth,
1354 sval,
1355 src_sm_val,
1356 dst_sm_val,
1357 dst_origin_sval,
1358 dst_state));
1359 return false;
1362 const path_builder &m_pb;
1363 const exploded_edge &m_eedge;
1364 checker_path *m_emission_path;
1367 /* Compare SRC_STATE and DST_STATE (which use EXT_STATE), and call
1368 VISITOR's on_state_change for every sm-state change that occurs
1369 to a tree, and on_global_state_change for every global state change
1370 that occurs.
1372 This determines the state changes that ought to be reported to
1373 the user: a combination of the effects of changes to sm_state_map
1374 (which maps svalues to sm-states), and of region_model changes
1375 (which map trees to svalues).
1377 Bail out early and return true if any call to on_global_state_change
1378 or on_state_change returns true, otherwise return false.
1380 This is split out to make it easier to experiment with changes to
1381 exploded_node granularity (so that we can observe what state changes
1382 lead to state_change_events being emitted). */
1384 bool
1385 for_each_state_change (const program_state &src_state,
1386 const program_state &dst_state,
1387 const extrinsic_state &ext_state,
1388 state_change_visitor *visitor)
1390 gcc_assert (src_state.m_checker_states.length ()
1391 == ext_state.get_num_checkers ());
1392 gcc_assert (dst_state.m_checker_states.length ()
1393 == ext_state.get_num_checkers ());
1394 for (unsigned i = 0; i < ext_state.get_num_checkers (); i++)
1396 const state_machine &sm = ext_state.get_sm (i);
1397 const sm_state_map &src_smap = *src_state.m_checker_states[i];
1398 const sm_state_map &dst_smap = *dst_state.m_checker_states[i];
1400 /* Add events for any global state changes. */
1401 if (src_smap.get_global_state () != dst_smap.get_global_state ())
1402 if (visitor->on_global_state_change (sm,
1403 src_smap.get_global_state (),
1404 dst_smap.get_global_state ()))
1405 return true;
1407 /* Add events for per-svalue state changes. */
1408 for (sm_state_map::iterator_t iter = dst_smap.begin ();
1409 iter != dst_smap.end ();
1410 ++iter)
1412 const svalue *sval = (*iter).first;
1413 state_machine::state_t dst_sm_val = (*iter).second.m_state;
1414 state_machine::state_t src_sm_val
1415 = src_smap.get_state (sval, ext_state);
1416 if (dst_sm_val != src_sm_val)
1418 const svalue *origin_sval = (*iter).second.m_origin;
1419 if (visitor->on_state_change (sm, src_sm_val, dst_sm_val,
1420 sval, origin_sval))
1421 return true;
1425 return false;
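/* Illustrative sketch (not part of this file; a standalone analogue using
   only the C++ standard library, with hypothetical names).  The walk above
   reports one change per key whose state differs between the source and
   destination maps, treating a key absent from the source map as being in
   the start state.  */
#if 0
#include <map>
#include <string>

typedef std::map<std::string, int> toy_state_map;

/* Call REPORT (key, old_state, new_state) for every key of DST whose state
   differs from its state in SRC, treating absence from SRC as state 0.  */
template <typename Callback>
static void
for_each_toy_state_change (const toy_state_map &src,
			   const toy_state_map &dst,
			   Callback report)
{
  for (toy_state_map::const_iterator it = dst.begin ();
       it != dst.end (); ++it)
    {
      toy_state_map::const_iterator old_it = src.find (it->first);
      int old_state = (old_it != src.end () ? old_it->second : 0);
      if (old_state != it->second)
	report (it->first, old_state, it->second);
    }
}
#endif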
1428 /* An sm_context for adding state_change_event on assignments to NULL,
1429 where the default state isn't m_start. Storing such state in the
1430 sm_state_map would lead to bloat of the exploded_graph, so we want
1431 to leave it as a default state, and inject state change events here
1432 when we have a diagnostic.
1433 Find transitions of constants, for handling on_zero_assignment. */
1435 struct null_assignment_sm_context : public sm_context
1437 null_assignment_sm_context (int sm_idx,
1438 const state_machine &sm,
1439 const program_state *old_state,
1440 const program_state *new_state,
1441 const gimple *stmt,
1442 const program_point *point,
1443 checker_path *emission_path,
1444 const extrinsic_state &ext_state)
1445 : sm_context (sm_idx, sm), m_old_state (old_state), m_new_state (new_state),
1446 m_stmt (stmt), m_point (point), m_emission_path (emission_path),
1447 m_ext_state (ext_state)
1451 tree get_fndecl_for_call (const gcall */*call*/) FINAL OVERRIDE
1453 return NULL_TREE;
1456 state_machine::state_t get_state (const gimple *stmt ATTRIBUTE_UNUSED,
1457 tree var) FINAL OVERRIDE
1459 const svalue *var_old_sval
1460 = m_old_state->m_region_model->get_rvalue (var, NULL);
1461 const sm_state_map *old_smap = m_old_state->m_checker_states[m_sm_idx];
1463 state_machine::state_t current
1464 = old_smap->get_state (var_old_sval, m_ext_state);
1466 return current;
1469 state_machine::state_t get_state (const gimple *stmt ATTRIBUTE_UNUSED,
1470 const svalue *sval) FINAL OVERRIDE
1472 const sm_state_map *old_smap = m_old_state->m_checker_states[m_sm_idx];
1473 state_machine::state_t current = old_smap->get_state (sval, m_ext_state);
1474 return current;
1477 void set_next_state (const gimple *stmt,
1478 tree var,
1479 state_machine::state_t to,
1480 tree origin ATTRIBUTE_UNUSED) FINAL OVERRIDE
1482 state_machine::state_t from = get_state (stmt, var);
1483 if (from != m_sm.get_start_state ())
1484 return;
1486 const svalue *var_new_sval
1487 = m_new_state->m_region_model->get_rvalue (var, NULL);
1488 const supernode *supernode = m_point->get_supernode ();
1489 int stack_depth = m_point->get_stack_depth ();
1491 m_emission_path->add_event (new state_change_event (supernode,
1492 m_stmt,
1493 stack_depth,
1494 m_sm,
1495 var_new_sval,
1496 from, to,
1497 NULL,
1498 *m_new_state));
1501 void set_next_state (const gimple *stmt,
1502 const svalue *sval,
1503 state_machine::state_t to,
1504 tree origin ATTRIBUTE_UNUSED) FINAL OVERRIDE
1506 state_machine::state_t from = get_state (stmt, sval);
1507 if (from != m_sm.get_start_state ())
1508 return;
1510 const supernode *supernode = m_point->get_supernode ();
1511 int stack_depth = m_point->get_stack_depth ();
1513 m_emission_path->add_event (new state_change_event (supernode,
1514 m_stmt,
1515 stack_depth,
1516 m_sm,
1517 sval,
1518 from, to,
1519 NULL,
1520 *m_new_state));
1523 void warn (const supernode *, const gimple *,
1524 tree, pending_diagnostic *d) FINAL OVERRIDE
1526 delete d;
1529 tree get_diagnostic_tree (tree expr) FINAL OVERRIDE
1531 return expr;
1534 tree get_diagnostic_tree (const svalue *sval) FINAL OVERRIDE
1536 return m_new_state->m_region_model->get_representative_tree (sval);
1539 state_machine::state_t get_global_state () const FINAL OVERRIDE
1541 return 0;
1544 void set_global_state (state_machine::state_t) FINAL OVERRIDE
1546 /* No-op. */
1549 void on_custom_transition (custom_transition *) FINAL OVERRIDE
1553 tree is_zero_assignment (const gimple *stmt) FINAL OVERRIDE
1555 const gassign *assign_stmt = dyn_cast <const gassign *> (stmt);
1556 if (!assign_stmt)
1557 return NULL_TREE;
1558 if (const svalue *sval
1559 = m_new_state->m_region_model->get_gassign_result (assign_stmt, NULL))
1560 if (tree cst = sval->maybe_get_constant ())
1561 if (::zerop(cst))
1562 return gimple_assign_lhs (assign_stmt);
1563 return NULL_TREE;
1566 const program_state *m_old_state;
1567 const program_state *m_new_state;
1568 const gimple *m_stmt;
1569 const program_point *m_point;
1570 checker_path *m_emission_path;
1571 const extrinsic_state &m_ext_state;
1574 /* Subroutine of diagnostic_manager::build_emission_path.
1575 Add any events for EEDGE to EMISSION_PATH. */
1577 void
1578 diagnostic_manager::add_events_for_eedge (const path_builder &pb,
1579 const exploded_edge &eedge,
1580 checker_path *emission_path) const
1582 const exploded_node *src_node = eedge.m_src;
1583 const program_point &src_point = src_node->get_point ();
1584 const exploded_node *dst_node = eedge.m_dest;
1585 const program_point &dst_point = dst_node->get_point ();
1586 const int dst_stack_depth = dst_point.get_stack_depth ();
1587 if (get_logger ())
1589 get_logger ()->start_log_line ();
1590 pretty_printer *pp = get_logger ()->get_printer ();
1591 pp_printf (pp, "EN %i -> EN %i: ",
1592 eedge.m_src->m_index,
1593 eedge.m_dest->m_index);
1594 src_point.print (pp, format (false));
1595 pp_string (pp, "-> ");
1596 dst_point.print (pp, format (false));
1597 get_logger ()->end_log_line ();
1599 const program_state &src_state = src_node->get_state ();
1600 const program_state &dst_state = dst_node->get_state ();
1602 /* Add state change events for the states that have changed.
1603 We add these before events for superedges, so that if we have a
1604 state_change_event due to following an edge, we'll get this sequence
1605 of events:
1607 | if (!ptr)
1610 | (1) assuming 'ptr' is non-NULL (state_change_event)
1611 | (2) following 'false' branch... (start_cfg_edge_event)
1613 | do_something (ptr);
1614 | ~~~~~~~~~~~~~^~~~~
1616 | (3) ...to here (end_cfg_edge_event). */
1617 state_change_event_creator visitor (pb, eedge, emission_path);
1618 for_each_state_change (src_state, dst_state, pb.get_ext_state (),
1619 &visitor);
1621 /* Allow non-standard edges to add events, e.g. when rewinding from
1622 longjmp to a setjmp. */
1623 if (eedge.m_custom_info)
1624 eedge.m_custom_info->add_events_to_path (emission_path, eedge);
1626 /* Add events for superedges, function entries, and for statements. */
1627 switch (dst_point.get_kind ())
1629 default:
1630 break;
1631 case PK_BEFORE_SUPERNODE:
1632 if (src_point.get_kind () == PK_AFTER_SUPERNODE)
1634 if (eedge.m_sedge)
1635 add_events_for_superedge (pb, eedge, emission_path);
1637 /* Add function entry events. */
1638 if (dst_point.get_supernode ()->entry_p ())
1640 emission_path->add_event
1641 (new function_entry_event
1642 (dst_point.get_supernode ()->get_start_location (),
1643 dst_point.get_fndecl (),
1644 dst_stack_depth));
1646 break;
1647 case PK_BEFORE_STMT:
1649 const gimple *stmt = dst_point.get_stmt ();
1650 const gcall *call = dyn_cast <const gcall *> (stmt);
1651 if (call && is_setjmp_call_p (call))
1652 emission_path->add_event
1653 (new setjmp_event (stmt->location,
1654 dst_node,
1655 dst_point.get_fndecl (),
1656 dst_stack_depth,
1657 call));
1658 else
1659 emission_path->add_event
1660 (new statement_event (stmt,
1661 dst_point.get_fndecl (),
1662 dst_stack_depth, dst_state));
1664 /* Create state change events for assignment to NULL.
1665 Iterate through the stmts in dst_enode, adding state change
1666 events for them. */
1667 if (dst_state.m_region_model)
1669 program_state iter_state (dst_state);
1670 program_point iter_point (dst_point);
1671 while (1)
1673 const gimple *stmt = iter_point.get_stmt ();
1674 if (const gassign *assign = dyn_cast<const gassign *> (stmt))
1676 const extrinsic_state &ext_state = pb.get_ext_state ();
1677 program_state old_state (iter_state);
1678 iter_state.m_region_model->on_assignment (assign, NULL);
1679 for (unsigned i = 0; i < ext_state.get_num_checkers (); i++)
1681 const state_machine &sm = ext_state.get_sm (i);
1682 null_assignment_sm_context sm_ctxt (i, sm,
1683 &old_state,
1684 &iter_state,
1685 stmt,
1686 &iter_point,
1687 emission_path,
1688 pb.get_ext_state ());
1689 sm.on_stmt (&sm_ctxt, dst_point.get_supernode (), stmt);
1690 // TODO: what about phi nodes?
1693 iter_point.next_stmt ();
1694 if (iter_point.get_kind () == PK_AFTER_SUPERNODE
1695 || (dst_node->m_succs.length () > 1
1696 && (iter_point
1697 == dst_node->m_succs[0]->m_dest->get_point ())))
1698 break;
1702 break;
1705 if (pb.get_feasibility_problem ()
1706 && &pb.get_feasibility_problem ()->m_eedge == &eedge)
1708 pretty_printer pp;
1709 pp_format_decoder (&pp) = default_tree_printer;
1710 pp_string (&pp,
1711 "this path would have been rejected as infeasible"
1712 " at this edge: ");
1713 pb.get_feasibility_problem ()->dump_to_pp (&pp);
1714 emission_path->add_event (new precanned_custom_event
1715 (dst_point.get_location (),
1716 dst_point.get_fndecl (),
1717 dst_stack_depth,
1718 pp_formatted_text (&pp)));
1722 /* Return true if EEDGE is a significant edge in the path to the diagnostic
1723 for PB.
1725 Consider all of the sibling out-eedges from the same source enode
1726 as EEDGE.
1727 If there's no path from the destinations of those eedges to the
1728 diagnostic enode, then we have to take this eedge and thus it's
1729 significant.
1731 Conversely if there is a path from the destination of any other sibling
1732 eedge to the diagnostic enode, then this edge is insignificant.
1734 Example 1: redundant if-else:
1736 (A) if (...) A
1737 (B) ... / \
1738 else B C
1739 (C) ... \ /
1740 (D) [DIAGNOSTIC] D
1742 D is reachable by either B or C, so neither of these edges
1743 is significant.
1745 Example 2: pertinent if-else:
1747 (A) if (...) A
1748 (B) ... / \
1749 else B C
1750 (C) [NECESSARY CONDITION] | |
1751 (D) [POSSIBLE DIAGNOSTIC] D1 D2
1753 D becomes D1 and D2 in the exploded graph, where the diagnostic occurs
1754 at D2. D2 is only reachable via C, so the A -> C edge is significant.
1756 Example 3: redundant loop:
1758 (A) while (...) +-->A
1759 (B) ... | / \
1760 (C) ... +-B C
1761 (D) [DIAGNOSTIC] |
1764 D is reachable from both B and C, so the A->C edge is not significant. */
1766 bool
1767 diagnostic_manager::significant_edge_p (const path_builder &pb,
1768 const exploded_edge &eedge) const
1770 int i;
1771 exploded_edge *sibling;
1772 FOR_EACH_VEC_ELT (eedge.m_src->m_succs, i, sibling)
1774 if (sibling == &eedge)
1775 continue;
1776 if (pb.reachable_from_p (sibling->m_dest))
1778 if (get_logger ())
1779 get_logger ()->log (" edge EN: %i -> EN: %i is insignificant as"
1780 " EN: %i is also reachable via"
1781 " EN: %i -> EN: %i",
1782 eedge.m_src->m_index, eedge.m_dest->m_index,
1783 pb.get_diag_node ()->m_index,
1784 sibling->m_src->m_index,
1785 sibling->m_dest->m_index);
1786 return false;
1790 return true;
1793 /* Subroutine of diagnostic_manager::add_events_for_eedge
1794 where EEDGE has an underlying superedge i.e. a CFG edge,
1795 or an interprocedural call/return.
1796 Add any events for the superedge to EMISSION_PATH. */
1798 void
1799 diagnostic_manager::add_events_for_superedge (const path_builder &pb,
1800 const exploded_edge &eedge,
1801 checker_path *emission_path)
1802 const
1804 gcc_assert (eedge.m_sedge);
1806 /* Give diagnostics an opportunity to override this function. */
1807 pending_diagnostic *pd = pb.get_pending_diagnostic ();
1808 if (pd->maybe_add_custom_events_for_superedge (eedge, emission_path))
1809 return;
1811 /* Don't add events for insignificant edges at verbosity levels below 3. */
1812 if (m_verbosity < 3)
1813 if (!significant_edge_p (pb, eedge))
1814 return;
1816 const exploded_node *src_node = eedge.m_src;
1817 const program_point &src_point = src_node->get_point ();
1818 const exploded_node *dst_node = eedge.m_dest;
1819 const program_point &dst_point = dst_node->get_point ();
1820 const int src_stack_depth = src_point.get_stack_depth ();
1821 const int dst_stack_depth = dst_point.get_stack_depth ();
1822 const gimple *last_stmt = src_point.get_supernode ()->get_last_stmt ();
1824 switch (eedge.m_sedge->m_kind)
1826 case SUPEREDGE_CFG_EDGE:
1828 emission_path->add_event
1829 (new start_cfg_edge_event (eedge,
1830 (last_stmt
1831 ? last_stmt->location
1832 : UNKNOWN_LOCATION),
1833 src_point.get_fndecl (),
1834 src_stack_depth));
1835 emission_path->add_event
1836 (new end_cfg_edge_event (eedge,
1837 dst_point.get_supernode ()->get_start_location (),
1838 dst_point.get_fndecl (),
1839 dst_stack_depth));
1841 break;
1843 case SUPEREDGE_CALL:
1845 emission_path->add_event
1846 (new call_event (eedge,
1847 (last_stmt
1848 ? last_stmt->location
1849 : UNKNOWN_LOCATION),
1850 src_point.get_fndecl (),
1851 src_stack_depth));
1853 break;
1855 case SUPEREDGE_INTRAPROCEDURAL_CALL:
1857 /* TODO: add a subclass for this, or generate events for the
1858 summary. */
1859 emission_path->add_event
1860 (new debug_event ((last_stmt
1861 ? last_stmt->location
1862 : UNKNOWN_LOCATION),
1863 src_point.get_fndecl (),
1864 src_stack_depth,
1865 "call summary"));
1867 break;
1869 case SUPEREDGE_RETURN:
1871 const return_superedge *return_edge
1872 = as_a <const return_superedge *> (eedge.m_sedge);
1874 const gcall *call_stmt = return_edge->get_call_stmt ();
1875 emission_path->add_event
1876 (new return_event (eedge,
1877 (call_stmt
1878 ? call_stmt->location
1879 : UNKNOWN_LOCATION),
1880 dst_point.get_fndecl (),
1881 dst_stack_depth));
1883 break;
1887 /* Prune PATH, based on the verbosity level, to the most pertinent
1888 events for a diagnostic that involves VAR ending in state STATE
1889 (for state machine SM).
1891 PATH is updated in place, and the redundant checker_events are deleted.
1893 As well as deleting events, call record_critical_state on events in
1894 which state critical to the pending_diagnostic is being handled; see
1895 the comment for diagnostic_manager::prune_for_sm_diagnostic. */
1897 void
1898 diagnostic_manager::prune_path (checker_path *path,
1899 const state_machine *sm,
1900 const svalue *sval,
1901 state_machine::state_t state) const
1903 LOG_FUNC (get_logger ());
1904 path->maybe_log (get_logger (), "path");
1905 prune_for_sm_diagnostic (path, sm, sval, state);
1906 prune_interproc_events (path);
1907 consolidate_conditions (path);
1908 finish_pruning (path);
1909 path->maybe_log (get_logger (), "pruned");
1912 /* A cheap test to determine if EXPR can be the expression of interest in
1913 an sm-diagnostic, so that we can reject cases where we have a non-lvalue.
1914 We don't always have a model when calling this, so we can't use
1915 tentative_region_model_context, so there can be false positives. */
1917 static bool
1918 can_be_expr_of_interest_p (tree expr)
1920 if (!expr)
1921 return false;
1923 /* Reject constants. */
1924 if (CONSTANT_CLASS_P (expr))
1925 return false;
1927 /* Otherwise assume that it can be an lvalue. */
1928 return true;
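/* For example, 'ptr' or '*p' would be accepted here, whereas the
   constant '42' would be rejected.  */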
1931 /* First pass of diagnostic_manager::prune_path: apply verbosity level,
1932 pruning unrelated state change events.
1934 Iterate backwards through PATH, skipping state change events that don't
1935 relate to SVAL, but updating the pertinent SVAL when state-copying occurs.
1937 As well as deleting events, call record_critical_state on events in
1938 which state critical to the pending_diagnostic is being handled, so
1939 that the event's get_desc vfunc can potentially supply a more precise
1940 description of the event to the user.
1941 e.g. improving
1942 "calling 'foo' from 'bar'"
1944 "passing possibly-NULL pointer 'ptr' to 'foo' from 'bar' as param 1"
1945 when the diagnostic relates to later dereferencing 'ptr'. */
1947 void
1948 diagnostic_manager::prune_for_sm_diagnostic (checker_path *path,
1949 const state_machine *sm,
1950 const svalue *sval,
1951 state_machine::state_t state) const
1953 int idx = path->num_events () - 1;
1954 while (idx >= 0 && idx < (signed)path->num_events ())
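/* Note: the delete_event calls below shrink the path, hence the
   re-check of IDX against num_events on each iteration.  */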
1956 checker_event *base_event = path->get_checker_event (idx);
1957 if (get_logger ())
1959 if (sm)
1961 if (sval)
1963 label_text sval_desc = sval->get_desc ();
1964 log ("considering event %i (%s), with sval: %qs, state: %qs",
1965 idx, event_kind_to_string (base_event->m_kind),
1966 sval_desc.m_buffer, state->get_name ());
1968 else
1969 log ("considering event %i (%s), with global state: %qs",
1970 idx, event_kind_to_string (base_event->m_kind),
1971 state->get_name ());
1973 else
1974 log ("considering event %i", idx);
1977 switch (base_event->m_kind)
1979 default:
1980 gcc_unreachable ();
1982 case EK_DEBUG:
1983 if (m_verbosity < 4)
1985 log ("filtering event %i: debug event", idx);
1986 path->delete_event (idx);
1988 break;
1990 case EK_CUSTOM:
1991 /* Don't filter custom events. */
1992 break;
1994 case EK_STMT:
1996 if (m_verbosity < 4)
1998 log ("filtering event %i: statement event", idx);
1999 path->delete_event (idx);
2002 break;
2004 case EK_FUNCTION_ENTRY:
2005 if (m_verbosity < 1)
2007 log ("filtering event %i: function entry", idx);
2008 path->delete_event (idx);
2010 break;
2012 case EK_STATE_CHANGE:
2014 state_change_event *state_change = (state_change_event *)base_event;
2015 gcc_assert (state_change->m_dst_state.m_region_model);
2017 if (state_change->m_sval == sval)
2019 if (state_change->m_origin)
2021 if (get_logger ())
2023 label_text sval_desc = sval->get_desc ();
2024 label_text origin_sval_desc
2025 = state_change->m_origin->get_desc ();
2026 log ("event %i:"
2027 " switching var of interest from %qs to %qs",
2028 idx, sval_desc.m_buffer,
2029 origin_sval_desc.m_buffer);
2031 sval = state_change->m_origin;
2033 log ("event %i: switching state of interest from %qs to %qs",
2034 idx, state_change->m_to->get_name (),
2035 state_change->m_from->get_name ());
2036 state = state_change->m_from;
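/* From here on, earlier events are considered relative to the origin
   value (if any) and to its prior state.  */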
2038 else if (m_verbosity < 4)
2040 if (get_logger ())
2042 if (state_change->m_sval)
2044 label_text change_sval_desc
2045 = state_change->m_sval->get_desc ();
2046 if (sval)
2048 label_text sval_desc = sval->get_desc ();
2049 log ("filtering event %i:"
2050 " state change to %qs unrelated to %qs",
2051 idx, change_sval_desc.m_buffer,
2052 sval_desc.m_buffer);
2054 else
2055 log ("filtering event %i: state change to %qs",
2056 idx, change_sval_desc.m_buffer);
2058 else
2059 log ("filtering event %i: global state change", idx);
2061 path->delete_event (idx);
2064 break;
2066 case EK_START_CFG_EDGE:
2068 cfg_edge_event *event = (cfg_edge_event *)base_event;
2070 /* TODO: is this edge significant to var?
2071 See if var can be in other states in the dest, but not
2072 in other states in the src?
2073 Must have multiple sibling edges. */
2075 if (event->should_filter_p (m_verbosity))
2077 log ("filtering events %i and %i: CFG edge", idx, idx + 1);
2078 path->delete_event (idx);
2080 /* Also delete the corresponding EK_END_CFG_EDGE, which has now shifted into slot IDX. */
2080 gcc_assert (path->get_checker_event (idx)->m_kind
2081 == EK_END_CFG_EDGE);
2082 path->delete_event (idx);
2085 break;
2087 case EK_END_CFG_EDGE:
2088 /* These come in pairs with EK_START_CFG_EDGE events and are
2089 filtered when their start event is filtered. */
2090 break;
2092 case EK_CALL_EDGE:
2094 call_event *event = (call_event *)base_event;
2095 const region_model *callee_model
2096 = event->m_eedge.m_dest->get_state ().m_region_model;
2097 const region_model *caller_model
2098 = event->m_eedge.m_src->get_state ().m_region_model;
2099 tree callee_var = callee_model->get_representative_tree (sval);
2100 callsite_expr expr;
2102 tree caller_var;
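/* Map the representative tree for SVAL from the callee's frame back to
   the caller's frame (via the callgraph superedge when available),
   noting whether it was passed at the callsite.  */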
2103 if (event->m_sedge)
2105 const callgraph_superedge& cg_superedge
2106 = event->get_callgraph_superedge ();
2107 if (cg_superedge.m_cedge)
2108 caller_var
2109 = cg_superedge.map_expr_from_callee_to_caller (callee_var,
2110 &expr);
2111 else
2112 caller_var = caller_model->get_representative_tree (sval);
2114 else
2115 caller_var = caller_model->get_representative_tree (sval);
2117 if (caller_var)
2119 if (get_logger ())
2121 label_text sval_desc = sval->get_desc ();
2122 log ("event %i:"
2123 " recording critical state for %qs at call"
2124 " from %qE in callee to %qE in caller",
2125 idx, sval_desc.m_buffer, callee_var, caller_var);
2127 if (expr.param_p ())
2128 event->record_critical_state (caller_var, state);
2131 break;
2133 case EK_RETURN_EDGE:
2135 if (sval)
2137 return_event *event = (return_event *)base_event;
2138 const region_model *caller_model
2139 = event->m_eedge.m_dest->get_state ().m_region_model;
2140 tree caller_var = caller_model->get_representative_tree (sval);
2141 const region_model *callee_model
2142 = event->m_eedge.m_src->get_state ().m_region_model;
2143 callsite_expr expr;
2145 tree callee_var;
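/* Conversely, when walking backwards over a return edge, map the
   caller's representative tree for SVAL into the callee's frame,
   noting whether it was the callsite's return value.  */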
2146 if (event->m_sedge)
2148 const callgraph_superedge& cg_superedge
2149 = event->get_callgraph_superedge ();
2150 if (cg_superedge.m_cedge)
2151 callee_var
2152 = cg_superedge.map_expr_from_caller_to_callee (caller_var,
2153 &expr);
2154 else
2155 callee_var = callee_model->get_representative_tree (sval);
2157 else
2158 callee_var = callee_model->get_representative_tree (sval);
2160 if (callee_var)
2162 if (get_logger ())
2164 label_text sval_desc = sval->get_desc ();
2165 log ("event %i:"
2166 " recording critical state for %qs at return"
2167 " from %qE in caller to %qE in callee",
2168 idx, sval_desc.m_buffer, caller_var, callee_var);
2170 if (expr.return_value_p ())
2171 event->record_critical_state (callee_var, state);
2175 break;
2177 case EK_SETJMP:
2178 /* TODO: only show setjmp_events that matter i.e. those for which
2179 there is a later rewind event using them. */
2180 case EK_REWIND_FROM_LONGJMP:
2181 case EK_REWIND_TO_SETJMP:
2182 break;
2184 case EK_WARNING:
2185 /* Always show the final "warning" event in the path. */
2186 break;
2188 idx--;
2192 /* Subroutine of diagnostic_manager::prune_for_sm_diagnostic.
2193 If *EXPR is not suitable to be the expression of interest in
2194 an sm-diagnostic, set *EXPR to NULL and log. */
2196 void
2197 diagnostic_manager::update_for_unsuitable_sm_exprs (tree *expr) const
2199 gcc_assert (expr);
2200 if (*expr && !can_be_expr_of_interest_p (*expr))
2202 log ("new var %qE is unsuitable; setting var to NULL", *expr);
2203 *expr = NULL_TREE;
2207 /* Second pass of diagnostic_manager::prune_path: remove redundant
2208 interprocedural information.
2210 For example, given:
2211 (1)- calling "f2" from "f1"
2212 (2)--- entry to "f2"
2213 (3)--- calling "f3" from "f2"
2214 (4)----- entry to "f3"
2215 (5)--- returning to "f2" from "f3"
2216 (6)- returning to "f1" from "f2"
2217 with no other intervening events, then none of these events are
2218 likely to be interesting to the user.
2220 Prune [..., call, function-entry, return, ...] triples repeatedly
2221 until nothing has changed. For the example above, this would
2222 remove events (3, 4, 5), and then remove events (1, 2, 6). */
2224 void
2225 diagnostic_manager::prune_interproc_events (checker_path *path) const
2227 bool changed = false;
2228 do
2230 changed = false;
2231 int idx = (signed)path->num_events () - 1;
2232 while (idx >= 0)
2234 /* Prune [..., call, function-entry, return, ...] triples. */
2235 if (idx + 2 < (signed)path->num_events ()
2236 && path->get_checker_event (idx)->is_call_p ()
2237 && path->get_checker_event (idx + 1)->is_function_entry_p ()
2238 && path->get_checker_event (idx + 2)->is_return_p ())
2240 if (get_logger ())
2242 label_text desc
2243 (path->get_checker_event (idx)->get_desc (false));
2244 log ("filtering events %i-%i:"
2245 " irrelevant call/entry/return: %s",
2246 idx, idx + 2, desc.m_buffer);
2247 desc.maybe_free ();
2249 path->delete_event (idx + 2);
2250 path->delete_event (idx + 1);
2251 path->delete_event (idx);
2252 changed = true;
2253 idx--;
2254 continue;
2257 /* Prune [..., call, return, ...] pairs
2258 (for -fanalyzer-verbosity=0). */
2259 if (idx + 1 < (signed)path->num_events ()
2260 && path->get_checker_event (idx)->is_call_p ()
2261 && path->get_checker_event (idx + 1)->is_return_p ())
2263 if (get_logger ())
2265 label_text desc
2266 (path->get_checker_event (idx)->get_desc (false));
2267 log ("filtering events %i-%i:"
2268 " irrelevant call/return: %s",
2269 idx, idx + 1, desc.m_buffer);
2270 desc.maybe_free ();
2272 path->delete_event (idx + 1);
2273 path->delete_event (idx);
2274 changed = true;
2275 idx--;
2276 continue;
2279 idx--;
2283 while (changed);
2286 /* Return true iff event IDX within PATH is on the same line as REF_EXP_LOC. */
2288 static bool
2289 same_line_as_p (const expanded_location &ref_exp_loc,
2290 checker_path *path, unsigned idx)
2292 const checker_event *ev = path->get_checker_event (idx);
2293 expanded_location idx_exp_loc = expand_location (ev->get_location ());
2294 gcc_assert (ref_exp_loc.file);
2295 if (idx_exp_loc.file == NULL)
2296 return false;
2297 if (strcmp (ref_exp_loc.file, idx_exp_loc.file))
2298 return false;
2299 return ref_exp_loc.line == idx_exp_loc.line;
2302 /* This path-readability optimization reduces the verbosity of compound
2303 conditional statements (without needing to reconstruct the AST, which
2304 has already been lost).
2306 For example, it converts:
2308 | 61 | if (cp[0] != '\0' && cp[0] != '#')
2309 | | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
2310 | | | | |
2311 | | | | (6) ...to here
2312 | | | (7) following ‘true’ branch...
2313 | | (5) following ‘true’ branch...
2314 | 62 | {
2315 | 63 | alias = cp++;
2316 | | ~~~~
2317 | | |
2318 | | (8) ...to here
2320 into:
2322 | 61 | if (cp[0] != '\0' && cp[0] != '#')
2323 | | ~
2324 | | |
2325 | | (5) following ‘true’ branch...
2326 | 62 | {
2327 | 63 | alias = cp++;
2328 | | ~~~~
2329 | | |
2330 | | (6) ...to here
2332 by combining events 5-8 into new events 5-6.
2334 Find runs of consecutive (start_cfg_edge_event, end_cfg_edge_event) pairs
2335 in which all events apart from the final end_cfg_edge_event are on the same
2336 line, and for which either all the CFG edges are TRUE edges, or all are
2337 FALSE edges.
2339 Consolidate each such run into a
2340 (start_consolidated_cfg_edges_event, end_consolidated_cfg_edges_event)
2341 pair. */
2343 void
2344 diagnostic_manager::consolidate_conditions (checker_path *path) const
2346 /* Don't simplify edges if we're debugging them. */
2347 if (flag_analyzer_verbose_edges)
2348 return;
2350 for (int start_idx = 0;
2351 start_idx < (signed)path->num_events () - 1;
2352 start_idx++)
2354 if (path->cfg_edge_pair_at_p (start_idx))
2356 const checker_event *old_start_ev
2357 = path->get_checker_event (start_idx);
2358 expanded_location start_exp_loc
2359 = expand_location (old_start_ev->get_location ());
2360 if (start_exp_loc.file == NULL)
2361 continue;
2362 if (!same_line_as_p (start_exp_loc, path, start_idx + 1))
2363 continue;
2365 /* Are we looking for a run of all TRUE edges, or all FALSE edges? */
2366 gcc_assert (old_start_ev->m_kind == EK_START_CFG_EDGE);
2367 const start_cfg_edge_event *old_start_cfg_ev
2368 = (const start_cfg_edge_event *)old_start_ev;
2369 const cfg_superedge& first_cfg_sedge
2370 = old_start_cfg_ev->get_cfg_superedge ();
2371 bool edge_sense;
2372 if (first_cfg_sedge.true_value_p ())
2373 edge_sense = true;
2374 else if (first_cfg_sedge.false_value_p ())
2375 edge_sense = false;
2376 else
2377 continue;
2379 /* Find a run of CFG start/end event pairs from
2380 [start_idx, next_idx)
2381 where all apart from the final event are on the same line,
2382 and all are either TRUE or FALSE edges, matching the sense of the initial edge. */
2383 int next_idx = start_idx + 2;
2384 while (path->cfg_edge_pair_at_p (next_idx)
2385 && same_line_as_p (start_exp_loc, path, next_idx))
2387 const checker_event *iter_ev
2388 = path->get_checker_event (next_idx);
2389 gcc_assert (iter_ev->m_kind == EK_START_CFG_EDGE);
2390 const start_cfg_edge_event *iter_cfg_ev
2391 = (const start_cfg_edge_event *)iter_ev;
2392 const cfg_superedge& iter_cfg_sedge
2393 = iter_cfg_ev->get_cfg_superedge ();
2394 if (edge_sense)
2396 if (!iter_cfg_sedge.true_value_p ())
2397 break;
2399 else
2401 if (!iter_cfg_sedge.false_value_p ())
2402 break;
2404 next_idx += 2;
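/* This pair matches the sense of the run; extend the run over it and
   consider the next start/end pair.  */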
2407 /* If we have more than one pair in the run, consolidate. */
2408 if (next_idx > start_idx + 2)
2410 const checker_event *old_end_ev
2411 = path->get_checker_event (next_idx - 1);
2412 log ("consolidating CFG edge events %i-%i into %i-%i",
2413 start_idx, next_idx - 1, start_idx, start_idx + 1);
2414 start_consolidated_cfg_edges_event *new_start_ev
2415 = new start_consolidated_cfg_edges_event
2416 (old_start_ev->get_location (),
2417 old_start_ev->get_fndecl (),
2418 old_start_ev->get_stack_depth (),
2419 edge_sense);
2420 checker_event *new_end_ev
2421 = new end_consolidated_cfg_edges_event
2422 (old_end_ev->get_location (),
2423 old_end_ev->get_fndecl (),
2424 old_end_ev->get_stack_depth ());
2425 path->replace_event (start_idx, new_start_ev);
2426 path->replace_event (start_idx + 1, new_end_ev);
2427 path->delete_events (start_idx + 2, next_idx - (start_idx + 2));
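/* The first pair of the run has been replaced by the consolidated
   events; the remaining start/end pairs, up to but not including
   NEXT_IDX, are now redundant and are deleted.  */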
2433 /* Final pass of diagnostic_manager::prune_path.
2435 If everything that remains is within a single function, then filter out
2436 function entry events.
2438 void
2439 diagnostic_manager::finish_pruning (checker_path *path) const
2441 if (!path->interprocedural_p ())
2443 int idx = path->num_events () - 1;
2444 while (idx >= 0 && idx < (signed)path->num_events ())
2446 checker_event *base_event = path->get_checker_event (idx);
2447 if (base_event->m_kind == EK_FUNCTION_ENTRY)
2449 log ("filtering event %i:"
2450 " function entry for purely intraprocedural path", idx);
2451 path->delete_event (idx);
2453 idx--;
2458 } // namespace ana
2460 #endif /* #if ENABLE_ANALYZER */