1 /* Classes for saving, deduplicating, and emitting analyzer diagnostics.
2 Copyright (C) 2019-2023 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
22 #define INCLUDE_MEMORY
24 #include "coretypes.h"
27 #include "pretty-print.h"
28 #include "gcc-rich-location.h"
29 #include "gimple-pretty-print.h"
31 #include "diagnostic-core.h"
32 #include "diagnostic-event-id.h"
33 #include "diagnostic-path.h"
35 #include "ordered-hash-map.h"
36 #include "analyzer/analyzer.h"
37 #include "analyzer/analyzer-logging.h"
38 #include "analyzer/sm.h"
39 #include "analyzer/pending-diagnostic.h"
40 #include "analyzer/diagnostic-manager.h"
41 #include "analyzer/call-string.h"
42 #include "analyzer/program-point.h"
43 #include "analyzer/store.h"
44 #include "analyzer/region-model.h"
45 #include "analyzer/constraint-manager.h"
47 #include "basic-block.h"
49 #include "gimple-iterator.h"
50 #include "inlining-iterator.h"
53 #include "analyzer/supergraph.h"
54 #include "analyzer/program-state.h"
55 #include "analyzer/exploded-graph.h"
56 #include "analyzer/trimmed-graph.h"
57 #include "analyzer/feasible-graph.h"
58 #include "analyzer/checker-path.h"
59 #include "analyzer/reachability.h"
60 #include "make-unique.h"
66 class feasible_worklist
;
68 /* State for finding the shortest feasible exploded_path for a
70 This is shared between all diagnostics, so that we avoid repeating work. */
75 epath_finder (const exploded_graph
&eg
)
79 /* This is shared by all diagnostics, but only needed if
80 !flag_analyzer_feasibility. */
81 if (!flag_analyzer_feasibility
)
82 m_sep
= new shortest_exploded_paths (eg
, eg
.get_origin (),
83 SPS_FROM_GIVEN_ORIGIN
);
86 ~epath_finder () { delete m_sep
; }
88 logger
*get_logger () const { return m_eg
.get_logger (); }
90 std::unique_ptr
<exploded_path
>
91 get_best_epath (const exploded_node
*target_enode
,
92 const gimple
*target_stmt
,
93 const pending_diagnostic
&pd
,
94 const char *desc
, unsigned diag_idx
,
95 std::unique_ptr
<feasibility_problem
> *out_problem
);
98 DISABLE_COPY_AND_ASSIGN(epath_finder
);
100 std::unique_ptr
<exploded_path
>
101 explore_feasible_paths (const exploded_node
*target_enode
,
102 const gimple
*target_stmt
,
103 const pending_diagnostic
&pd
,
104 const char *desc
, unsigned diag_idx
);
106 process_worklist_item (feasible_worklist
*worklist
,
107 const trimmed_graph
&tg
,
109 const exploded_node
*target_enode
,
110 const gimple
*target_stmt
,
111 const pending_diagnostic
&pd
,
113 std::unique_ptr
<exploded_path
> *out_best_path
) const;
114 void dump_trimmed_graph (const exploded_node
*target_enode
,
115 const char *desc
, unsigned diag_idx
,
116 const trimmed_graph
&tg
,
117 const shortest_paths
<eg_traits
, exploded_path
> &sep
);
118 void dump_feasible_graph (const exploded_node
*target_enode
,
119 const char *desc
, unsigned diag_idx
,
120 const feasible_graph
&fg
);
121 void dump_feasible_path (const exploded_node
*target_enode
,
123 const feasible_graph
&fg
,
124 const feasible_node
&fnode
) const;
126 const exploded_graph
&m_eg
;
127 shortest_exploded_paths
*m_sep
;
130 /* class epath_finder. */
132 /* Get the "best" exploded_path for reaching ENODE from the origin,
133 returning ownership of it to the caller.
135 If TARGET_STMT is non-NULL, then check for reaching that stmt
138 Ideally we want to report the shortest feasible path.
139 Return NULL if we could not find a feasible path
140 (when flag_analyzer_feasibility is true).
142 If flag_analyzer_feasibility is false, then simply return the
145 Use DESC and DIAG_IDX when logging.
147 Write any feasibility_problem to *OUT_PROBLEM. */
149 std::unique_ptr
<exploded_path
>
150 epath_finder::get_best_epath (const exploded_node
*enode
,
151 const gimple
*target_stmt
,
152 const pending_diagnostic
&pd
,
153 const char *desc
, unsigned diag_idx
,
154 std::unique_ptr
<feasibility_problem
> *out_problem
)
156 logger
*logger
= get_logger ();
159 unsigned snode_idx
= enode
->get_supernode ()->m_index
;
161 logger
->log ("considering %qs at EN: %i, SN: %i (sd: %i)",
162 desc
, enode
->m_index
, snode_idx
, diag_idx
);
164 /* State-merging means that not every path in the egraph corresponds
165 to a feasible one w.r.t. states.
167 We want to find the shortest feasible path from the origin to ENODE
170 if (flag_analyzer_feasibility
)
172 /* Attempt to find the shortest feasible path using feasible_graph. */
174 logger
->log ("trying to find shortest feasible path");
175 if (std::unique_ptr
<exploded_path
> epath
176 = explore_feasible_paths (enode
, target_stmt
, pd
, desc
, diag_idx
))
179 logger
->log ("accepting %qs at EN: %i, SN: %i (sd: %i)"
180 " with feasible path (length: %i)",
181 desc
, enode
->m_index
, snode_idx
, diag_idx
,
188 logger
->log ("rejecting %qs at EN: %i, SN: %i (sd: %i)"
189 " due to not finding feasible path",
190 desc
, enode
->m_index
, snode_idx
, diag_idx
);
196 /* As a crude approximation to shortest feasible path, simply find
197 the shortest path, and note whether it is feasible.
198 There could be longer feasible paths within the egraph, so this
199 approach would lead to diagnostics being falsely rejected
200 (PR analyzer/96374). */
202 logger
->log ("trying to find shortest path ignoring feasibility");
204 std::unique_ptr
<exploded_path
> epath
205 = make_unique
<exploded_path
> (m_sep
->get_shortest_path (enode
));
206 if (epath
->feasible_p (logger
, out_problem
, m_eg
.get_engine (), &m_eg
))
209 logger
->log ("accepting %qs at EN: %i, SN: %i (sn: %i)"
210 " with feasible path (length: %i)",
211 desc
, enode
->m_index
, snode_idx
, diag_idx
,
217 logger
->log ("accepting %qs at EN: %i, SN: %i (sn: %i) (length: %i)"
218 " despite infeasible path (due to %qs)",
219 desc
, enode
->m_index
, snode_idx
, diag_idx
,
221 "-fno-analyzer-feasibility");
227 /* A class for managing the worklist of feasible_nodes in
228 epath_finder::explore_feasible_paths, prioritizing them
229 so that shorter paths appear earlier in the queue. */
231 class feasible_worklist
234 feasible_worklist (const shortest_paths
<eg_traits
, exploded_path
> &sep
)
235 : m_queue (key_t (*this, NULL
)),
240 feasible_node
*take_next () { return m_queue
.extract_min (); }
242 void add_node (feasible_node
*fnode
)
244 m_queue
.insert (key_t (*this, fnode
), fnode
);
250 key_t (const feasible_worklist
&w
, feasible_node
*fnode
)
251 : m_worklist (w
), m_fnode (fnode
)
254 bool operator< (const key_t
&other
) const
256 return cmp (*this, other
) < 0;
259 bool operator== (const key_t
&other
) const
261 return cmp (*this, other
) == 0;
264 bool operator> (const key_t
&other
) const
266 return !(*this == other
|| *this < other
);
270 static int cmp (const key_t
&ka
, const key_t
&kb
)
272 /* Choose the node for which if the remaining path were feasible,
273 it would be the shortest path (summing the length of the
274 known-feasible path so far with that of the remaining
275 possibly-feasible path). */
276 int ca
= ka
.m_worklist
.get_estimated_cost (ka
.m_fnode
);
277 int cb
= kb
.m_worklist
.get_estimated_cost (kb
.m_fnode
);
281 const feasible_worklist
&m_worklist
;
282 feasible_node
*m_fnode
;
285 /* Get the estimated length of a path involving FNODE from
286 the origin to the target enode.
287 Sum the length of the known-feasible path so far with
288 that of the remaining possibly-feasible path. */
290 int get_estimated_cost (const feasible_node
*fnode
) const
292 unsigned length_so_far
= fnode
->get_path_length ();
293 int shortest_remaining_path
294 = m_sep
.get_shortest_distance (fnode
->get_inner_node ());
296 gcc_assert (shortest_remaining_path
>= 0);
297 /* This should be true since we're only exploring nodes within
298 the trimmed graph (and we anticipate it being much smaller
299 than this, and thus not overflowing the sum). */
300 gcc_assert (shortest_remaining_path
< INT_MAX
);
302 return length_so_far
+ shortest_remaining_path
;
305 /* Priority queue, backed by a fibonacci_heap. */
306 typedef fibonacci_heap
<key_t
, feasible_node
> queue_t
;
308 const shortest_paths
<eg_traits
, exploded_path
> &m_sep
;
311 /* When we're building the exploded graph we want to simplify
312 overly-complicated symbolic values down to "UNKNOWN" to try to avoid
313 state explosions and unbounded chains of exploration.
315 However, when we're building the feasibility graph for a diagnostic
316 (actually a tree), we don't want UNKNOWN values, as conditions on them
317 are also unknown: we don't want to have a contradiction such as a path
318 where (VAL != 0) and then (VAL == 0) along the same path.
320 Hence this is an RAII class for temporarily disabling complexity-checking
321 in the region_model_manager, for use within
322 epath_finder::explore_feasible_paths.
324 We also disable the creation of unknown_svalue instances during feasibility
325 checking, instead creating unique svalues, to avoid paradoxes in paths. */
327 class auto_checking_feasibility
330 auto_checking_feasibility (region_model_manager
*mgr
) : m_mgr (mgr
)
332 m_mgr
->begin_checking_feasibility ();
334 ~auto_checking_feasibility ()
336 m_mgr
->end_checking_feasibility ();
339 region_model_manager
*m_mgr
;
342 /* Attempt to find the shortest feasible path from the origin to
343 TARGET_ENODE by iteratively building a feasible_graph, in which
344 every path to a feasible_node is feasible by construction.
346 If TARGET_STMT is non-NULL, then check for reaching that stmt
349 We effectively explore the tree of feasible paths in order of shortest
350 path until we either find a feasible path to TARGET_ENODE, or hit
354 - Find the shortest path from each node to the TARGET_ENODE (without
355 checking feasibility), so that we can prioritize our worklist.
356 - Construct a trimmed_graph: the subset of nodes/edges that
357 are on a path that eventually reaches TARGET_ENODE. We will only need
358 to consider these when considering the shortest feasible path.
360 Build a feasible_graph, in which every path to a feasible_node
361 is feasible by construction.
362 We use a worklist to flatten the exploration into an iteration.
363 Starting at the origin, find feasible out-edges within the trimmed graph.
364 At each stage, choose the node for which if the remaining path were feasible,
365 it would be the shortest path (summing the length of the known-feasible path
366 so far with that of the remaining possibly-feasible path).
367 This way, the first feasible path we find to TARGET_ENODE is the shortest.
368 We start by trying the shortest possible path, but if that fails,
369 we explore progressively longer paths, eventually trying iterations through
370 loops. The exploration is captured in the feasible_graph, which can be
371 dumped as a .dot file to visualize the exploration. The indices of the
372 feasible_nodes show the order in which they were created.
374 This is something of a brute-force approach, but the trimmed_graph
375 hopefully keeps the complexity manageable.
377 Terminate with failure when the number of infeasible edges exceeds
378 a threshold (--param=analyzer-max-infeasible-edges=).
379 This is guaranteed to eventually lead to terminatation, as
380 we can't keep creating feasible nodes without eventually
381 either reaching an infeasible edge, or reaching the
382 TARGET_ENODE. Specifically, there can't be a cycle of
383 feasible edges that doesn't reach the target_enode without
384 an out-edge that either fails feasibility or gets closer
385 to the TARGET_ENODE: on each iteration we are either:
386 - effectively getting closer to the TARGET_ENODE (which can't
387 continue forever without reaching the target), or
388 - getting monotonically closer to the termination threshold. */
390 std::unique_ptr
<exploded_path
>
391 epath_finder::explore_feasible_paths (const exploded_node
*target_enode
,
392 const gimple
*target_stmt
,
393 const pending_diagnostic
&pd
,
394 const char *desc
, unsigned diag_idx
)
396 logger
*logger
= get_logger ();
399 region_model_manager
*mgr
= m_eg
.get_engine ()->get_model_manager ();
401 /* Determine the shortest path to TARGET_ENODE from each node in
402 the exploded graph. */
403 shortest_paths
<eg_traits
, exploded_path
> sep
404 (m_eg
, target_enode
, SPS_TO_GIVEN_TARGET
);
406 /* Construct a trimmed_graph: the subset of nodes/edges that
407 are on a path that eventually reaches TARGET_ENODE.
408 We only need to consider these when considering the shortest
410 trimmed_graph
tg (m_eg
, target_enode
);
412 if (flag_dump_analyzer_feasibility
)
413 dump_trimmed_graph (target_enode
, desc
, diag_idx
, tg
, sep
);
416 feasible_worklist
worklist (sep
);
418 /* Populate the worklist with the origin node. */
420 feasibility_state
init_state (mgr
, m_eg
.get_supergraph ());
421 feasible_node
*origin
= fg
.add_node (m_eg
.get_origin (), init_state
, 0);
422 worklist
.add_node (origin
);
425 /* Iteratively explore the tree of feasible paths in order of shortest
426 path until we either find a feasible path to TARGET_ENODE, or hit
429 /* Set this if we find a feasible path to TARGET_ENODE. */
430 std::unique_ptr
<exploded_path
> best_path
= NULL
;
433 auto_checking_feasibility
sentinel (mgr
);
435 while (process_worklist_item (&worklist
, tg
, &fg
, target_enode
, target_stmt
,
436 pd
, diag_idx
, &best_path
))
438 /* Empty; the work is done within process_worklist_item. */
444 logger
->log ("tg for sd: %i:", diag_idx
);
445 logger
->inc_indent ();
446 tg
.log_stats (logger
);
447 logger
->dec_indent ();
449 logger
->log ("fg for sd: %i:", diag_idx
);
450 logger
->inc_indent ();
451 fg
.log_stats (logger
);
452 logger
->dec_indent ();
455 /* Dump the feasible_graph. */
456 if (flag_dump_analyzer_feasibility
)
457 dump_feasible_graph (target_enode
, desc
, diag_idx
, fg
);
462 /* Process the next item in WORKLIST, potentially adding new items
463 based on feasible out-edges, and extending FG accordingly.
464 Use TG to ignore out-edges that don't lead to TARGET_ENODE.
465 Return true if the worklist processing should continue.
466 Return false if the processing of the worklist should stop
467 (either due to reaching TARGET_ENODE, or hitting a limit).
468 Write to *OUT_BEST_PATH if stopping due to finding a feasible path
470 Use PD to provide additional restrictions on feasibility of
471 the final path in the feasible_graph before converting to
476 process_worklist_item (feasible_worklist
*worklist
,
477 const trimmed_graph
&tg
,
479 const exploded_node
*target_enode
,
480 const gimple
*target_stmt
,
481 const pending_diagnostic
&pd
,
483 std::unique_ptr
<exploded_path
> *out_best_path
) const
485 logger
*logger
= get_logger ();
487 feasible_node
*fnode
= worklist
->take_next ();
491 logger
->log ("drained worklist for sd: %i"
492 " without finding feasible path",
497 log_scope
s (logger
, "fg worklist item",
498 "considering FN: %i (EN: %i) for sd: %i",
499 fnode
->get_index (), fnode
->get_inner_node ()->m_index
,
502 /* Iterate through all out-edges from this item. */
504 exploded_edge
*succ_eedge
;
505 FOR_EACH_VEC_ELT (fnode
->get_inner_node ()->m_succs
, i
, succ_eedge
)
507 log_scope
s (logger
, "edge", "considering edge: EN:%i -> EN:%i",
508 succ_eedge
->m_src
->m_index
,
509 succ_eedge
->m_dest
->m_index
);
510 /* Reject edges that aren't in the trimmed graph. */
511 if (!tg
.contains_p (succ_eedge
))
514 logger
->log ("rejecting: not in trimmed graph");
518 feasibility_state
succ_state (fnode
->get_state ());
519 std::unique_ptr
<rejected_constraint
> rc
;
520 if (succ_state
.maybe_update_for_edge (logger
, succ_eedge
, &rc
))
522 gcc_assert (rc
== NULL
);
523 feasible_node
*succ_fnode
524 = fg
->add_node (succ_eedge
->m_dest
,
526 fnode
->get_path_length () + 1);
528 logger
->log ("accepting as FN: %i", succ_fnode
->get_index ());
529 fg
->add_edge (new feasible_edge (fnode
, succ_fnode
, succ_eedge
));
531 /* Have we reached TARGET_ENODE? */
532 if (succ_fnode
->get_inner_node () == target_enode
)
535 logger
->log ("success: got feasible path to EN: %i (sd: %i)"
537 target_enode
->m_index
, diag_idx
,
538 succ_fnode
->get_path_length ());
539 if (!pd
.check_valid_fpath_p (*succ_fnode
, target_stmt
))
542 logger
->log ("rejecting feasible path due to"
543 " pending_diagnostic");
546 *out_best_path
= fg
->make_epath (succ_fnode
);
547 if (flag_dump_analyzer_feasibility
)
548 dump_feasible_path (target_enode
, diag_idx
, *fg
, *succ_fnode
);
550 /* Success: stop the worklist iteration. */
554 worklist
->add_node (succ_fnode
);
559 logger
->log ("infeasible");
561 fg
->add_feasibility_problem (fnode
,
565 /* Give up if there have been too many infeasible edges. */
566 if (fg
->get_num_infeasible ()
567 > (unsigned)param_analyzer_max_infeasible_edges
)
570 logger
->log ("too many infeasible edges (%i); giving up",
571 fg
->get_num_infeasible ());
577 /* Continue the worklist iteration. */
581 /* Helper class for epath_finder::dump_trimmed_graph
582 to dump extra per-node information.
583 Use SEP to add the length of the shortest path from each
584 node to the target node to each node's dump. */
586 class dump_eg_with_shortest_path
: public eg_traits::dump_args_t
589 dump_eg_with_shortest_path
590 (const exploded_graph
&eg
,
591 const shortest_paths
<eg_traits
, exploded_path
> &sep
)
597 void dump_extra_info (const exploded_node
*enode
,
598 pretty_printer
*pp
) const final override
600 pp_printf (pp
, "sp: %i", m_sep
.get_shortest_path (enode
).length ());
605 const shortest_paths
<eg_traits
, exploded_path
> &m_sep
;
608 /* Dump TG to "BASE_NAME.DESC.DIAG_IDX.to-enN.tg.dot",
609 annotating each node with the length of the shortest path
610 from that node to TARGET_ENODE (using SEP). */
614 dump_trimmed_graph (const exploded_node
*target_enode
,
615 const char *desc
, unsigned diag_idx
,
616 const trimmed_graph
&tg
,
617 const shortest_paths
<eg_traits
, exploded_path
> &sep
)
619 auto_timevar
tv (TV_ANALYZER_DUMP
);
620 dump_eg_with_shortest_path
inner_args (m_eg
, sep
);
621 trimmed_graph::dump_args_t
args (inner_args
);
623 pp_printf (&pp
, "%s.%s.%i.to-en%i.tg.dot",
624 dump_base_name
, desc
, diag_idx
, target_enode
->m_index
);
625 char *filename
= xstrdup (pp_formatted_text (&pp
));
626 tg
.dump_dot (filename
, NULL
, args
);
630 /* Dump FG to "BASE_NAME.DESC.DIAG_IDX.to-enN.fg.dot". */
633 epath_finder::dump_feasible_graph (const exploded_node
*target_enode
,
634 const char *desc
, unsigned diag_idx
,
635 const feasible_graph
&fg
)
637 auto_timevar
tv (TV_ANALYZER_DUMP
);
638 exploded_graph::dump_args_t
inner_args (m_eg
);
639 feasible_graph::dump_args_t
args (inner_args
);
641 pp_printf (&pp
, "%s.%s.%i.to-en%i.fg.dot",
642 dump_base_name
, desc
, diag_idx
, target_enode
->m_index
);
643 char *filename
= xstrdup (pp_formatted_text (&pp
));
644 fg
.dump_dot (filename
, NULL
, args
);
648 /* Dump the path to FNODE to "BASE_NAME.DIAG_IDX.to-enN.fpath.txt". */
651 epath_finder::dump_feasible_path (const exploded_node
*target_enode
,
653 const feasible_graph
&fg
,
654 const feasible_node
&fnode
) const
656 auto_timevar
tv (TV_ANALYZER_DUMP
);
658 pp_printf (&pp
, "%s.%i.to-en%i.fpath.txt",
659 dump_base_name
, diag_idx
, target_enode
->m_index
);
660 char *filename
= xstrdup (pp_formatted_text (&pp
));
661 fg
.dump_feasible_path (fnode
, filename
);
665 /* class saved_diagnostic. */
667 /* saved_diagnostic's ctor. */
669 saved_diagnostic::saved_diagnostic (const state_machine
*sm
,
670 const pending_location
&ploc
,
673 state_machine::state_t state
,
674 std::unique_ptr
<pending_diagnostic
> d
,
676 : m_sm (sm
), m_enode (ploc
.m_enode
), m_snode (ploc
.m_snode
),
677 m_stmt (ploc
.m_stmt
),
678 /* stmt_finder could be on-stack; we want our own copy that can
680 m_stmt_finder (ploc
.m_finder
? ploc
.m_finder
->clone () : NULL
),
682 m_var (var
), m_sval (sval
), m_state (state
),
683 m_d (std::move (d
)), m_trailing_eedge (NULL
),
685 m_best_epath (NULL
), m_problem (NULL
),
688 /* We must have an enode in order to be able to look for paths
689 through the exploded_graph to this diagnostic. */
690 gcc_assert (m_enode
);
694 saved_diagnostic::operator== (const saved_diagnostic
&other
) const
696 if (m_notes
.length () != other
.m_notes
.length ())
698 for (unsigned i
= 0; i
< m_notes
.length (); i
++)
699 if (!m_notes
[i
]->equal_p (*other
.m_notes
[i
]))
701 return (m_sm
== other
.m_sm
702 /* We don't compare m_enode. */
703 && m_snode
== other
.m_snode
704 && m_stmt
== other
.m_stmt
705 /* We don't compare m_stmt_finder. */
706 && m_loc
== other
.m_loc
707 && pending_diagnostic::same_tree_p (m_var
, other
.m_var
)
708 && m_state
== other
.m_state
709 && m_d
->equal_p (*other
.m_d
)
710 && m_trailing_eedge
== other
.m_trailing_eedge
);
713 /* Add PN to this diagnostic, taking ownership of it. */
716 saved_diagnostic::add_note (std::unique_ptr
<pending_note
> pn
)
719 m_notes
.safe_push (pn
.release ());
722 /* Add EVENT to this diagnostic. */
725 saved_diagnostic::add_event (std::unique_ptr
<checker_event
> event
)
728 m_saved_events
.safe_push (event
.release ());
731 /* Return a new json::object of the form
735 "sval": optional str,
736 "state": optional str,
737 "path_length": optional int,
738 "pending_diagnostic": str,
742 saved_diagnostic::to_json () const
744 json::object
*sd_obj
= new json::object ();
747 sd_obj
->set ("sm", new json::string (m_sm
->get_name ()));
748 sd_obj
->set ("enode", new json::integer_number (m_enode
->m_index
));
749 sd_obj
->set ("snode", new json::integer_number (m_snode
->m_index
));
751 sd_obj
->set ("sval", m_sval
->to_json ());
753 sd_obj
->set ("state", m_state
->to_json ());
755 sd_obj
->set ("path_length", new json::integer_number (get_epath_length ()));
756 sd_obj
->set ("pending_diagnostic", new json::string (m_d
->get_kind ()));
757 sd_obj
->set ("idx", new json::integer_number (m_idx
));
759 /* We're not yet JSONifying the following fields:
760 const gimple *m_stmt;
761 stmt_finder *m_stmt_finder;
763 exploded_edge *m_trailing_eedge;
764 enum status m_status;
765 feasibility_problem *m_problem;
766 auto_delete_vec <pending_note> m_notes;
772 /* Dump this to PP in a form suitable for use as an id in .dot output. */
775 saved_diagnostic::dump_dot_id (pretty_printer
*pp
) const
777 pp_printf (pp
, "sd_%i", m_idx
);
780 /* Dump this to PP in a form suitable for use as a node in .dot output. */
783 saved_diagnostic::dump_as_dot_node (pretty_printer
*pp
) const
787 " [shape=none,margin=0,style=filled,fillcolor=\"red\",label=\"");
788 pp_write_text_to_stream (pp
);
791 pp_printf (pp
, "DIAGNOSTIC: %s (sd: %i)\n",
792 m_d
->get_kind (), m_idx
);
795 pp_printf (pp
, "sm: %s", m_sm
->get_name ());
798 pp_printf (pp
, "; state: ");
799 m_state
->dump_to_pp (pp
);
805 pp_string (pp
, "stmt: ");
806 pp_gimple_stmt_1 (pp
, m_stmt
, 0, (dump_flags_t
)0);
810 pp_printf (pp
, "var: %qE\n", m_var
);
813 pp_string (pp
, "sval: ");
814 m_sval
->dump_to_pp (pp
, true);
818 pp_printf (pp
, "path length: %i\n", get_epath_length ());
820 pp_write_text_as_dot_label_to_stream (pp
, /*for_record=*/true);
821 pp_string (pp
, "\"];\n\n");
823 /* Show links to duplicates. */
824 for (auto iter
: m_duplicates
)
827 pp_string (pp
, " -> ");
828 iter
->dump_dot_id (pp
);
829 pp_string (pp
, " [style=\"dotted\" arrowhead=\"none\"];");
834 /* Use PF to find the best exploded_path for this saved_diagnostic,
835 and store it in m_best_epath.
836 If we don't have a specific location in m_loc and m_stmt is still NULL,
837 use m_stmt_finder on the epath to populate m_stmt.
838 Return true if a best path was found. */
841 saved_diagnostic::calc_best_epath (epath_finder
*pf
)
843 logger
*logger
= pf
->get_logger ();
847 m_best_epath
= pf
->get_best_epath (m_enode
, m_stmt
,
848 *m_d
, m_d
->get_kind (), m_idx
,
851 /* Handle failure to find a feasible path. */
852 if (m_best_epath
== NULL
)
855 gcc_assert (m_best_epath
);
856 if (m_loc
== UNKNOWN_LOCATION
)
860 gcc_assert (m_stmt_finder
);
861 m_stmt
= m_stmt_finder
->find_stmt (*m_best_epath
);
870 saved_diagnostic::get_epath_length () const
872 gcc_assert (m_best_epath
);
873 return m_best_epath
->length ();
876 /* Record that OTHER (and its duplicates) are duplicates
877 of this saved_diagnostic. */
880 saved_diagnostic::add_duplicate (saved_diagnostic
*other
)
883 m_duplicates
.reserve (m_duplicates
.length ()
884 + other
->m_duplicates
.length ()
886 m_duplicates
.splice (other
->m_duplicates
);
887 other
->m_duplicates
.truncate (0);
888 m_duplicates
.safe_push (other
);
891 /* Walk up the sedges of each of the two paths.
892 If the two sequences of sedges do not perfectly correspond,
893 then paths are incompatible.
894 If there is at least one sedge that either cannot be paired up
895 or its counterpart is not equal, then the paths are incompatible
896 and this function returns FALSE.
897 Otherwise return TRUE.
912 Both LHS_PATH and RHS_PATH final enodes should be
913 over the same gimple statement. */
916 compatible_epath_p (const exploded_path
*lhs_path
,
917 const exploded_path
*rhs_path
)
919 gcc_assert (lhs_path
);
920 gcc_assert (rhs_path
);
921 gcc_assert (rhs_path
->length () > 0);
922 gcc_assert (rhs_path
->length () > 0);
923 int lhs_eedge_idx
= lhs_path
->length () - 1;
924 int rhs_eedge_idx
= rhs_path
->length () - 1;
925 const exploded_edge
*lhs_eedge
;
926 const exploded_edge
*rhs_eedge
;
928 while (lhs_eedge_idx
>= 0 && rhs_eedge_idx
>= 0)
930 while (lhs_eedge_idx
>= 0)
932 /* Find LHS_PATH's next superedge. */
933 lhs_eedge
= lhs_path
->m_edges
[lhs_eedge_idx
];
934 if (lhs_eedge
->m_sedge
)
939 while (rhs_eedge_idx
>= 0)
941 /* Find RHS_PATH's next superedge. */
942 rhs_eedge
= rhs_path
->m_edges
[rhs_eedge_idx
];
943 if (rhs_eedge
->m_sedge
)
949 if (lhs_eedge
->m_sedge
&& rhs_eedge
->m_sedge
)
951 if (lhs_eedge
->m_sedge
!= rhs_eedge
->m_sedge
)
952 /* Both superedges do not match.
953 Superedges are not dependent on the exploded path, so even
954 different epaths will have similar sedges if they follow
955 the same outcome of a conditional node. */
962 else if (lhs_eedge
->m_sedge
== nullptr && rhs_eedge
->m_sedge
== nullptr)
963 /* Both paths were drained up entirely.
964 No discriminant was found. */
967 /* A superedge was found for only one of the two paths. */
971 /* A superedge was found for only one of the two paths. */
972 if (lhs_eedge_idx
>= 0 || rhs_eedge_idx
>= 0)
975 /* Both paths were drained up entirely.
976 No discriminant was found. */
981 /* Return true if this diagnostic supercedes OTHER, and that OTHER should
982 therefore not be emitted. */
985 saved_diagnostic::supercedes_p (const saved_diagnostic
&other
) const
987 /* They should be at the same stmt. */
988 if (m_stmt
!= other
.m_stmt
)
990 /* return early if OTHER won't be superseded anyway. */
991 if (!m_d
->supercedes_p (*other
.m_d
))
994 /* If the two saved_diagnostics' path are not compatible
995 then they cannot supersede one another. */
996 return compatible_epath_p (m_best_epath
.get (), other
.m_best_epath
.get ());
999 /* Move any saved checker_events from this saved_diagnostic to
1000 the end of DST_PATH. */
1003 saved_diagnostic::add_any_saved_events (checker_path
&dst_path
)
1005 for (auto &event
: m_saved_events
)
1007 dst_path
.add_event (std::unique_ptr
<checker_event
> (event
));
1012 /* Emit any pending notes owned by this diagnostic. */
1015 saved_diagnostic::emit_any_notes () const
1017 for (auto pn
: m_notes
)
1021 /* State for building a checker_path from a particular exploded_path.
1022 In particular, this precomputes reachability information: the set of
1023 source enodes for which a path be found to the diagnostic enode. */
1028 path_builder (const exploded_graph
&eg
,
1029 const exploded_path
&epath
,
1030 const feasibility_problem
*problem
,
1031 const saved_diagnostic
&sd
)
1033 m_diag_enode (epath
.get_final_enode ()),
1035 m_reachability (eg
, m_diag_enode
),
1036 m_feasibility_problem (problem
)
1039 const exploded_node
*get_diag_node () const { return m_diag_enode
; }
1041 pending_diagnostic
*get_pending_diagnostic () const
1043 return m_sd
.m_d
.get ();
1046 bool reachable_from_p (const exploded_node
*src_enode
) const
1048 return m_reachability
.reachable_from_p (src_enode
);
1051 const extrinsic_state
&get_ext_state () const { return m_eg
.get_ext_state (); }
1053 const feasibility_problem
*get_feasibility_problem () const
1055 return m_feasibility_problem
;
1058 const state_machine
*get_sm () const { return m_sd
.m_sm
; }
1061 typedef reachability
<eg_traits
> enode_reachability
;
1063 const exploded_graph
&m_eg
;
1065 /* The enode where the diagnostic occurs. */
1066 const exploded_node
*m_diag_enode
;
1068 const saved_diagnostic
&m_sd
;
1070 /* Precompute all enodes from which the diagnostic is reachable. */
1071 enode_reachability m_reachability
;
1073 const feasibility_problem
*m_feasibility_problem
;
1076 /* Determine the emission location for PD at STMT in FUN. */
1079 get_emission_location (const gimple
*stmt
, function
*fun
,
1080 const pending_diagnostic
&pd
)
1082 location_t loc
= get_stmt_location (stmt
, fun
);
1084 /* Allow the pending_diagnostic to fix up the location. */
1085 loc
= pd
.fixup_location (loc
, true);
1090 /* class diagnostic_manager. */
1092 /* diagnostic_manager's ctor. */
1094 diagnostic_manager::diagnostic_manager (logger
*logger
, engine
*eng
,
1096 : log_user (logger
), m_eng (eng
), m_verbosity (verbosity
),
1097 m_num_disabled_diagnostics (0)
1101 /* Queue pending_diagnostic D at ENODE for later emission.
1102 Return true/false signifying if the diagnostic was actually added. */
1105 diagnostic_manager::add_diagnostic (const state_machine
*sm
,
1106 const pending_location
&ploc
,
1109 state_machine::state_t state
,
1110 std::unique_ptr
<pending_diagnostic
> d
)
1112 LOG_FUNC (get_logger ());
1114 /* We must have an enode in order to be able to look for paths
1115 through the exploded_graph to the diagnostic. */
1116 gcc_assert (ploc
.m_enode
);
1118 /* If this warning is ultimately going to be rejected by a -Wno-analyzer-*
1119 flag, reject it now.
1120 We can only do this for diagnostics where we already know the stmt,
1121 and thus can determine the emission location. */
1125 = get_emission_location (ploc
.m_stmt
, ploc
.m_snode
->m_fun
, *d
);
1126 int option
= d
->get_controlling_option ();
1127 if (!warning_enabled_at (loc
, option
))
1130 get_logger ()->log ("rejecting disabled warning %qs",
1132 m_num_disabled_diagnostics
++;
1137 saved_diagnostic
*sd
1138 = new saved_diagnostic (sm
, ploc
, var
, sval
, state
, std::move (d
),
1139 m_saved_diagnostics
.length ());
1140 m_saved_diagnostics
.safe_push (sd
);
1141 ploc
.m_enode
->add_diagnostic (sd
);
1143 log ("adding saved diagnostic %i at SN %i to EN %i: %qs",
1145 ploc
.m_snode
->m_index
, ploc
.m_enode
->m_index
, sd
->m_d
->get_kind ());
1149 /* Queue pending_diagnostic D at ENODE for later emission.
1150 Return true/false signifying if the diagnostic was actually added.
1151 Take ownership of D (or delete it). */
1154 diagnostic_manager::add_diagnostic (const pending_location
&ploc
,
1155 std::unique_ptr
<pending_diagnostic
> d
)
1157 gcc_assert (ploc
.m_enode
);
1158 return add_diagnostic (NULL
, ploc
, NULL_TREE
, NULL
, 0, std::move (d
));
1161 /* Add PN to the most recent saved_diagnostic. */
1164 diagnostic_manager::add_note (std::unique_ptr
<pending_note
> pn
)
1166 LOG_FUNC (get_logger ());
1169 /* Get most recent saved_diagnostic. */
1170 gcc_assert (m_saved_diagnostics
.length () > 0);
1171 saved_diagnostic
*sd
= m_saved_diagnostics
[m_saved_diagnostics
.length () - 1];
1172 sd
->add_note (std::move (pn
));
1175 /* Add EVENT to the most recent saved_diagnostic. */
1178 diagnostic_manager::add_event (std::unique_ptr
<checker_event
> event
)
1180 LOG_FUNC (get_logger ());
1183 /* Get most recent saved_diagnostic. */
1184 gcc_assert (m_saved_diagnostics
.length () > 0);
1185 saved_diagnostic
*sd
= m_saved_diagnostics
[m_saved_diagnostics
.length () - 1];
1186 sd
->add_event (std::move (event
));
1189 /* Return a new json::object of the form
1190 {"diagnostics" : [obj for saved_diagnostic]}. */
1193 diagnostic_manager::to_json () const
1195 json::object
*dm_obj
= new json::object ();
1198 json::array
*sd_arr
= new json::array ();
1200 saved_diagnostic
*sd
;
1201 FOR_EACH_VEC_ELT (m_saved_diagnostics
, i
, sd
)
1202 sd_arr
->append (sd
->to_json ());
1203 dm_obj
->set ("diagnostics", sd_arr
);
1209 /* A class for identifying sets of duplicated pending_diagnostic.
1211 We want to find the simplest saved_diagnostic amongst those that share a
1217 dedupe_key (const saved_diagnostic
&sd
)
1218 : m_sd (sd
), m_stmt (sd
.m_stmt
), m_loc (sd
.m_loc
)
1220 gcc_assert (m_stmt
|| m_loc
!= UNKNOWN_LOCATION
);
1223 hashval_t
hash () const
1225 inchash::hash hstate
;
1226 hstate
.add_ptr (m_stmt
);
1228 return hstate
.end ();
1230 bool operator== (const dedupe_key
&other
) const
1232 return (m_sd
== other
.m_sd
1233 && m_stmt
== other
.m_stmt
1234 && m_loc
== other
.m_loc
);
1237 location_t
get_location () const
1239 if (m_loc
!= UNKNOWN_LOCATION
)
1241 gcc_assert (m_stmt
);
1242 return m_stmt
->location
;
1245 /* A qsort comparator for use by dedupe_winners::emit_best
1246 to sort them into location_t order. */
1249 comparator (const void *p1
, const void *p2
)
1251 const dedupe_key
*pk1
= *(const dedupe_key
* const *)p1
;
1252 const dedupe_key
*pk2
= *(const dedupe_key
* const *)p2
;
1254 location_t loc1
= pk1
->get_location ();
1255 location_t loc2
= pk2
->get_location ();
1257 if (int cmp
= linemap_compare_locations (line_table
, loc2
, loc1
))
1259 if (int cmp
= ((int)pk1
->m_sd
.get_epath_length ()
1260 - (int)pk2
->m_sd
.get_epath_length ()))
1262 if (int cmp
= strcmp (pk1
->m_sd
.m_d
->get_kind (),
1263 pk2
->m_sd
.m_d
->get_kind ()))
1268 const saved_diagnostic
&m_sd
;
1269 const gimple
*m_stmt
;
1273 /* Traits for use by dedupe_winners. */
1275 class dedupe_hash_map_traits
1278 typedef const dedupe_key
*key_type
;
1279 typedef saved_diagnostic
*value_type
;
1280 typedef saved_diagnostic
*compare_type
;
1282 static inline hashval_t
hash (const key_type
&v
)
1286 static inline bool equal_keys (const key_type
&k1
, const key_type
&k2
)
1290 template <typename T
>
1291 static inline void remove (T
&)
1295 template <typename T
>
1296 static inline void mark_deleted (T
&entry
)
1298 entry
.m_key
= reinterpret_cast<key_type
> (1);
1300 template <typename T
>
1301 static inline void mark_empty (T
&entry
)
1305 template <typename T
>
1306 static inline bool is_deleted (const T
&entry
)
1308 return entry
.m_key
== reinterpret_cast<key_type
> (1);
1310 template <typename T
>
1311 static inline bool is_empty (const T
&entry
)
1313 return entry
.m_key
== NULL
;
1315 static const bool empty_zero_p
= true;
1318 /* A class for deduplicating diagnostics and finding (and emitting) the
1319 best saved_diagnostic within each partition. */
1321 class dedupe_winners
1326 /* Delete all keys, but not the saved_diagnostics. */
1327 for (map_t::iterator iter
= m_map
.begin ();
1328 iter
!= m_map
.end ();
1330 delete (*iter
).first
;
1333 /* Determine an exploded_path for SD using PF and, if it's feasible,
1334 determine if SD is the best seen so far for its dedupe_key.
1335 Record the winning SD for each dedupe_key. */
1337 void add (logger
*logger
,
1339 saved_diagnostic
*sd
)
1341 /* Determine best epath for SD. */
1342 if (!sd
->calc_best_epath (pf
))
1345 dedupe_key
*key
= new dedupe_key (*sd
);
1346 if (saved_diagnostic
**slot
= m_map
.get (key
))
1349 logger
->log ("already have this dedupe_key");
1351 saved_diagnostic
*cur_best_sd
= *slot
;
1353 if (sd
->get_epath_length () < cur_best_sd
->get_epath_length ())
1355 /* We've got a shorter path for the key; replace
1356 the current candidate, marking it as a duplicate of SD. */
1358 logger
->log ("length %i is better than existing length %i;"
1359 " taking over this dedupe_key",
1360 sd
->get_epath_length (),
1361 cur_best_sd
->get_epath_length ());
1362 sd
->add_duplicate (cur_best_sd
);
1366 /* We haven't beaten the current best candidate; add SD
1367 as a duplicate of it. */
1370 logger
->log ("length %i isn't better than existing length %i;"
1371 " dropping this candidate",
1372 sd
->get_epath_length (),
1373 cur_best_sd
->get_epath_length ());
1374 cur_best_sd
->add_duplicate (sd
);
1380 /* This is the first candidate for this key. */
1381 m_map
.put (key
, sd
);
1383 logger
->log ("first candidate for this dedupe_key");
1387 /* Handle interactions between the dedupe winners, so that some
1388 diagnostics can supercede others (of different kinds).
1390 We want use-after-free to supercede use-of-unitialized-value,
1391 so that if we have these at the same stmt, we don't emit
1392 a use-of-uninitialized, just the use-after-free. */
1394 void handle_interactions (diagnostic_manager
*dm
)
1396 LOG_SCOPE (dm
->get_logger ());
1397 auto_vec
<const dedupe_key
*> superceded
;
1398 for (auto outer
: m_map
)
1400 const saved_diagnostic
*outer_sd
= outer
.second
;
1401 for (auto inner
: m_map
)
1403 const saved_diagnostic
*inner_sd
= inner
.second
;
1404 if (inner_sd
->supercedes_p (*outer_sd
))
1406 superceded
.safe_push (outer
.first
);
1407 if (dm
->get_logger ())
1408 dm
->log ("sd[%i] \"%s\" superceded by sd[%i] \"%s\"",
1409 outer_sd
->get_index (), outer_sd
->m_d
->get_kind (),
1410 inner_sd
->get_index (), inner_sd
->m_d
->get_kind ());
1415 for (auto iter
: superceded
)
1416 m_map
.remove (iter
);
1419 /* Emit the simplest diagnostic within each set. */
1421 void emit_best (diagnostic_manager
*dm
,
1422 const exploded_graph
&eg
)
1424 LOG_SCOPE (dm
->get_logger ());
1426 /* Get keys into a vec for sorting. */
1427 auto_vec
<const dedupe_key
*> keys (m_map
.elements ());
1428 for (map_t::iterator iter
= m_map
.begin ();
1429 iter
!= m_map
.end ();
1431 keys
.quick_push ((*iter
).first
);
1433 dm
->log ("# keys after de-duplication: %i", keys
.length ());
1435 /* Sort into a good emission order. */
1436 keys
.qsort (dedupe_key::comparator
);
1438 /* Emit the best saved_diagnostics for each key. */
1440 const dedupe_key
*key
;
1441 FOR_EACH_VEC_ELT (keys
, i
, key
)
1443 saved_diagnostic
**slot
= m_map
.get (key
);
1445 saved_diagnostic
*sd
= *slot
;
1446 dm
->emit_saved_diagnostic (eg
, *sd
);
1451 /* This maps from each dedupe_key to a current best saved_diagnostic. */
1453 typedef hash_map
<const dedupe_key
*, saved_diagnostic
*,
1454 dedupe_hash_map_traits
> map_t
;
1458 /* Emit all saved diagnostics. */
1461 diagnostic_manager::emit_saved_diagnostics (const exploded_graph
&eg
)
1463 LOG_SCOPE (get_logger ());
1464 auto_timevar
tv (TV_ANALYZER_DIAGNOSTICS
);
1465 log ("# saved diagnostics: %i", m_saved_diagnostics
.length ());
1466 log ("# disabled diagnostics: %i", m_num_disabled_diagnostics
);
1470 saved_diagnostic
*sd
;
1471 FOR_EACH_VEC_ELT (m_saved_diagnostics
, i
, sd
)
1472 log ("[%i] sd: %qs at EN: %i, SN: %i",
1473 i
, sd
->m_d
->get_kind (), sd
->m_enode
->m_index
,
1474 sd
->m_snode
->m_index
);
1477 if (m_saved_diagnostics
.length () == 0)
1480 /* Compute the shortest_paths once, sharing it between all diagnostics. */
1481 epath_finder
pf (eg
);
1483 /* Iterate through all saved diagnostics, adding them to a dedupe_winners
1484 instance. This partitions the saved diagnostics by dedupe_key,
1485 generating exploded_paths for them, and retaining the best one in each
1487 dedupe_winners best_candidates
;
1490 saved_diagnostic
*sd
;
1491 FOR_EACH_VEC_ELT (m_saved_diagnostics
, i
, sd
)
1492 best_candidates
.add (get_logger (), &pf
, sd
);
1494 best_candidates
.handle_interactions (this);
1496 /* For each dedupe-key, call emit_saved_diagnostic on the "best"
1497 saved_diagnostic. */
1498 best_candidates
.emit_best (this, eg
);
/* NOTE(review): this region of the file has been mangled by extraction:
   tokens are split across lines, original line numbers are embedded as
   stray tokens, and some physical lines (braces, `gcc_assert`s, the local
   pretty_printer declaration in the dump block, `free (filename)`, etc.)
   appear to be missing — the embedded numbering jumps.  Per review policy
   the code below is kept byte-identical; only comments have been added.
   Purpose (from the surviving text): build a checker_path describing SD's
   best exploded_path through EG, prune it, and invoke the underlying
   pending_diagnostic's "emit" vfunc.  */
1501 /* Given a saved_diagnostic SD with m_best_epath through EG,
1502 create an checker_path of suitable events and use it to call
1503 SD's underlying pending_diagnostic "emit" vfunc to emit a diagnostic. */
1506 diagnostic_manager::emit_saved_diagnostic (const exploded_graph
&eg
,
1507 saved_diagnostic
&sd
)
1509 LOG_SCOPE (get_logger ());
1510 log ("sd[%i]: %qs at SN: %i",
1511 sd
.get_index (), sd
.m_d
->get_kind (), sd
.m_snode
->m_index
);
1512 log ("num dupes: %i", sd
.get_num_dupes ());
1514 pretty_printer
*pp
= global_dc
->printer
->clone ();
1516 const exploded_path
*epath
= sd
.get_best_epath ();
1519 /* Precompute all enodes from which the diagnostic is reachable. */
1520 path_builder
pb (eg
, *epath
, sd
.get_feasibility_problem (), sd
);
1522 /* This is the diagnostic_path subclass that will be built for
1524 checker_path
emission_path (get_logger ());
1526 /* Populate emission_path with a full description of EPATH. */
1527 build_emission_path (pb
, *epath
, &emission_path
);
1529 /* Now prune it to just cover the most pertinent events. */
1530 prune_path (&emission_path
, sd
.m_sm
, sd
.m_sval
, sd
.m_state
);
1532 /* Add any saved events to the path, giving contextual information
1533 about what the analyzer was simulating as the diagnostic was
1534 generated. These don't get pruned, as they are probably pertinent. */
1535 sd
.add_any_saved_events (emission_path
);
1537 /* Add a final event to the path, covering the diagnostic itself.
1538 We use the final enode from the epath, which might be different from
1539 the sd.m_enode, as the dedupe code doesn't care about enodes, just
1541 sd
.m_d
->add_final_event (sd
.m_sm
, epath
->get_final_enode (), sd
.m_stmt
,
1542 sd
.m_var
, sd
.m_state
, &emission_path
);
1544 /* The "final" event might not be final; if the saved_diagnostic has a
1545 trailing eedge stashed, add any events for it. This is for use
1546 in handling longjmp, to show where a longjmp is rewinding to. */
1547 if (sd
.m_trailing_eedge
)
1548 add_events_for_eedge (pb
, *sd
.m_trailing_eedge
, &emission_path
, NULL
);
1550 emission_path
.inject_any_inlined_call_events (get_logger ());
1552 emission_path
.prepare_for_emission (sd
.m_d
.get ());
/* Resolve the emission location: use the explicit location if SD has one,
   otherwise derive it from the stmt.  */
1554 location_t loc
= sd
.m_loc
;
1555 if (loc
== UNKNOWN_LOCATION
)
1556 loc
= get_emission_location (sd
.m_stmt
, sd
.m_snode
->m_fun
, *sd
.m_d
);
1558 /* Allow the pending_diagnostic to fix up the locations of events. */
1559 emission_path
.fixup_locations (sd
.m_d
.get ());
1561 gcc_rich_location
rich_loc (loc
);
1562 rich_loc
.set_path (&emission_path
);
1564 auto_diagnostic_group d
;
1565 auto_cfun
sentinel (sd
.m_snode
->m_fun
);
/* Only emit notes/duplicate counts/epath dumps when the diagnostic itself
   was actually emitted.  */
1566 if (sd
.m_d
->emit (&rich_loc
, get_logger ()))
1568 sd
.emit_any_notes ();
1570 unsigned num_dupes
= sd
.get_num_dupes ();
1571 if (flag_analyzer_show_duplicate_count
&& num_dupes
> 0)
1572 inform_n (loc
, num_dupes
,
1573 "%i duplicate", "%i duplicates",
1575 if (flag_dump_analyzer_exploded_paths
)
1577 auto_timevar
tv (TV_ANALYZER_DUMP
)
;
1579 pp_printf (&pp
, "%s.%i.%s.epath.txt",
1580 dump_base_name
, sd
.get_index (), sd
.m_d
->get_kind ());
1581 char *filename
= xstrdup (pp_formatted_text (&pp
));
1582 epath
->dump_to_file (filename
, eg
.get_ext_state ());
1583 inform (loc
, "exploded path written to %qs", filename
);
1590 /* Emit a "path" of events to EMISSION_PATH describing the exploded path
1594 diagnostic_manager::build_emission_path (const path_builder
&pb
,
1595 const exploded_path
&epath
,
1596 checker_path
*emission_path
) const
1598 LOG_SCOPE (get_logger ());
1600 interesting_t interest
;
1601 pb
.get_pending_diagnostic ()->mark_interesting_stuff (&interest
);
1603 /* Add region creation events for any globals of interest, at the
1604 beginning of the path. */
1606 for (auto reg
: interest
.m_region_creation
)
1607 switch (reg
->get_memory_space ())
1612 case MEMSPACE_GLOBALS
:
1613 case MEMSPACE_READONLY_DATA
:
1615 const region
*base_reg
= reg
->get_base_region ();
1616 if (tree decl
= base_reg
->maybe_get_decl ())
1618 && DECL_SOURCE_LOCATION (decl
) != UNKNOWN_LOCATION
)
1620 emission_path
->add_region_creation_events
1621 (pb
.get_pending_diagnostic (),
1623 event_loc_info (DECL_SOURCE_LOCATION (decl
),
1632 /* Walk EPATH, adding events as appropriate. */
1633 for (unsigned i
= 0; i
< epath
.m_edges
.length (); i
++)
1635 const exploded_edge
*eedge
= epath
.m_edges
[i
];
1636 add_events_for_eedge (pb
, *eedge
, emission_path
, &interest
);
1638 add_event_on_final_node (pb
, epath
.get_final_enode (),
1639 emission_path
, &interest
);
/* NOTE(review): mangled extraction region — tokens split across lines,
   embedded stray line numbers, and missing physical lines (loop variable
   declarations, braces, the `emitted` bookkeeping and the tail of the
   add_region_creation_events argument list — the numbering jumps).
   Kept byte-identical; only comments added.
   Purpose (from the surviving text): peek at the final enode's successors
   to detect dynamic-extent changes (region creation) that happen on the
   last statement of the path, and emit region_creation_events for them.  */
1642 /* Emit a region_creation_event when requested on the last statement in
1645 If a region_creation_event should be emitted on the last statement of the
1646 path, we need to peek to the successors to get whether the final enode
1651 diagnostic_manager::add_event_on_final_node (const path_builder
&pb
,
1652 const exploded_node
*final_enode
,
1653 checker_path
*emission_path
,
1654 interesting_t
*interest
) const
1656 const program_point
&src_point
= final_enode
->get_point ();
1657 const int src_stack_depth
= src_point
.get_stack_depth ();
1658 const program_state
&src_state
= final_enode
->get_state ();
1659 const region_model
*src_model
= src_state
.m_region_model
;
/* Inspect each successor's state, looking for changes in dynamic extents
   relative to the final enode's own state.  */
1663 FOR_EACH_VEC_ELT (final_enode
->m_succs
, j
, e
)
1665 exploded_node
*dst
= e
->m_dest
;
1666 const program_state
&dst_state
= dst
->get_state ();
1667 const region_model
*dst_model
= dst_state
.m_region_model
;
1668 if (src_model
->get_dynamic_extents ()
1669 != dst_model
->get_dynamic_extents ())
1673 bool emitted
= false;
1674 FOR_EACH_VEC_ELT (interest
->m_region_creation
, i
, reg
)
1676 const region
*base_reg
= reg
->get_base_region ();
1677 const svalue
*old_extents
1678 = src_model
->get_dynamic_extents (base_reg
);
1679 const svalue
*new_extents
1680 = dst_model
->get_dynamic_extents (base_reg
);
/* An extent appearing (NULL -> non-NULL) indicates the region was
   created on this transition.  */
1681 if (old_extents
== NULL
&& new_extents
!= NULL
)
1682 switch (base_reg
->get_kind ())
1686 case RK_HEAP_ALLOCATED
:
1688 emission_path
->add_region_creation_events
1689 (pb
.get_pending_diagnostic (),
1692 event_loc_info (src_point
.get_location (),
1693 src_point
.get_fndecl (),
/* NOTE(review): mangled extraction region — the multi-argument
   state_change_event constructor calls are almost entirely missing
   (the embedded numbering jumps from 1738 to 1751 and 1783 to 1796),
   as are `return false;`s and braces.  Kept byte-identical; only
   comments added.
   Purpose (from the surviving text): a state_change_visitor subclass
   that appends state_change_events to a checker_path for the sm-state
   changes that occur along one exploded_edge.  */
1706 /* Subclass of state_change_visitor that creates state_change_event
1709 class state_change_event_creator
: public state_change_visitor
1712 state_change_event_creator (const path_builder
&pb
,
1713 const exploded_edge
&eedge
,
1714 checker_path
*emission_path
)
1717 m_emission_path (emission_path
)
/* Handle a change in a state machine's global state; only events for
   the path_builder's own state machine are of interest.  */
1720 bool on_global_state_change (const state_machine
&sm
,
1721 state_machine::state_t src_sm_val
,
1722 state_machine::state_t dst_sm_val
)
1725 if (&sm
!= m_pb
.get_sm ())
1727 const exploded_node
*src_node
= m_eedge
.m_src
;
1728 const program_point
&src_point
= src_node
->get_point ();
1729 const int src_stack_depth
= src_point
.get_stack_depth ();
1730 const exploded_node
*dst_node
= m_eedge
.m_dest
;
1731 const gimple
*stmt
= src_point
.get_stmt ();
1732 const supernode
*supernode
= src_point
.get_supernode ();
1733 const program_state
&dst_state
= dst_node
->get_state ();
1735 int stack_depth
= src_stack_depth
;
1737 m_emission_path
->add_event
1738 (make_unique
<state_change_event
> (supernode
,
/* Handle a change in the state of a particular svalue.  */
1751 bool on_state_change (const state_machine
&sm
,
1752 state_machine::state_t src_sm_val
,
1753 state_machine::state_t dst_sm_val
,
1755 const svalue
*dst_origin_sval
) final override
1757 if (&sm
!= m_pb
.get_sm ())
1759 const exploded_node
*src_node
= m_eedge
.m_src
;
1760 const program_point
&src_point
= src_node
->get_point ();
1761 const int src_stack_depth
= src_point
.get_stack_depth ();
1762 const exploded_node
*dst_node
= m_eedge
.m_dest
;
1763 const gimple
*stmt
= src_point
.get_stmt ();
1764 const supernode
*supernode
= src_point
.get_supernode ();
1765 const program_state
&dst_state
= dst_node
->get_state ();
1767 int stack_depth
= src_stack_depth
;
/* For CFG edges, attribute the event to the last stmt of the source
   supernode (condition guarding the edge).  */
1770 && m_eedge
.m_sedge
->m_kind
== SUPEREDGE_CFG_EDGE
)
1772 supernode
= src_point
.get_supernode ();
1773 stmt
= supernode
->get_last_stmt ();
1774 stack_depth
= src_stack_depth
;
1777 /* Bulletproofing for state changes at calls/returns;
1778 TODO: is there a better way? */
1782 m_emission_path
->add_event
1783 (make_unique
<state_change_event
> (supernode
,
/* Captured context for building the events.  */
1796 const path_builder
&m_pb
;
1797 const exploded_edge
&m_eedge
;
1798 checker_path
*m_emission_path
;
1801 /* Compare SRC_STATE and DST_STATE (which use EXT_STATE), and call
1802 VISITOR's on_state_change for every sm-state change that occurs
1803 to a tree, and on_global_state_change for every global state change
1806 This determines the state changes that ought to be reported to
1807 the user: a combination of the effects of changes to sm_state_map
1808 (which maps svalues to sm-states), and of region_model changes
1809 (which map trees to svalues).
1811 Bail out early and return true if any call to on_global_state_change
1812 or on_state_change returns true, otherwise return false.
1814 This is split out to make it easier to experiment with changes to
1815 exploded_node granularity (so that we can observe what state changes
1816 lead to state_change_events being emitted). */
1819 for_each_state_change (const program_state
&src_state
,
1820 const program_state
&dst_state
,
1821 const extrinsic_state
&ext_state
,
1822 state_change_visitor
*visitor
)
1824 gcc_assert (src_state
.m_checker_states
.length ()
1825 == ext_state
.get_num_checkers ());
1826 gcc_assert (dst_state
.m_checker_states
.length ()
1827 == ext_state
.get_num_checkers ());
1828 for (unsigned i
= 0; i
< ext_state
.get_num_checkers (); i
++)
1830 const state_machine
&sm
= ext_state
.get_sm (i
);
1831 const sm_state_map
&src_smap
= *src_state
.m_checker_states
[i
];
1832 const sm_state_map
&dst_smap
= *dst_state
.m_checker_states
[i
];
1834 /* Add events for any global state changes. */
1835 if (src_smap
.get_global_state () != dst_smap
.get_global_state ())
1836 if (visitor
->on_global_state_change (sm
,
1837 src_smap
.get_global_state (),
1838 dst_smap
.get_global_state ()))
1841 /* Add events for per-svalue state changes. */
1842 for (sm_state_map::iterator_t iter
= dst_smap
.begin ();
1843 iter
!= dst_smap
.end ();
1846 const svalue
*sval
= (*iter
).first
;
1847 state_machine::state_t dst_sm_val
= (*iter
).second
.m_state
;
1848 state_machine::state_t src_sm_val
1849 = src_smap
.get_state (sval
, ext_state
);
1850 if (dst_sm_val
!= src_sm_val
)
1852 const svalue
*origin_sval
= (*iter
).second
.m_origin
;
1853 if (visitor
->on_state_change (sm
, src_sm_val
, dst_sm_val
,
/* NOTE(review): mangled extraction region — the constructor's `stmt`
   parameter, the `var`/`sval` parameters of the two set_next_state
   overloads, the state_change_event constructor arguments, and several
   small method bodies are missing (the embedded numbering jumps).
   Kept byte-identical; only comments added.
   Purpose (from the surviving text): an sm_context that replays stmts to
   synthesize state_change_events for assignments to NULL, whose state is
   deliberately not stored in the sm_state_map (to avoid exploded-graph
   bloat).  */
1862 /* An sm_context for adding state_change_event on assignments to NULL,
1863 where the default state isn't m_start. Storing such state in the
1864 sm_state_map would lead to bloat of the exploded_graph, so we want
1865 to leave it as a default state, and inject state change events here
1866 when we have a diagnostic.
1867 Find transitions of constants, for handling on_zero_assignment. */
1869 struct null_assignment_sm_context
: public sm_context
1871 null_assignment_sm_context (int sm_idx
,
1872 const state_machine
&sm
,
1873 const program_state
*old_state
,
1874 const program_state
*new_state
,
1876 const program_point
*point
,
1877 checker_path
*emission_path
,
1878 const extrinsic_state
&ext_state
)
1879 : sm_context (sm_idx
, sm
), m_old_state (old_state
), m_new_state (new_state
),
1880 m_stmt (stmt
), m_point (point
), m_emission_path (emission_path
),
1881 m_ext_state (ext_state
)
/* No call-handling in this context.  */
1885 tree
get_fndecl_for_call (const gcall */
*call*/
) final override
/* Look up VAR's sm-state in the old state's checker map.  */
1890 state_machine::state_t
get_state (const gimple
*stmt ATTRIBUTE_UNUSED
,
1891 tree var
) final override
1893 const svalue
*var_old_sval
1894 = m_old_state
->m_region_model
->get_rvalue (var
, NULL
);
1895 const sm_state_map
*old_smap
= m_old_state
->m_checker_states
[m_sm_idx
];
1897 state_machine::state_t current
1898 = old_smap
->get_state (var_old_sval
, m_ext_state
);
/* svalue overload of the above.  */
1903 state_machine::state_t
get_state (const gimple
*stmt ATTRIBUTE_UNUSED
,
1904 const svalue
*sval
) final override
1906 const sm_state_map
*old_smap
= m_old_state
->m_checker_states
[m_sm_idx
];
1907 state_machine::state_t current
= old_smap
->get_state (sval
, m_ext_state
);
/* Emit a state_change_event, but only for transitions from the start
   state to the "null" state.  */
1911 void set_next_state (const gimple
*stmt
,
1913 state_machine::state_t to
,
1914 tree origin ATTRIBUTE_UNUSED
) final override
1916 state_machine::state_t from
= get_state (stmt
, var
);
1917 if (from
!= m_sm
.get_start_state ())
1919 if (!is_transition_to_null (to
))
1922 const svalue
*var_new_sval
1923 = m_new_state
->m_region_model
->get_rvalue (var
, NULL
);
1925 const supernode
*supernode
= m_point
->get_supernode ();
1926 int stack_depth
= m_point
->get_stack_depth ();
1928 m_emission_path
->add_event
1929 (make_unique
<state_change_event
> (supernode
,
/* svalue overload of the above.  */
1940 void set_next_state (const gimple
*stmt
,
1942 state_machine::state_t to
,
1943 tree origin ATTRIBUTE_UNUSED
) final override
1945 state_machine::state_t from
= get_state (stmt
, sval
);
1946 if (from
!= m_sm
.get_start_state ())
1948 if (!is_transition_to_null (to
))
1951 const supernode
*supernode
= m_point
->get_supernode ();
1952 int stack_depth
= m_point
->get_stack_depth ();
1954 m_emission_path
->add_event
1955 (make_unique
<state_change_event
> (supernode
,
/* Warnings are deliberately ignored in this replay context.  */
1966 void warn (const supernode
*, const gimple
*,
1967 tree
, std::unique_ptr
<pending_diagnostic
>) final override
1970 void warn (const supernode
*, const gimple
*,
1971 const svalue
*, std::unique_ptr
<pending_diagnostic
>) final override
1975 tree
get_diagnostic_tree (tree expr
) final override
1980 tree
get_diagnostic_tree (const svalue
*sval
) final override
1982 return m_new_state
->m_region_model
->get_representative_tree (sval
);
1985 state_machine::state_t
get_global_state () const final override
1990 void set_global_state (state_machine::state_t
) final override
1995 void on_custom_transition (custom_transition
*) final override
/* If STMT assigns a constant zero, return its LHS; used to spot
   assignments to NULL.  */
1999 tree
is_zero_assignment (const gimple
*stmt
) final override
2001 const gassign
*assign_stmt
= dyn_cast
<const gassign
*> (stmt
);
2004 if (const svalue
*sval
2005 = m_new_state
->m_region_model
->get_gassign_result (assign_stmt
, NULL
))
2006 if (tree cst
= sval
->maybe_get_constant ())
2008 return gimple_assign_lhs (assign_stmt
);
2012 const program_state
*get_old_program_state () const final override
2016 const program_state
*get_new_program_state () const final override
2021 /* We only care about transitions to the "null" state
2022 within sm-malloc. Special-case this. */
2023 static bool is_transition_to_null (state_machine::state_t s
)
2025 return !strcmp (s
->get_name (), "null");
/* Captured context.  */
2028 const program_state
*m_old_state
;
2029 const program_state
*m_new_state
;
2030 const gimple
*m_stmt
;
2031 const program_point
*m_point
;
2032 checker_path
*m_emission_path
;
2033 const extrinsic_state
&m_ext_state
;
/* NOTE(review): mangled extraction region — this is the most lossy block:
   missing loop/`while` framing around the stmt-replay, missing event
   constructor arguments, missing braces and `break;`s (the embedded
   numbering jumps repeatedly).  Kept byte-identical; only comments added.
   Purpose (from the surviving text): append to EMISSION_PATH all events
   arising from one exploded_edge: state-change events, custom-edge
   events, superedge/function-entry/statement events, region-creation
   events, and a note if this edge matches a feasibility problem.  */
2036 /* Subroutine of diagnostic_manager::build_emission_path.
2037 Add any events for EEDGE to EMISSION_PATH. */
2040 diagnostic_manager::add_events_for_eedge (const path_builder
&pb
,
2041 const exploded_edge
&eedge
,
2042 checker_path
*emission_path
,
2043 interesting_t
*interest
) const
2045 const exploded_node
*src_node
= eedge
.m_src
;
2046 const program_point
&src_point
= src_node
->get_point ();
2047 const int src_stack_depth
= src_point
.get_stack_depth ();
2048 const exploded_node
*dst_node
= eedge
.m_dest
;
2049 const program_point
&dst_point
= dst_node
->get_point ();
2050 const int dst_stack_depth
= dst_point
.get_stack_depth ();
/* Log the edge being processed.  */
2053 get_logger ()->start_log_line ();
2054 pretty_printer
*pp
= get_logger ()->get_printer ();
2055 pp_printf (pp
, "EN %i -> EN %i: ",
2056 eedge
.m_src
->m_index
,
2057 eedge
.m_dest
->m_index
);
2058 src_point
.print (pp
, format (false));
2059 pp_string (pp
, "-> ");
2060 dst_point
.print (pp
, format (false));
2061 get_logger ()->end_log_line ();
2063 const program_state
&src_state
= src_node
->get_state ();
2064 const program_state
&dst_state
= dst_node
->get_state ();
2066 /* Add state change events for the states that have changed.
2067 We add these before events for superedges, so that if we have a
2068 state_change_event due to following an edge, we'll get this sequence
2074 | (1) assuming 'ptr' is non-NULL (state_change_event)
2075 | (2) following 'false' branch... (start_cfg_edge_event)
2077 | do_something (ptr);
2078 | ~~~~~~~~~~~~~^~~~~
2080 | (3) ...to here (end_cfg_edge_event). */
2081 state_change_event_creator
visitor (pb
, eedge
, emission_path
);
2082 for_each_state_change (src_state
, dst_state
, pb
.get_ext_state (),
2085 /* Allow non-standard edges to add events, e.g. when rewinding from
2086 longjmp to a setjmp. */
2087 if (eedge
.m_custom_info
)
2088 eedge
.m_custom_info
->add_events_to_path (emission_path
, eedge
);
2090 /* Add events for superedges, function entries, and for statements. */
2091 switch (dst_point
.get_kind ())
2095 case PK_BEFORE_SUPERNODE
:
2096 if (src_point
.get_kind () == PK_AFTER_SUPERNODE
)
2099 add_events_for_superedge (pb
, eedge
, emission_path
);
2101 /* Add function entry events. */
2102 if (dst_point
.get_supernode ()->entry_p ())
2104 pb
.get_pending_diagnostic ()->add_function_entry_event
2105 (eedge
, emission_path
);
2106 /* Create region_creation_events for on-stack regions within
2112 FOR_EACH_VEC_ELT (interest
->m_region_creation
, i
, reg
)
2113 if (const frame_region
*frame
= reg
->maybe_get_frame_region ())
2114 if (frame
->get_fndecl () == dst_point
.get_fndecl ())
2116 const region
*base_reg
= reg
->get_base_region ();
2117 if (tree decl
= base_reg
->maybe_get_decl ())
2119 && DECL_SOURCE_LOCATION (decl
) != UNKNOWN_LOCATION
)
2121 emission_path
->add_region_creation_events
2122 (pb
.get_pending_diagnostic (),
2123 reg
, dst_state
.m_region_model
,
2124 event_loc_info (DECL_SOURCE_LOCATION (decl
),
2125 dst_point
.get_fndecl (),
2133 case PK_BEFORE_STMT
:
2135 const gimple
*stmt
= dst_point
.get_stmt ();
2136 const gcall
*call
= dyn_cast
<const gcall
*> (stmt
);
/* setjmp-like calls get a dedicated event kind; other stmts get a
   plain statement_event.  */
2137 if (call
&& is_setjmp_call_p (call
))
2138 emission_path
->add_event
2139 (make_unique
<setjmp_event
> (event_loc_info (stmt
->location
,
2140 dst_point
.get_fndecl (),
2145 emission_path
->add_event
2146 (make_unique
<statement_event
> (stmt
,
2147 dst_point
.get_fndecl (),
2148 dst_stack_depth
, dst_state
));
2150 /* Create state change events for assignment to NULL.
2151 Iterate through the stmts in dst_enode, adding state change
2153 if (dst_state
.m_region_model
)
2155 log_scope
s (get_logger (), "processing run of stmts");
2156 program_state
iter_state (dst_state
);
2157 program_point
iter_point (dst_point
);
2160 const gimple
*stmt
= iter_point
.get_stmt ();
2161 if (const gassign
*assign
= dyn_cast
<const gassign
*> (stmt
))
2163 const extrinsic_state
&ext_state
= pb
.get_ext_state ();
2164 program_state
old_state (iter_state
);
2165 iter_state
.m_region_model
->on_assignment (assign
, NULL
);
2166 for (unsigned i
= 0; i
< ext_state
.get_num_checkers (); i
++)
2168 const state_machine
&sm
= ext_state
.get_sm (i
);
2169 null_assignment_sm_context
sm_ctxt (i
, sm
,
2175 pb
.get_ext_state ());
2176 sm
.on_stmt (&sm_ctxt
, dst_point
.get_supernode (), stmt
);
2177 // TODO: what about phi nodes?
/* Stop replaying at the end of the supernode, or where the exploded
   graph diverges from a straight-line run of stmts.  */
2180 iter_point
.next_stmt ();
2181 if (iter_point
.get_kind () == PK_AFTER_SUPERNODE
2182 || (dst_node
->m_succs
.length () > 1
2184 == dst_node
->m_succs
[0]->m_dest
->get_point ())))
2193 /* Look for changes in dynamic extents, which will identify
2194 the creation of heap-based regions and alloca regions. */
2197 const region_model
*src_model
= src_state
.m_region_model
;
2198 const region_model
*dst_model
= dst_state
.m_region_model
;
2199 if (src_model
->get_dynamic_extents ()
2200 != dst_model
->get_dynamic_extents ())
2204 FOR_EACH_VEC_ELT (interest
->m_region_creation
, i
, reg
)
2206 const region
*base_reg
= reg
->get_base_region ();
2207 const svalue
*old_extents
2208 = src_model
->get_dynamic_extents (base_reg
);
2209 const svalue
*new_extents
2210 = dst_model
->get_dynamic_extents (base_reg
);
/* Extent appearing (NULL -> non-NULL) means the region was created
   on this edge.  */
2211 if (old_extents
== NULL
&& new_extents
!= NULL
)
2212 switch (base_reg
->get_kind ())
2216 case RK_HEAP_ALLOCATED
:
2218 emission_path
->add_region_creation_events
2219 (pb
.get_pending_diagnostic (),
2221 event_loc_info (src_point
.get_location (),
2222 src_point
.get_fndecl (),
/* If this edge is the one recorded as a feasibility problem,
   note that in the emitted path.  */
2231 if (pb
.get_feasibility_problem ()
2232 && &pb
.get_feasibility_problem ()->m_eedge
== &eedge
)
2235 pp_format_decoder (&pp
) = default_tree_printer
;
2237 "this path would have been rejected as infeasible"
2239 pb
.get_feasibility_problem ()->dump_to_pp (&pp
);
2240 emission_path
->add_event
2241 (make_unique
<precanned_custom_event
>
2242 (event_loc_info (dst_point
.get_location (),
2243 dst_point
.get_fndecl (),
2245 pp_formatted_text (&pp
)));
2249 /* Return true if EEDGE is a significant edge in the path to the diagnostic
2252 Consider all of the sibling out-eedges from the same source enode
2254 If there's no path from the destinations of those eedges to the
2255 diagnostic enode, then we have to take this eedge and thus it's
2258 Conversely if there is a path from the destination of any other sibling
2259 eedge to the diagnostic enode, then this edge is insignificant.
2261 Example 1: redundant if-else:
2269 D is reachable by either B or C, so neither of these edges
2272 Example 2: pertinent if-else:
2277 (C) [NECESSARY CONDITION] | |
2278 (D) [POSSIBLE DIAGNOSTIC] D1 D2
2280 D becomes D1 and D2 in the exploded graph, where the diagnostic occurs
2281 at D2. D2 is only reachable via C, so the A -> C edge is significant.
2283 Example 3: redundant loop:
2285 (A) while (...) +-->A
2291 D is reachable from both B and C, so the A->C edge is not significant. */
2294 diagnostic_manager::significant_edge_p (const path_builder
&pb
,
2295 const exploded_edge
&eedge
) const
2298 exploded_edge
*sibling
;
2299 FOR_EACH_VEC_ELT (eedge
.m_src
->m_succs
, i
, sibling
)
2301 if (sibling
== &eedge
)
2303 if (pb
.reachable_from_p (sibling
->m_dest
))
2306 get_logger ()->log (" edge EN: %i -> EN: %i is insignificant as"
2307 " EN: %i is also reachable via"
2308 " EN: %i -> EN: %i",
2309 eedge
.m_src
->m_index
, eedge
.m_dest
->m_index
,
2310 pb
.get_diag_node ()->m_index
,
2311 sibling
->m_src
->m_index
,
2312 sibling
->m_dest
->m_index
);
2320 /* Subroutine of diagnostic_manager::add_events_for_eedge
2321 where EEDGE has an underlying superedge i.e. a CFG edge,
2322 or an interprocedural call/return.
2323 Add any events for the superedge to EMISSION_PATH. */
2326 diagnostic_manager::add_events_for_superedge (const path_builder
&pb
,
2327 const exploded_edge
&eedge
,
2328 checker_path
*emission_path
)
2331 gcc_assert (eedge
.m_sedge
);
2333 /* Give diagnostics an opportunity to override this function. */
2334 pending_diagnostic
*pd
= pb
.get_pending_diagnostic ();
2335 if (pd
->maybe_add_custom_events_for_superedge (eedge
, emission_path
))
2338 /* Don't add events for insignificant edges at verbosity levels below 3. */
2339 if (m_verbosity
< 3)
2340 if (!significant_edge_p (pb
, eedge
))
2343 const exploded_node
*src_node
= eedge
.m_src
;
2344 const program_point
&src_point
= src_node
->get_point ();
2345 const exploded_node
*dst_node
= eedge
.m_dest
;
2346 const program_point
&dst_point
= dst_node
->get_point ();
2347 const int src_stack_depth
= src_point
.get_stack_depth ();
2348 const int dst_stack_depth
= dst_point
.get_stack_depth ();
2349 const gimple
*last_stmt
= src_point
.get_supernode ()->get_last_stmt ();
2351 switch (eedge
.m_sedge
->m_kind
)
2353 case SUPEREDGE_CFG_EDGE
:
2355 emission_path
->add_event
2356 (make_unique
<start_cfg_edge_event
>
2358 event_loc_info (last_stmt
? last_stmt
->location
: UNKNOWN_LOCATION
,
2359 src_point
.get_fndecl (),
2361 emission_path
->add_event
2362 (make_unique
<end_cfg_edge_event
>
2364 event_loc_info (dst_point
.get_supernode ()->get_start_location (),
2365 dst_point
.get_fndecl (),
2370 case SUPEREDGE_CALL
:
2371 pd
->add_call_event (eedge
, emission_path
);
2374 case SUPEREDGE_INTRAPROCEDURAL_CALL
:
2376 /* TODO: add a subclass for this, or generate events for the
2378 emission_path
->add_event
2379 (make_unique
<debug_event
> (event_loc_info (last_stmt
2380 ? last_stmt
->location
2382 src_point
.get_fndecl (),
2388 case SUPEREDGE_RETURN
:
2390 const return_superedge
*return_edge
2391 = as_a
<const return_superedge
*> (eedge
.m_sedge
);
2393 const gcall
*call_stmt
= return_edge
->get_call_stmt ();
2394 emission_path
->add_event
2395 (make_unique
<return_event
> (eedge
,
2396 event_loc_info (call_stmt
2397 ? call_stmt
->location
2399 dst_point
.get_fndecl (),
2406 /* Prune PATH, based on the verbosity level, to the most pertinent
2407 events for a diagnostic that involves VAR ending in state STATE
2408 (for state machine SM).
2410 PATH is updated in place, and the redundant checker_events are deleted.
2412 As well as deleting events, call record_critical_state on events in
2413 which state critical to the pending_diagnostic is being handled; see
2414 the comment for diagnostic_manager::prune_for_sm_diagnostic. */
2417 diagnostic_manager::prune_path (checker_path
*path
,
2418 const state_machine
*sm
,
2420 state_machine::state_t state
) const
2422 LOG_FUNC (get_logger ());
2423 path
->maybe_log (get_logger (), "path");
2424 prune_for_sm_diagnostic (path
, sm
, sval
, state
);
2425 prune_interproc_events (path
);
2426 if (! flag_analyzer_show_events_in_system_headers
)
2427 prune_system_headers (path
);
2428 consolidate_conditions (path
);
2429 finish_pruning (path
);
2430 path
->maybe_log (get_logger (), "pruned");
2433 /* A cheap test to determine if EXPR can be the expression of interest in
2434 an sm-diagnostic, so that we can reject cases where we have a non-lvalue.
2435 We don't have always have a model when calling this, so we can't use
2436 tentative_region_model_context, so there can be false positives. */
2439 can_be_expr_of_interest_p (tree expr
)
2444 /* Reject constants. */
2445 if (CONSTANT_CLASS_P (expr
))
2448 /* Otherwise assume that it can be an lvalue. */
2452 /* First pass of diagnostic_manager::prune_path: apply verbosity level,
2453 pruning unrelated state change events.
2455 Iterate backwards through PATH, skipping state change events that aren't
2456 VAR but update the pertinent VAR when state-copying occurs.
2458 As well as deleting events, call record_critical_state on events in
2459 which state critical to the pending_diagnostic is being handled, so
2460 that the event's get_desc vfunc can potentially supply a more precise
2461 description of the event to the user.
2463 "calling 'foo' from 'bar'"
2465 "passing possibly-NULL pointer 'ptr' to 'foo' from 'bar' as param 1"
2466 when the diagnostic relates to later dereferencing 'ptr'. */
2469 diagnostic_manager::prune_for_sm_diagnostic (checker_path
*path
,
2470 const state_machine
*sm
,
2472 state_machine::state_t state
) const
2474 int idx
= path
->num_events () - 1;
2475 while (idx
>= 0 && idx
< (signed)path
->num_events ())
2477 checker_event
*base_event
= path
->get_checker_event (idx
);
2484 label_text sval_desc
= sval
->get_desc ();
2485 log ("considering event %i (%s), with sval: %qs, state: %qs",
2486 idx
, event_kind_to_string (base_event
->m_kind
),
2487 sval_desc
.get (), state
->get_name ());
2490 log ("considering event %i (%s), with global state: %qs",
2491 idx
, event_kind_to_string (base_event
->m_kind
),
2492 state
->get_name ());
2495 log ("considering event %i", idx
);
2498 switch (base_event
->m_kind
)
2504 if (m_verbosity
< 4)
2506 log ("filtering event %i: debug event", idx
);
2507 path
->delete_event (idx
);
2512 /* Don't filter custom events. */
2517 if (m_verbosity
< 4)
2519 log ("filtering event %i: statement event", idx
);
2520 path
->delete_event (idx
);
2525 case EK_REGION_CREATION
:
2526 /* Don't filter these. */
2529 case EK_FUNCTION_ENTRY
:
2530 if (m_verbosity
< 1)
2532 log ("filtering event %i: function entry", idx
);
2533 path
->delete_event (idx
);
2537 case EK_STATE_CHANGE
:
2539 state_change_event
*state_change
= (state_change_event
*)base_event
;
2540 gcc_assert (state_change
->m_dst_state
.m_region_model
);
2542 if (state_change
->m_sval
== sval
)
2544 if (state_change
->m_origin
)
2548 label_text sval_desc
= sval
->get_desc ();
2549 label_text origin_sval_desc
2550 = state_change
->m_origin
->get_desc ();
2552 " switching var of interest from %qs to %qs",
2553 idx
, sval_desc
.get (),
2554 origin_sval_desc
.get ());
2556 sval
= state_change
->m_origin
;
2558 log ("event %i: switching state of interest from %qs to %qs",
2559 idx
, state_change
->m_to
->get_name (),
2560 state_change
->m_from
->get_name ());
2561 state
= state_change
->m_from
;
2563 else if (m_verbosity
< 4)
2567 if (state_change
->m_sval
)
2569 label_text change_sval_desc
2570 = state_change
->m_sval
->get_desc ();
2573 label_text sval_desc
= sval
->get_desc ();
2574 log ("filtering event %i:"
2575 " state change to %qs unrelated to %qs",
2576 idx
, change_sval_desc
.get (),
2580 log ("filtering event %i: state change to %qs",
2581 idx
, change_sval_desc
.get ());
2584 log ("filtering event %i: global state change", idx
);
2586 path
->delete_event (idx
);
2591 case EK_START_CFG_EDGE
:
2593 cfg_edge_event
*event
= (cfg_edge_event
*)base_event
;
2595 /* TODO: is this edge significant to var?
2596 See if var can be in other states in the dest, but not
2597 in other states in the src?
2598 Must have multiple sibling edges. */
2600 if (event
->should_filter_p (m_verbosity
))
2602 log ("filtering events %i and %i: CFG edge", idx
, idx
+ 1);
2603 path
->delete_event (idx
);
2604 /* Also delete the corresponding EK_END_CFG_EDGE. */
2605 gcc_assert (path
->get_checker_event (idx
)->m_kind
2606 == EK_END_CFG_EDGE
);
2607 path
->delete_event (idx
);
2612 case EK_END_CFG_EDGE
:
2613 /* These come in pairs with EK_START_CFG_EDGE events and are
2614 filtered when their start event is filtered. */
2619 call_event
*event
= (call_event
*)base_event
;
2620 const region_model
*callee_model
2621 = event
->m_eedge
.m_dest
->get_state ().m_region_model
;
2622 const region_model
*caller_model
2623 = event
->m_eedge
.m_src
->get_state ().m_region_model
;
2624 tree callee_var
= callee_model
->get_representative_tree (sval
);
2630 const callgraph_superedge
& cg_superedge
2631 = event
->get_callgraph_superedge ();
2632 if (cg_superedge
.m_cedge
)
2634 = cg_superedge
.map_expr_from_callee_to_caller (callee_var
,
2637 caller_var
= caller_model
->get_representative_tree (sval
);
2640 caller_var
= caller_model
->get_representative_tree (sval
);
2646 label_text sval_desc
= sval
->get_desc ();
2648 " recording critical state for %qs at call"
2649 " from %qE in callee to %qE in caller",
2650 idx
, sval_desc
.get (), callee_var
, caller_var
);
2652 if (expr
.param_p ())
2653 event
->record_critical_state (caller_var
, state
);
2658 case EK_RETURN_EDGE
:
2662 return_event
*event
= (return_event
*)base_event
;
2663 const region_model
*caller_model
2664 = event
->m_eedge
.m_dest
->get_state ().m_region_model
;
2665 tree caller_var
= caller_model
->get_representative_tree (sval
);
2666 const region_model
*callee_model
2667 = event
->m_eedge
.m_src
->get_state ().m_region_model
;
2673 const callgraph_superedge
& cg_superedge
2674 = event
->get_callgraph_superedge ();
2675 if (cg_superedge
.m_cedge
)
2677 = cg_superedge
.map_expr_from_caller_to_callee (caller_var
,
2680 callee_var
= callee_model
->get_representative_tree (sval
);
2683 callee_var
= callee_model
->get_representative_tree (sval
);
2689 label_text sval_desc
= sval
->get_desc ();
2691 " recording critical state for %qs at return"
2692 " from %qE in caller to %qE in callee",
2693 idx
, sval_desc
.get (), callee_var
, callee_var
);
2695 if (expr
.return_value_p ())
2696 event
->record_critical_state (callee_var
, state
);
2702 case EK_INLINED_CALL
:
2703 /* We don't expect to see these yet, as they're added later.
2704 We'd want to keep them around. */
2708 /* TODO: only show setjmp_events that matter i.e. those for which
2709 there is a later rewind event using them. */
2710 case EK_REWIND_FROM_LONGJMP
:
2711 case EK_REWIND_TO_SETJMP
:
2715 /* Always show the final "warning" event in the path. */
2722 /* Subroutine of diagnostic_manager::prune_for_sm_diagnostic.
2723 If *EXPR is not suitable to be the expression of interest in
2724 an sm-diagnostic, set *EXPR to NULL and log. */
2727 diagnostic_manager::update_for_unsuitable_sm_exprs (tree
*expr
) const
2730 if (*expr
&& !can_be_expr_of_interest_p (*expr
))
2732 log ("new var %qE is unsuitable; setting var to NULL", *expr
);
2737 /* Second pass of diagnostic_manager::prune_path: remove redundant
2738 interprocedural information.
2741 (1)- calling "f2" from "f1"
2742 (2)--- entry to "f2"
2743 (3)--- calling "f3" from "f2"
2744 (4)----- entry to "f3"
2745 (5)--- returning to "f2" to "f3"
2746 (6)- returning to "f1" to "f2"
2747 with no other intervening events, then none of these events are
2748 likely to be interesting to the user.
2750 Prune [..., call, function-entry, return, ...] triples repeatedly
2751 until nothing has changed. For the example above, this would
2752 remove events (3, 4, 5), and then remove events (1, 2, 6). */
2755 diagnostic_manager::prune_interproc_events (checker_path
*path
) const
2757 bool changed
= false;
2761 int idx
= (signed)path
->num_events () - 1;
2764 /* Prune [..., call, function-entry, return, ...] triples. */
2765 if (idx
+ 2 < (signed)path
->num_events ()
2766 && path
->get_checker_event (idx
)->is_call_p ()
2767 && path
->get_checker_event (idx
+ 1)->is_function_entry_p ()
2768 && path
->get_checker_event (idx
+ 2)->is_return_p ())
2773 (path
->get_checker_event (idx
)->get_desc (false));
2774 log ("filtering events %i-%i:"
2775 " irrelevant call/entry/return: %s",
2776 idx
, idx
+ 2, desc
.get ());
2778 path
->delete_event (idx
+ 2);
2779 path
->delete_event (idx
+ 1);
2780 path
->delete_event (idx
);
2786 /* Prune [..., call, return, ...] pairs
2787 (for -fanalyzer-verbosity=0). */
2788 if (idx
+ 1 < (signed)path
->num_events ()
2789 && path
->get_checker_event (idx
)->is_call_p ()
2790 && path
->get_checker_event (idx
+ 1)->is_return_p ())
2795 (path
->get_checker_event (idx
)->get_desc (false));
2796 log ("filtering events %i-%i:"
2797 " irrelevant call/return: %s",
2798 idx
, idx
+ 1, desc
.get ());
2800 path
->delete_event (idx
+ 1);
2801 path
->delete_event (idx
);
2814 /* Remove everything within [call point, IDX]. For consistency,
2815 IDX should represent the return event of the frame to delete,
2816 or if there is none it should be the last event of the frame.
2817 After this function, IDX designates the event prior to calling
2821 prune_frame (checker_path
*path
, int &idx
)
2823 gcc_assert (idx
>= 0);
2825 if (path
->get_checker_event (idx
)->is_return_p ())
2829 if (path
->get_checker_event (idx
)->is_call_p ())
2831 else if (path
->get_checker_event (idx
)->is_return_p ())
2834 path
->delete_event (idx
--);
2835 } while (idx
>= 0 && nesting
!= 0);
2838 /* This function is called when fanalyzer-show-events-in-system-headers
2839 is disabled and will prune the diagnostic of all events within a
2840 system header, only keeping the entry and exit events to the header.
2841 This should be called after diagnostic_manager::prune_interproc_events
2842 so that sucessive events [system header call, system header return]
2843 are preserved thereafter.
2845 Given a diagnostics path diving into a system header in the form
2849 system header entry,
2850 events within system headers...,
2851 system header return,
2855 then transforms it into
2859 system header return,
2864 diagnostic_manager::prune_system_headers (checker_path
*path
) const
2866 int idx
= (signed)path
->num_events () - 1;
2869 const checker_event
*event
= path
->get_checker_event (idx
);
2870 /* Prune everything between
2871 [..., system entry, (...), system return, ...]. */
2872 if (event
->is_return_p ()
2873 && in_system_header_at (event
->get_location ()))
2876 prune_frame (path
, idx
);
2880 log ("filtering system headers events %i-%i:",
2883 // Delete function entry within system headers.
2886 event
= path
->get_checker_event (idx
);
2887 if (event
->is_function_entry_p ()
2888 && in_system_header_at (event
->get_location ()))
2892 label_text
desc (event
->get_desc (false));
2893 log ("filtering event %i:"
2894 "system header entry event: %s",
2898 path
->delete_event (idx
);
2907 /* Return true iff event IDX within PATH is on the same line as REF_EXP_LOC. */
2910 same_line_as_p (const expanded_location
&ref_exp_loc
,
2911 checker_path
*path
, unsigned idx
)
2913 const checker_event
*ev
= path
->get_checker_event (idx
);
2914 expanded_location idx_exp_loc
= expand_location (ev
->get_location ());
2915 gcc_assert (ref_exp_loc
.file
);
2916 if (idx_exp_loc
.file
== NULL
)
2918 if (strcmp (ref_exp_loc
.file
, idx_exp_loc
.file
))
2920 return ref_exp_loc
.line
== idx_exp_loc
.line
;
2923 /* This path-readability optimization reduces the verbosity of compound
2924 conditional statements (without needing to reconstruct the AST, which
2925 has already been lost).
2927 For example, it converts:
2929 | 61 | if (cp[0] != '\0' && cp[0] != '#')
2930 | | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
2932 | | | | (6) ...to here
2933 | | | (7) following ‘true’ branch...
2934 | | (5) following ‘true’ branch...
2936 | 63 | alias = cp++;
2943 | 61 | if (cp[0] != '\0' && cp[0] != '#')
2946 | | (5) following ‘true’ branch...
2948 | 63 | alias = cp++;
2953 by combining events 5-8 into new events 5-6.
2955 Find runs of consecutive (start_cfg_edge_event, end_cfg_edge_event) pairs
2956 in which all events apart from the final end_cfg_edge_event are on the same
2957 line, and for which either all the CFG edges are TRUE edges, or all are
2960 Consolidate each such run into a
2961 (start_consolidated_cfg_edges_event, end_consolidated_cfg_edges_event)
2965 diagnostic_manager::consolidate_conditions (checker_path
*path
) const
2967 /* Don't simplify edges if we're debugging them. */
2968 if (flag_analyzer_verbose_edges
)
2971 for (int start_idx
= 0;
2972 start_idx
< (signed)path
->num_events () - 1;
2975 if (path
->cfg_edge_pair_at_p (start_idx
))
2977 const checker_event
*old_start_ev
2978 = path
->get_checker_event (start_idx
);
2979 expanded_location start_exp_loc
2980 = expand_location (old_start_ev
->get_location ());
2981 if (start_exp_loc
.file
== NULL
)
2983 if (!same_line_as_p (start_exp_loc
, path
, start_idx
+ 1))
2986 /* Are we looking for a run of all TRUE edges, or all FALSE edges? */
2987 gcc_assert (old_start_ev
->m_kind
== EK_START_CFG_EDGE
);
2988 const start_cfg_edge_event
*old_start_cfg_ev
2989 = (const start_cfg_edge_event
*)old_start_ev
;
2990 const cfg_superedge
& first_cfg_sedge
2991 = old_start_cfg_ev
->get_cfg_superedge ();
2993 if (first_cfg_sedge
.true_value_p ())
2995 else if (first_cfg_sedge
.false_value_p ())
3000 /* Find a run of CFG start/end event pairs from
3001 [start_idx, next_idx)
3002 where all apart from the final event are on the same line,
3003 and all are either TRUE or FALSE edges, matching the initial. */
3004 int next_idx
= start_idx
+ 2;
3005 while (path
->cfg_edge_pair_at_p (next_idx
)
3006 && same_line_as_p (start_exp_loc
, path
, next_idx
))
3008 const checker_event
*iter_ev
3009 = path
->get_checker_event (next_idx
);
3010 gcc_assert (iter_ev
->m_kind
== EK_START_CFG_EDGE
);
3011 const start_cfg_edge_event
*iter_cfg_ev
3012 = (const start_cfg_edge_event
*)iter_ev
;
3013 const cfg_superedge
& iter_cfg_sedge
3014 = iter_cfg_ev
->get_cfg_superedge ();
3017 if (!iter_cfg_sedge
.true_value_p ())
3022 if (!iter_cfg_sedge
.false_value_p ())
3028 /* If we have more than one pair in the run, consolidate. */
3029 if (next_idx
> start_idx
+ 2)
3031 const checker_event
*old_end_ev
3032 = path
->get_checker_event (next_idx
- 1);
3033 log ("consolidating CFG edge events %i-%i into %i-%i",
3034 start_idx
, next_idx
- 1, start_idx
, start_idx
+1);
3035 start_consolidated_cfg_edges_event
*new_start_ev
3036 = new start_consolidated_cfg_edges_event
3037 (event_loc_info (old_start_ev
->get_location (),
3038 old_start_ev
->get_fndecl (),
3039 old_start_ev
->get_stack_depth ()),
3041 checker_event
*new_end_ev
3042 = new end_consolidated_cfg_edges_event
3043 (event_loc_info (old_end_ev
->get_location (),
3044 old_end_ev
->get_fndecl (),
3045 old_end_ev
->get_stack_depth ()));
3046 path
->replace_event (start_idx
, new_start_ev
);
3047 path
->replace_event (start_idx
+ 1, new_end_ev
);
3048 path
->delete_events (start_idx
+ 2, next_idx
- (start_idx
+ 2));
3054 /* Final pass of diagnostic_manager::prune_path.
3056 If all we're left with is in one function, then filter function entry
3060 diagnostic_manager::finish_pruning (checker_path
*path
) const
3062 if (!path
->interprocedural_p ())
3064 int idx
= path
->num_events () - 1;
3065 while (idx
>= 0 && idx
< (signed)path
->num_events ())
3067 checker_event
*base_event
= path
->get_checker_event (idx
);
3068 if (base_event
->m_kind
== EK_FUNCTION_ENTRY
)
3070 log ("filtering event %i:"
3071 " function entry for purely intraprocedural path", idx
);
3072 path
->delete_event (idx
);
3081 #endif /* #if ENABLE_ANALYZER */