/* gcc/analyzer/infinite-recursion.cc
   (web-scrape residue removed: unrelated commit title and blob hash.)  */
/* Detection of infinite recursion.
   Copyright (C) 2022-2024 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#define INCLUDE_MEMORY
#define INCLUDE_VECTOR
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "fold-const.h"
#include "gcc-rich-location.h"
#include "alloc-pool.h"
#include "fibonacci_heap.h"
#include "shortest-paths.h"
#include "diagnostic-core.h"
#include "diagnostic-event-id.h"
#include "diagnostic-path.h"
#include "function.h"
#include "pretty-print.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "tristate.h"
#include "ordered-hash-map.h"
#include "selftest.h"
#include "json.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"
#include "analyzer/sm.h"
#include "analyzer/pending-diagnostic.h"
#include "analyzer/diagnostic-manager.h"
#include "cfg.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-pretty-print.h"
#include "cgraph.h"
#include "digraph.h"
#include "analyzer/supergraph.h"
#include "analyzer/program-state.h"
#include "analyzer/exploded-graph.h"
#include "make-unique.h"
#include "analyzer/checker-path.h"
#include "analyzer/feasible-graph.h"
#include "diagnostic-format-sarif.h"
68 /* A subclass of pending_diagnostic for complaining about suspected
69 infinite recursion. */
71 class infinite_recursion_diagnostic
72 : public pending_diagnostic_subclass<infinite_recursion_diagnostic>
74 public:
75 infinite_recursion_diagnostic (const exploded_node *prev_entry_enode,
76 const exploded_node *new_entry_enode,
77 tree callee_fndecl)
78 : m_prev_entry_enode (prev_entry_enode),
79 m_new_entry_enode (new_entry_enode),
80 m_callee_fndecl (callee_fndecl),
81 m_prev_entry_event (NULL)
84 const char *get_kind () const final override
86 return "infinite_recursion_diagnostic";
89 bool operator== (const infinite_recursion_diagnostic &other) const
91 return m_callee_fndecl == other.m_callee_fndecl;
94 int get_controlling_option () const final override
96 return OPT_Wanalyzer_infinite_recursion;
99 bool emit (diagnostic_emission_context &ctxt) final override
101 /* "CWE-674: Uncontrolled Recursion". */
102 ctxt.add_cwe (674);
103 return ctxt.warn ("infinite recursion");
106 label_text describe_final_event (const evdesc::final_event &ev) final override
108 const int frames_consumed = (m_new_entry_enode->get_stack_depth ()
109 - m_prev_entry_enode->get_stack_depth ());
110 if (frames_consumed > 1)
111 return ev.formatted_print
112 ("apparently infinite chain of mutually-recursive function calls,"
113 " consuming %i stack frames per recursion",
114 frames_consumed);
115 else
116 return ev.formatted_print ("apparently infinite recursion");
119 void
120 add_function_entry_event (const exploded_edge &eedge,
121 checker_path *emission_path) final override
123 /* Subclass of function_entry_event for use when reporting both
124 the initial and subsequent entries to the function of interest,
125 allowing for cross-referencing the first event in the description
126 of the second. */
127 class recursive_function_entry_event : public function_entry_event
129 public:
130 recursive_function_entry_event (const program_point &dst_point,
131 const infinite_recursion_diagnostic &pd,
132 bool topmost)
133 : function_entry_event (dst_point),
134 m_pd (pd),
135 m_topmost (topmost)
139 label_text
140 get_desc (bool can_colorize) const final override
142 if (m_topmost)
144 if (m_pd.m_prev_entry_event
145 && m_pd.m_prev_entry_event->get_id_ptr ()->known_p ())
146 return make_label_text
147 (can_colorize,
148 "recursive entry to %qE; previously entered at %@",
149 m_effective_fndecl,
150 m_pd.m_prev_entry_event->get_id_ptr ());
151 else
152 return make_label_text (can_colorize, "recursive entry to %qE",
153 m_effective_fndecl);
155 else
156 return make_label_text (can_colorize, "initial entry to %qE",
157 m_effective_fndecl);
160 private:
161 const infinite_recursion_diagnostic &m_pd;
162 bool m_topmost;
164 const exploded_node *dst_node = eedge.m_dest;
165 const program_point &dst_point = dst_node->get_point ();
166 if (eedge.m_dest == m_prev_entry_enode)
168 gcc_assert (m_prev_entry_event == NULL);
169 std::unique_ptr<checker_event> prev_entry_event
170 = make_unique <recursive_function_entry_event> (dst_point,
171 *this, false);
172 m_prev_entry_event = prev_entry_event.get ();
173 emission_path->add_event (std::move (prev_entry_event));
175 else if (eedge.m_dest == m_new_entry_enode)
176 emission_path->add_event
177 (make_unique<recursive_function_entry_event> (dst_point, *this, true));
178 else
179 pending_diagnostic::add_function_entry_event (eedge, emission_path);
182 /* Customize the location where the warning_event appears, putting
183 it at the topmost entrypoint to the function. */
184 void add_final_event (const state_machine *,
185 const exploded_node *enode,
186 const event_loc_info &,
187 tree,
188 state_machine::state_t,
189 checker_path *emission_path) final override
191 gcc_assert (m_new_entry_enode);
192 emission_path->add_event
193 (make_unique<warning_event>
194 (event_loc_info (m_new_entry_enode->get_supernode
195 ()->get_start_location (),
196 m_callee_fndecl,
197 m_new_entry_enode->get_stack_depth ()),
198 enode,
199 NULL, NULL, NULL));
202 /* Reject paths in which conjured svalues have affected control flow
203 since m_prev_entry_enode. */
205 bool check_valid_fpath_p (const feasible_node &final_fnode,
206 const gimple *)
207 const final override
209 /* Reject paths in which calls with unknown side effects have occurred
210 since m_prev_entry_enode.
211 Find num calls with side effects. Walk backward until we reach the
212 pref */
213 gcc_assert (final_fnode.get_inner_node () == m_new_entry_enode);
215 /* FG is actually a tree. Walk backwards from FINAL_FNODE until we
216 reach the prev_entry_enode (or the origin). */
217 const feasible_node *iter_fnode = &final_fnode;
218 while (iter_fnode->get_inner_node ()->m_index != 0)
220 gcc_assert (iter_fnode->m_preds.length () == 1);
222 feasible_edge *pred_fedge
223 = static_cast <feasible_edge *> (iter_fnode->m_preds[0]);
225 /* Determine if conjured svalues have affected control flow
226 since the prev entry node. */
227 if (fedge_uses_conjured_svalue_p (pred_fedge))
228 /* If so, then reject this diagnostic. */
229 return false;
230 iter_fnode = static_cast <feasible_node *> (pred_fedge->m_src);
231 if (iter_fnode->get_inner_node () == m_prev_entry_enode)
232 /* Accept this diagnostic. */
233 return true;
236 /* We shouldn't get here; if we do, reject the diagnostic. */
237 gcc_unreachable ();
238 return false;
241 void maybe_add_sarif_properties (sarif_object &result_obj)
242 const final override
244 sarif_property_bag &props = result_obj.get_or_create_properties ();
245 #define PROPERTY_PREFIX "gcc/analyzer/infinite_recursion_diagnostic/"
246 props.set_integer (PROPERTY_PREFIX "prev_entry_enode",
247 m_prev_entry_enode->m_index);
248 props.set_integer (PROPERTY_PREFIX "new_entry_enode",
249 m_new_entry_enode->m_index);
250 #undef PROPERTY_PREFIX
253 private:
254 /* Return true iff control flow along FEDGE was affected by
255 a conjured_svalue. */
256 static bool fedge_uses_conjured_svalue_p (feasible_edge *fedge)
258 const exploded_edge *eedge = fedge->get_inner_edge ();
259 const superedge *sedge = eedge->m_sedge;
260 if (!sedge)
261 return false;
262 const cfg_superedge *cfg_sedge = sedge->dyn_cast_cfg_superedge ();
263 if (!cfg_sedge)
264 return false;
265 const gimple *last_stmt = sedge->m_src->get_last_stmt ();
266 if (!last_stmt)
267 return false;
269 const feasible_node *dst_fnode
270 = static_cast<const feasible_node *> (fedge->m_dest);
271 const region_model &model = dst_fnode->get_state ().get_model ();
273 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
275 if (expr_uses_conjured_svalue_p (model, gimple_cond_lhs (cond_stmt)))
276 return true;
277 if (expr_uses_conjured_svalue_p (model, gimple_cond_rhs (cond_stmt)))
278 return true;
280 else if (const gswitch *switch_stmt
281 = dyn_cast <const gswitch *> (last_stmt))
283 if (expr_uses_conjured_svalue_p (model,
284 gimple_switch_index (switch_stmt)))
285 return true;
287 return false;
290 /* Return true iff EXPR is affected by a conjured_svalue. */
291 static bool expr_uses_conjured_svalue_p (const region_model &model,
292 tree expr)
294 class conjured_svalue_finder : public visitor
296 public:
297 conjured_svalue_finder () : m_found_conjured_svalues (false)
300 void
301 visit_conjured_svalue (const conjured_svalue *) final override
303 m_found_conjured_svalues = true;
306 bool m_found_conjured_svalues;
309 const svalue *sval = model.get_rvalue (expr, NULL);
310 conjured_svalue_finder v;
311 sval->accept (&v);
312 return v.m_found_conjured_svalues;
315 const exploded_node *m_prev_entry_enode;
316 const exploded_node *m_new_entry_enode;
317 tree m_callee_fndecl;
318 const checker_event *m_prev_entry_event;
321 /* Return true iff ENODE is the PK_BEFORE_SUPERNODE at a function
322 entrypoint. */
324 static bool
325 is_entrypoint_p (exploded_node *enode)
327 /* Look for an entrypoint to a function... */
328 const supernode *snode = enode->get_supernode ();
329 if (!snode)
330 return false;
331 if (!snode->entry_p ())
332 return false;;
333 const program_point &point = enode->get_point ();
334 if (point.get_kind () != PK_BEFORE_SUPERNODE)
335 return false;
336 return true;
339 /* Walk backwards through the eg, looking for the first
340 enode we find that's also the entrypoint of the same function. */
342 exploded_node *
343 exploded_graph::find_previous_entry_to (function *top_of_stack_fun,
344 exploded_node *enode) const
346 auto_vec<exploded_node *> worklist;
347 hash_set<exploded_node *> visited;
349 visited.add (enode);
350 for (auto in_edge : enode->m_preds)
351 worklist.safe_push (in_edge->m_src);
353 while (worklist.length () > 0)
355 exploded_node *iter = worklist.pop ();
357 if (is_entrypoint_p (iter)
358 && iter->get_function () == top_of_stack_fun)
359 return iter;
361 if (visited.contains (iter))
362 continue;
363 visited.add (iter);
364 for (auto in_edge : iter->m_preds)
365 worklist.safe_push (in_edge->m_src);
368 /* Not found. */
369 return NULL;
372 /* Given BASE_REG within ENCLOSING_FRAME (such as a function parameter),
373 remap it to the equivalent region within EQUIV_PREV_FRAME.
375 For example, given param "n" within frame "foo@3", and equiv prev frame
376 "foo@1", remap it to param "n" within frame "foo@1". */
378 static const region *
379 remap_enclosing_frame (const region *base_reg,
380 const frame_region *enclosing_frame,
381 const frame_region *equiv_prev_frame,
382 region_model_manager *mgr)
384 gcc_assert (base_reg->get_parent_region () == enclosing_frame);
385 switch (base_reg->get_kind ())
387 default:
388 /* We should only encounter params and varargs at the topmost
389 entrypoint. */
390 gcc_unreachable ();
392 case RK_VAR_ARG:
394 const var_arg_region *var_arg_reg = (const var_arg_region *)base_reg;
395 return mgr->get_var_arg_region (equiv_prev_frame,
396 var_arg_reg->get_index ());
398 case RK_DECL:
400 const decl_region *decl_reg = (const decl_region *)base_reg;
401 return equiv_prev_frame->get_region_for_local (mgr,
402 decl_reg->get_decl (),
403 NULL);
408 /* Return true iff SVAL is unknown, or contains an unknown svalue. */
410 static bool
411 contains_unknown_p (const svalue *sval)
413 if (sval->get_kind () == SK_UNKNOWN)
414 return true;
415 if (const compound_svalue *compound_sval
416 = sval->dyn_cast_compound_svalue ())
417 for (auto iter : *compound_sval)
418 if (iter.second->get_kind () == SK_UNKNOWN)
419 return true;
420 return false;
423 /* Subroutine of sufficiently_different_p. Compare the store bindings
424 for BASE_REG within NEW_ENTRY_ENODE and PREV_ENTRY_ENODE.
426 Return true if the state of NEW_ENTRY_ENODE is sufficiently different
427 from PREV_ENTRY_ENODE within BASE_REG to suggest that some variant is
428 being modified, and thus the recursion isn't infinite.
430 Return false if the states for BASE_REG are effectively the same. */
432 static bool
433 sufficiently_different_region_binding_p (exploded_node *new_entry_enode,
434 exploded_node *prev_entry_enode,
435 const region *base_reg)
437 /* Compare the stores of the two enodes. */
438 const region_model &new_model
439 = *new_entry_enode->get_state ().m_region_model;
440 const region_model &prev_model
441 = *prev_entry_enode->get_state ().m_region_model;
443 /* Get the value within the new frame. */
444 const svalue *new_sval
445 = new_model.get_store_value (base_reg, NULL);
447 /* If any part of the value is UNKNOWN (e.g. due to hitting
448 complexity limits) assume that it differs from the previous
449 value. */
450 if (contains_unknown_p (new_sval))
451 return true;
453 /* Get the equivalent value within the old enode. */
454 const svalue *prev_sval;
456 if (const frame_region *enclosing_frame
457 = base_reg->maybe_get_frame_region ())
459 /* We have a binding within a frame in the new entry enode. */
461 /* Consider changes in bindings below the original entry
462 to the recursion. */
463 const int old_stack_depth = prev_entry_enode->get_stack_depth ();
464 if (enclosing_frame->get_stack_depth () < old_stack_depth)
465 prev_sval = prev_model.get_store_value (base_reg, NULL);
466 else
468 /* Ignore bindings within frames below the new entry node. */
469 const int new_stack_depth = new_entry_enode->get_stack_depth ();
470 if (enclosing_frame->get_stack_depth () < new_stack_depth)
471 return false;
473 /* We have a binding within the frame of the new entry node,
474 presumably a parameter. */
476 /* Get the value within the equivalent frame of
477 the old entrypoint; typically will be the initial_svalue
478 of the parameter. */
479 const frame_region *equiv_prev_frame
480 = prev_model.get_current_frame ();
481 const region *equiv_prev_base_reg
482 = remap_enclosing_frame (base_reg,
483 enclosing_frame,
484 equiv_prev_frame,
485 new_model.get_manager ());
486 prev_sval
487 = prev_model.get_store_value (equiv_prev_base_reg, NULL);
490 else
491 prev_sval = prev_model.get_store_value (base_reg, NULL);
493 /* If the prev_sval contains UNKNOWN (e.g. due to hitting complexity limits)
494 assume that it will differ from any new value. */
495 if (contains_unknown_p (prev_sval))
496 return true;
498 if (new_sval != prev_sval)
499 return true;
501 return false;
504 /* Compare the state of memory at NEW_ENTRY_ENODE and PREV_ENTRY_ENODE,
505 both of which are entrypoints to the same function, where recursion has
506 occurred.
508 Return true if the state of NEW_ENTRY_ENODE is sufficiently different
509 from PREV_ENTRY_ENODE to suggest that some variant is being modified,
510 and thus the recursion isn't infinite.
512 Return false if the states are effectively the same, suggesting that
513 the recursion is infinite.
515 For example, consider mutually recursive functions "foo" and "bar".
516 At the entrypoint to a "foo" frame where we've detected recursion,
517 we might have three frames on the stack: the new 'foo'@3, an inner
518 'bar'@2, and the innermost 'foo'@1.
520 (gdb) call enode->dump(m_ext_state)
521 EN: 16
522 callstring: [(SN: 9 -> SN: 3 in foo), (SN: 5 -> SN: 8 in bar)]
523 before SN: 0 (NULL from-edge)
525 rmodel:
526 stack depth: 3
527 frame (index 2): frame: ‘foo’@3
528 frame (index 1): frame: ‘bar’@2
529 frame (index 0): frame: ‘foo’@1
530 clusters within root region
531 cluster for: (*INIT_VAL(f_4(D)))
532 clusters within frame: ‘bar’@2
533 cluster for: b_2(D): INIT_VAL(f_4(D))
534 clusters within frame: ‘foo’@3
535 cluster for: f_4(D): INIT_VAL(f_4(D))
536 m_called_unknown_fn: FALSE
538 whereas for the previous entry node we'd have just the innermost
539 'foo'@1
541 (gdb) call prev_entry_enode->dump(m_ext_state)
542 EN: 1
543 callstring: []
544 before SN: 0 (NULL from-edge)
546 rmodel:
547 stack depth: 1
548 frame (index 0): frame: ‘foo’@1
549 clusters within root region
550 cluster for: (*INIT_VAL(f_4(D)))
551 m_called_unknown_fn: FALSE
553 We want to abstract away frames 1 and 2 in the new entry enode,
554 and compare its frame 3 with the frame 1 in the previous entry
555 enode, and determine if enough state changes between them to
556 rule out infinite recursion. */
558 static bool
559 sufficiently_different_p (exploded_node *new_entry_enode,
560 exploded_node *prev_entry_enode,
561 logger *logger)
563 LOG_SCOPE (logger);
564 gcc_assert (new_entry_enode);
565 gcc_assert (prev_entry_enode);
566 gcc_assert (is_entrypoint_p (new_entry_enode));
567 gcc_assert (is_entrypoint_p (prev_entry_enode));
569 /* Compare the stores of the two enodes. */
570 const region_model &new_model
571 = *new_entry_enode->get_state ().m_region_model;
572 const store &new_store = *new_model.get_store ();
574 for (auto kv : new_store)
576 const region *base_reg = kv.first;
577 if (sufficiently_different_region_binding_p (new_entry_enode,
578 prev_entry_enode,
579 base_reg))
580 return true;
583 /* No significant differences found. */
584 return false;
587 /* Implementation of -Wanalyzer-infinite-recursion.
589 Called when adding ENODE to the graph, after adding its first in-edge.
591 For function entrypoints, see if recursion has occurred, and, if so,
592 check if the state of memory changed between the recursion levels,
593 which would suggest some kind of decreasing variant that leads to
594 termination.
596 For recursive calls where the state of memory is effectively unchanged
597 between recursion levels, warn with -Wanalyzer-infinite-recursion. */
599 void
600 exploded_graph::detect_infinite_recursion (exploded_node *enode)
602 if (!is_entrypoint_p (enode))
603 return;
604 function *top_of_stack_fun = enode->get_function ();
605 gcc_assert (top_of_stack_fun);
607 /* ....where a call to that function is already in the call string. */
608 const call_string &call_string = enode->get_point ().get_call_string ();
610 if (call_string.count_occurrences_of_function (top_of_stack_fun) < 2)
611 return;
613 tree fndecl = top_of_stack_fun->decl;
615 log_scope s (get_logger (),
616 "checking for infinite recursion",
617 "considering recursion at EN: %i entering %qE",
618 enode->m_index, fndecl);
620 /* Find enode that's the entrypoint for the previous frame for fndecl
621 in the recursion. */
622 exploded_node *prev_entry_enode
623 = find_previous_entry_to (top_of_stack_fun, enode);
624 gcc_assert (prev_entry_enode);
625 if (get_logger ())
626 get_logger ()->log ("previous entrypoint to %qE is EN: %i",
627 fndecl, prev_entry_enode->m_index);
629 /* Look for changes to the state of memory between the recursion levels. */
630 if (sufficiently_different_p (enode, prev_entry_enode, get_logger ()))
631 return;
633 /* Otherwise, the state of memory is effectively the same between the two
634 recursion levels; warn. */
636 const supernode *caller_snode = call_string.get_top_of_stack ().m_caller;
637 const supernode *snode = enode->get_supernode ();
638 gcc_assert (caller_snode->m_returning_call);
639 pending_location ploc (enode,
640 snode,
641 caller_snode->m_returning_call,
642 nullptr);
643 get_diagnostic_manager ().add_diagnostic
644 (ploc,
645 make_unique<infinite_recursion_diagnostic> (prev_entry_enode,
646 enode,
647 fndecl));