analyzer: Fix up some -Wformat* warnings
[official-gcc.git] / gcc / analyzer / sm-malloc.cc
blob a518816b2b8b3f6a69108df2e400c3a20cecb376
1 /* A state machine for detecting misuses of the malloc/free API.
2 Copyright (C) 2019-2024 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #define INCLUDE_MEMORY
23 #include "system.h"
24 #include "coretypes.h"
25 #include "make-unique.h"
26 #include "tree.h"
27 #include "function.h"
28 #include "basic-block.h"
29 #include "gimple.h"
30 #include "options.h"
31 #include "bitmap.h"
32 #include "diagnostic-core.h"
33 #include "diagnostic-path.h"
34 #include "analyzer/analyzer.h"
35 #include "diagnostic-event-id.h"
36 #include "analyzer/analyzer-logging.h"
37 #include "analyzer/sm.h"
38 #include "analyzer/pending-diagnostic.h"
39 #include "analyzer/call-string.h"
40 #include "analyzer/program-point.h"
41 #include "analyzer/store.h"
42 #include "analyzer/region-model.h"
43 #include "analyzer/call-details.h"
44 #include "stringpool.h"
45 #include "attribs.h"
46 #include "analyzer/function-set.h"
47 #include "analyzer/program-state.h"
48 #include "analyzer/checker-event.h"
49 #include "analyzer/exploded-graph.h"
50 #include "analyzer/inlining-iterator.h"
52 #if ENABLE_ANALYZER
54 namespace ana {
56 namespace {
58 /* This state machine and its various support classes track allocations
59 and deallocations.
61 It has a few standard allocation/deallocation pairs (e.g. new/delete),
62 and also supports user-defined ones via
63 __attribute__ ((malloc(DEALLOCATOR))).
65 There can be more than one valid deallocator for a given allocator,
66 for example:
67 __attribute__ ((malloc (fclose)))
68 __attribute__ ((malloc (freopen, 3)))
69 FILE* fopen (const char*, const char*);
70 A deallocator_set represents a particular set of valid deallocators.
72 We track the expected deallocator_set for a value, but not the allocation
73 function - there could be more than one allocator per deallocator_set.
74 For example, there could be dozens of allocators for "free" beyond just
75 malloc, e.g. calloc, xstrdup, etc. We don't want to explode the number
76 of states by tracking individual allocators in the exploded graph;
77 we merely want to track "this value expects to have 'free' called on it".
78 Perhaps we can reconstruct which allocator was used later, when emitting
79 the path, if it's necessary for precision of wording of diagnostics. */
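/* Illustrative sketch (not part of this file's logic): given hypothetical
   user code such as

     #include <stdlib.h>

     extern void my_release (void *);

     __attribute__ ((malloc, malloc (my_release)))
     extern void *my_acquire (void);

     void test (void)
     {
       void *p = my_acquire ();   <-- p: "unchecked", expects my_release
       if (!p)
         return;
       free (p);                  <-- wrong deallocator for this set
     }

   the deallocator_set for my_acquire contains just "my_release", so passing
   p to "free" instead would be reported via
   -Wanalyzer-mismatching-deallocation.  The names "my_acquire" and
   "my_release" are hypothetical.  */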
81 class deallocator;
82 class deallocator_set;
83 class malloc_state_machine;
85 /* An enum for discriminating between different kinds of allocation_state. */
87 enum resource_state
89 /* States that are independent of allocator/deallocator. */
91 /* The start state. */
92 RS_START,
94 /* State for a pointer that's been unconditionally dereferenced. */
95 RS_ASSUMED_NON_NULL,
97 /* State for a pointer that's known to be NULL. */
98 RS_NULL,
100 /* State for a pointer that's known to not be on the heap (e.g. to a local
101 or global). */
102 RS_NON_HEAP,
104 /* Stop state, for pointers we don't want to track any more. */
105 RS_STOP,
107 /* States that relate to a specific deallocator_set. */
109 /* State for a pointer returned from an allocator that hasn't
110 been checked for NULL.
111 It could be a pointer to heap-allocated memory, or could be NULL. */
112 RS_UNCHECKED,
114 /* State for a pointer returned from an allocator,
115 known to be non-NULL. */
116 RS_NONNULL,
118 /* State for a pointer passed to a deallocator. */
119 RS_FREED
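/* A conceptual sketch of how these states map onto a simple flow
   (illustrative only; the precise transitions are implemented below):

     #include <stdlib.h>

     void test (size_t n)
     {
       char buf[16];
       char *q = buf;          <-- q: "non-heap" (points to the stack)
       char *p = malloc (n);   <-- p: "unchecked" (could be NULL)
       if (!p)
         return;               <-- p: "null" on the early-return path
       p[0] = 'x';             <-- p: "nonnull" on the fall-through
       free (p);               <-- p: "freed"; further use is use-after-free
     }

   The quoted state names above are the add_state names used later in
   this file.  */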
122 /* Custom state subclass, which can optionally refer to a
123 deallocator_set. */
125 struct allocation_state : public state_machine::state
127 allocation_state (const char *name, unsigned id,
128 enum resource_state rs,
129 const deallocator_set *deallocators,
130 const deallocator *deallocator)
131 : state (name, id), m_rs (rs),
132 m_deallocators (deallocators),
133 m_deallocator (deallocator)
136 void dump_to_pp (pretty_printer *pp) const override;
138 const allocation_state *get_nonnull () const;
140 enum resource_state m_rs;
141 const deallocator_set *m_deallocators;
142 const deallocator *m_deallocator;
145 /* Custom state subclass, for the "assumed-non-null" state
146 where the assumption happens in a particular frame. */
148 struct assumed_non_null_state : public allocation_state
150 assumed_non_null_state (const char *name, unsigned id,
151 const frame_region *frame)
152 : allocation_state (name, id, RS_ASSUMED_NON_NULL,
153 NULL, NULL),
154 m_frame (frame)
156 gcc_assert (m_frame);
159 void dump_to_pp (pretty_printer *pp) const final override;
161 const frame_region *m_frame;
164 /* An enum for choosing which wording to use in various diagnostics
165 when describing deallocations. */
167 enum wording
169 WORDING_FREED,
170 WORDING_DELETED,
171 WORDING_DEALLOCATED,
172 WORDING_REALLOCATED
175 /* Base class representing a deallocation function,
176 either a built-in one we know about, or one exposed via
177 __attribute__((malloc(DEALLOCATOR))). */
179 struct deallocator
181 hashval_t hash () const;
182 void dump_to_pp (pretty_printer *pp) const;
183 static int cmp (const deallocator *a, const deallocator *b);
184 static int cmp_ptr_ptr (const void *, const void *);
186 /* Name to use in diagnostics. */
187 const char *m_name;
189 /* Which wording to use in diagnostics. */
190 enum wording m_wording;
192 /* State for a value passed to one of the deallocators. */
193 state_machine::state_t m_freed;
195 protected:
196 deallocator (malloc_state_machine *sm,
197 const char *name,
198 enum wording wording);
201 /* Subclass representing a predefined deallocator.
202 e.g. "delete []", without needing a specific FUNCTION_DECL
203 ahead of time. */
205 struct standard_deallocator : public deallocator
207 standard_deallocator (malloc_state_machine *sm,
208 const char *name,
209 enum wording wording);
212 /* Subclass representing a user-defined deallocator
213 via __attribute__((malloc(DEALLOCATOR))) given
214 a specific FUNCTION_DECL. */
216 struct custom_deallocator : public deallocator
218 custom_deallocator (malloc_state_machine *sm,
219 tree deallocator_fndecl,
220 enum wording wording)
221 : deallocator (sm, IDENTIFIER_POINTER (DECL_NAME (deallocator_fndecl)),
222 wording)
227 /* Base class representing a set of possible deallocators.
228 Often this will be just a single deallocator, but some
229 allocators have multiple valid deallocators (e.g. the result of
230 "fopen" can be closed by either "fclose" or "freopen"). */
232 struct deallocator_set
234 deallocator_set (malloc_state_machine *sm,
235 enum wording wording);
236 virtual ~deallocator_set () {}
238 virtual bool contains_p (const deallocator *d) const = 0;
239 virtual const deallocator *maybe_get_single () const = 0;
240 virtual void dump_to_pp (pretty_printer *pp) const = 0;
241 void dump () const;
243 /* Which wording to use in diagnostics. */
244 enum wording m_wording;
246 /* Pointers to states.
247 These states are owned by the state_machine base class. */
249 /* State for an unchecked result from an allocator using this set. */
250 state_machine::state_t m_unchecked;
252 /* State for a known non-NULL result from such an allocator. */
253 state_machine::state_t m_nonnull;
256 /* Subclass of deallocator_set representing a set of deallocators
257 defined by one or more __attribute__((malloc(DEALLOCATOR))). */
259 struct custom_deallocator_set : public deallocator_set
261 typedef const auto_vec <const deallocator *> *key_t;
263 custom_deallocator_set (malloc_state_machine *sm,
264 const auto_vec <const deallocator *> *vec,
265 //const char *name,
266 //const char *dealloc_funcname,
267 //unsigned arg_idx,
268 enum wording wording);
270 bool contains_p (const deallocator *d) const final override;
271 const deallocator *maybe_get_single () const final override;
272 void dump_to_pp (pretty_printer *pp) const final override;
274 auto_vec <const deallocator *> m_deallocator_vec;
277 /* Subclass of deallocator_set representing a set of deallocators
278 with a single standard_deallocator, e.g. "delete []". */
280 struct standard_deallocator_set : public deallocator_set
282 standard_deallocator_set (malloc_state_machine *sm,
283 const char *name,
284 enum wording wording);
286 bool contains_p (const deallocator *d) const final override;
287 const deallocator *maybe_get_single () const final override;
288 void dump_to_pp (pretty_printer *pp) const final override;
290 standard_deallocator m_deallocator;
293 /* Traits class for ensuring uniqueness of deallocator_sets within
294 malloc_state_machine. */
296 struct deallocator_set_map_traits
298 typedef custom_deallocator_set::key_t key_type;
299 typedef custom_deallocator_set *value_type;
300 typedef custom_deallocator_set *compare_type;
302 static inline hashval_t hash (const key_type &k)
304 gcc_assert (k != NULL);
305 gcc_assert (k != reinterpret_cast<key_type> (1));
307 hashval_t result = 0;
308 unsigned i;
309 const deallocator *d;
310 FOR_EACH_VEC_ELT (*k, i, d)
311 result ^= d->hash ();
312 return result;
314 static inline bool equal_keys (const key_type &k1, const key_type &k2)
316 if (k1->length () != k2->length ())
317 return false;
319 for (unsigned i = 0; i < k1->length (); i++)
320 if ((*k1)[i] != (*k2)[i])
321 return false;
323 return true;
325 template <typename T>
326 static inline void remove (T &)
328 /* empty; the nodes are handled elsewhere. */
330 template <typename T>
331 static inline void mark_deleted (T &entry)
333 entry.m_key = reinterpret_cast<key_type> (1);
335 template <typename T>
336 static inline void mark_empty (T &entry)
338 entry.m_key = NULL;
340 template <typename T>
341 static inline bool is_deleted (const T &entry)
343 return entry.m_key == reinterpret_cast<key_type> (1);
345 template <typename T>
346 static inline bool is_empty (const T &entry)
348 return entry.m_key == NULL;
350 static const bool empty_zero_p = false;
353 /* A state machine for detecting misuses of the malloc/free API.
355 See sm-malloc.dot for an overview (keep this in-sync with that file). */
357 class malloc_state_machine : public state_machine
359 public:
360 typedef allocation_state custom_data_t;
362 malloc_state_machine (logger *logger);
363 ~malloc_state_machine ();
365 state_t
366 add_state (const char *name, enum resource_state rs,
367 const deallocator_set *deallocators,
368 const deallocator *deallocator);
370 bool inherited_state_p () const final override { return false; }
372 state_machine::state_t
373 get_default_state (const svalue *sval) const final override
375 if (tree cst = sval->maybe_get_constant ())
377 if (zerop (cst))
378 return m_null;
380 if (const region_svalue *ptr = sval->dyn_cast_region_svalue ())
382 const region *reg = ptr->get_pointee ();
383 switch (reg->get_memory_space ())
385 default:
386 break;
387 case MEMSPACE_CODE:
388 case MEMSPACE_GLOBALS:
389 case MEMSPACE_STACK:
390 case MEMSPACE_READONLY_DATA:
391 return m_non_heap;
394 return m_start;
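/* For example (illustrative): a literal (void *)0 defaults to m_null,
   a pointer to a local or global variable defaults to m_non_heap, and
   anything else (such as the result of an unknown function) defaults
   to m_start.  */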
397 bool on_stmt (sm_context *sm_ctxt,
398 const supernode *node,
399 const gimple *stmt) const final override;
401 void on_phi (sm_context *sm_ctxt,
402 const supernode *node,
403 const gphi *phi,
404 tree rhs) const final override;
406 void on_condition (sm_context *sm_ctxt,
407 const supernode *node,
408 const gimple *stmt,
409 const svalue *lhs,
410 enum tree_code op,
411 const svalue *rhs) const final override;
413 void on_pop_frame (sm_state_map *smap,
414 const frame_region *) const final override;
416 bool can_purge_p (state_t s) const final override;
417 std::unique_ptr<pending_diagnostic> on_leak (tree var) const final override;
419 bool reset_when_passed_to_unknown_fn_p (state_t s,
420 bool is_mutable) const final override;
422 state_t
423 maybe_get_merged_states_nonequal (state_t state_a,
424 state_t state_b) const final override;
426 static bool unaffected_by_call_p (tree fndecl);
428 void maybe_assume_non_null (sm_context *sm_ctxt,
429 tree ptr,
430 const gimple *stmt) const;
432 void on_realloc_with_move (region_model *model,
433 sm_state_map *smap,
434 const svalue *old_ptr_sval,
435 const svalue *new_ptr_sval,
436 const extrinsic_state &ext_state) const;
438 void transition_ptr_sval_non_null (region_model *model,
439 sm_state_map *smap,
440 const svalue *new_ptr_sval,
441 const extrinsic_state &ext_state) const;
443 standard_deallocator_set m_free;
444 standard_deallocator_set m_scalar_delete;
445 standard_deallocator_set m_vector_delete;
447 standard_deallocator m_realloc;
449 /* States that are independent of api. */
451 /* States for a pointer that's been unconditionally dereferenced
452 in a particular stack frame. */
453 hash_map<const frame_region *, state_t> m_assumed_non_null;
455 /* State for a pointer that's known to be NULL. */
456 state_t m_null;
458 /* State for a pointer that's known to not be on the heap (e.g. to a local
459 or global). */
460 state_t m_non_heap; // TODO: or should this be a different state machine?
461 // or do we need child values etc?
463 /* Stop state, for pointers we don't want to track any more. */
464 state_t m_stop;
466 private:
467 const custom_deallocator_set *
468 get_or_create_custom_deallocator_set (tree allocator_fndecl);
469 custom_deallocator_set *
470 maybe_create_custom_deallocator_set (tree allocator_fndecl);
471 const deallocator *
472 get_or_create_deallocator (tree deallocator_fndecl);
474 state_t
475 get_or_create_assumed_non_null_state_for_frame (const frame_region *frame);
477 void
478 maybe_complain_about_deref_before_check (sm_context *sm_ctxt,
479 const supernode *node,
480 const gimple *stmt,
481 const assumed_non_null_state *,
482 tree ptr) const;
484 void on_allocator_call (sm_context *sm_ctxt,
485 const gcall *call,
486 const deallocator_set *deallocators,
487 bool returns_nonnull = false) const;
488 void handle_free_of_non_heap (sm_context *sm_ctxt,
489 const supernode *node,
490 const gcall *call,
491 tree arg,
492 const deallocator *d) const;
493 void on_deallocator_call (sm_context *sm_ctxt,
494 const supernode *node,
495 const gcall *call,
496 const deallocator *d,
497 unsigned argno) const;
498 void on_realloc_call (sm_context *sm_ctxt,
499 const supernode *node,
500 const gcall *call) const;
501 void on_zero_assignment (sm_context *sm_ctxt,
502 const gimple *stmt,
503 tree lhs) const;
505 /* A map for consolidating deallocators so that they are
506 unique per deallocator FUNCTION_DECL. */
507 typedef hash_map<tree, deallocator *> deallocator_map_t;
508 deallocator_map_t m_deallocator_map;
510 /* Memoized lookups from FUNCTION_DECL to custom_deallocator_set *. */
511 typedef hash_map<tree, custom_deallocator_set *> deallocator_set_cache_t;
512 deallocator_set_cache_t m_custom_deallocator_set_cache;
514 /* A map for consolidating custom_deallocator_set instances. */
515 typedef hash_map<custom_deallocator_set::key_t,
516 custom_deallocator_set *,
517 deallocator_set_map_traits> custom_deallocator_set_map_t;
518 custom_deallocator_set_map_t m_custom_deallocator_set_map;
520 /* Record of dynamically-allocated objects, for cleanup. */
521 auto_vec <custom_deallocator_set *> m_dynamic_sets;
522 auto_vec <custom_deallocator *> m_dynamic_deallocators;
525 /* struct deallocator. */
527 deallocator::deallocator (malloc_state_machine *sm,
528 const char *name,
529 enum wording wording)
530 : m_name (name),
531 m_wording (wording),
532 m_freed (sm->add_state ("freed", RS_FREED, NULL, this))
536 hashval_t
537 deallocator::hash () const
539 return (hashval_t)m_freed->get_id ();
542 void
543 deallocator::dump_to_pp (pretty_printer *pp) const
545 pp_printf (pp, "%qs", m_name);
549 deallocator::cmp (const deallocator *a, const deallocator *b)
551 return (int)a->m_freed->get_id () - (int)b->m_freed->get_id ();
555 deallocator::cmp_ptr_ptr (const void *a, const void *b)
557 return cmp (*(const deallocator * const *)a,
558 *(const deallocator * const *)b);
562 /* struct standard_deallocator : public deallocator. */
564 standard_deallocator::standard_deallocator (malloc_state_machine *sm,
565 const char *name,
566 enum wording wording)
567 : deallocator (sm, name, wording)
571 /* struct deallocator_set. */
573 deallocator_set::deallocator_set (malloc_state_machine *sm,
574 enum wording wording)
575 : m_wording (wording),
576 m_unchecked (sm->add_state ("unchecked", RS_UNCHECKED, this, NULL)),
577 m_nonnull (sm->add_state ("nonnull", RS_NONNULL, this, NULL))
581 /* Dump a description of this deallocator_set to stderr. */
583 DEBUG_FUNCTION void
584 deallocator_set::dump () const
586 pretty_printer pp;
587 pp_show_color (&pp) = pp_show_color (global_dc->printer);
588 pp.buffer->stream = stderr;
589 dump_to_pp (&pp);
590 pp_newline (&pp);
591 pp_flush (&pp);
594 /* struct custom_deallocator_set : public deallocator_set. */
596 custom_deallocator_set::
597 custom_deallocator_set (malloc_state_machine *sm,
598 const auto_vec <const deallocator *> *vec,
599 enum wording wording)
600 : deallocator_set (sm, wording),
601 m_deallocator_vec (vec->length ())
603 unsigned i;
604 const deallocator *d;
605 FOR_EACH_VEC_ELT (*vec, i, d)
606 m_deallocator_vec.safe_push (d);
609 bool
610 custom_deallocator_set::contains_p (const deallocator *d) const
612 unsigned i;
613 const deallocator *cd;
614 FOR_EACH_VEC_ELT (m_deallocator_vec, i, cd)
615 if (cd == d)
616 return true;
617 return false;
620 const deallocator *
621 custom_deallocator_set::maybe_get_single () const
623 if (m_deallocator_vec.length () == 1)
624 return m_deallocator_vec[0];
625 return NULL;
628 void
629 custom_deallocator_set::dump_to_pp (pretty_printer *pp) const
631 pp_character (pp, '{');
632 unsigned i;
633 const deallocator *d;
634 FOR_EACH_VEC_ELT (m_deallocator_vec, i, d)
636 if (i > 0)
637 pp_string (pp, ", ");
638 d->dump_to_pp (pp);
640 pp_character (pp, '}');
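/* For example (illustrative): the custom set for the "fopen" example in
   the top-of-file comment would dump as something like "{fclose, freopen}"
   (the order follows the sorted deallocator vector), whereas a
   standard_deallocator_set dumps as e.g. "{free}".  */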
643 /* struct standard_deallocator_set : public deallocator_set. */
645 standard_deallocator_set::standard_deallocator_set (malloc_state_machine *sm,
646 const char *name,
647 enum wording wording)
648 : deallocator_set (sm, wording),
649 m_deallocator (sm, name, wording)
653 bool
654 standard_deallocator_set::contains_p (const deallocator *d) const
656 return d == &m_deallocator;
659 const deallocator *
660 standard_deallocator_set::maybe_get_single () const
662 return &m_deallocator;
665 void
666 standard_deallocator_set::dump_to_pp (pretty_printer *pp) const
668 pp_character (pp, '{');
669 pp_string (pp, m_deallocator.m_name);
670 pp_character (pp, '}');
673 /* Return STATE cast to the custom state subclass, or NULL for the start state.
674 Everything should be an allocation_state apart from the start state. */
676 static const allocation_state *
677 dyn_cast_allocation_state (state_machine::state_t state)
679 if (state->get_id () == 0)
680 return NULL;
681 return static_cast <const allocation_state *> (state);
684 /* Return STATE cast to the custom state subclass, for a state that is
685 already known to not be the start state. */
687 static const allocation_state *
688 as_a_allocation_state (state_machine::state_t state)
690 gcc_assert (state->get_id () != 0);
691 return static_cast <const allocation_state *> (state);
694 /* Get the resource_state for STATE. */
696 static enum resource_state
697 get_rs (state_machine::state_t state)
699 if (const allocation_state *astate = dyn_cast_allocation_state (state))
700 return astate->m_rs;
701 else
702 return RS_START;
705 /* Return true if STATE is the start state. */
707 static bool
708 start_p (state_machine::state_t state)
710 return get_rs (state) == RS_START;
713 /* Return true if STATE is an unchecked result from an allocator. */
715 static bool
716 unchecked_p (state_machine::state_t state)
718 return get_rs (state) == RS_UNCHECKED;
721 /* Return true if STATE is a non-null result from an allocator. */
723 static bool
724 nonnull_p (state_machine::state_t state)
726 return get_rs (state) == RS_NONNULL;
729 /* Return true if STATE is a value that has been passed to a deallocator. */
731 static bool
732 freed_p (state_machine::state_t state)
734 return get_rs (state) == RS_FREED;
737 /* Return true if STATE is a value that has been assumed to be non-NULL. */
739 static bool
740 assumed_non_null_p (state_machine::state_t state)
742 return get_rs (state) == RS_ASSUMED_NON_NULL;
745 /* Class for diagnostics relating to malloc_state_machine. */
747 class malloc_diagnostic : public pending_diagnostic
749 public:
750 malloc_diagnostic (const malloc_state_machine &sm, tree arg)
751 : m_sm (sm), m_arg (arg)
754 bool subclass_equal_p (const pending_diagnostic &base_other) const override
756 return same_tree_p (m_arg, ((const malloc_diagnostic &)base_other).m_arg);
759 label_text describe_state_change (const evdesc::state_change &change)
760 override
762 if (change.m_old_state == m_sm.get_start_state ()
763 && (unchecked_p (change.m_new_state) || nonnull_p (change.m_new_state)))
764 // TODO: verify that it's the allocation stmt, not a copy
765 return label_text::borrow ("allocated here");
766 if (unchecked_p (change.m_old_state)
767 && nonnull_p (change.m_new_state))
769 if (change.m_expr)
770 return change.formatted_print ("assuming %qE is non-NULL",
771 change.m_expr);
772 else
773 return change.formatted_print ("assuming %qs is non-NULL",
774 "<unknown>");
776 if (change.m_new_state == m_sm.m_null)
778 if (unchecked_p (change.m_old_state))
780 if (change.m_expr)
781 return change.formatted_print ("assuming %qE is NULL",
782 change.m_expr);
783 else
784 return change.formatted_print ("assuming %qs is NULL",
785 "<unknown>");
787 else
789 if (change.m_expr)
790 return change.formatted_print ("%qE is NULL",
791 change.m_expr);
792 else
793 return change.formatted_print ("%qs is NULL",
794 "<unknown>");
798 return label_text ();
801 diagnostic_event::meaning
802 get_meaning_for_state_change (const evdesc::state_change &change)
803 const final override
805 if (change.m_old_state == m_sm.get_start_state ()
806 && unchecked_p (change.m_new_state))
807 return diagnostic_event::meaning (diagnostic_event::VERB_acquire,
808 diagnostic_event::NOUN_memory);
809 if (freed_p (change.m_new_state))
810 return diagnostic_event::meaning (diagnostic_event::VERB_release,
811 diagnostic_event::NOUN_memory);
812 return diagnostic_event::meaning ();
815 protected:
816 const malloc_state_machine &m_sm;
817 tree m_arg;
820 /* Concrete subclass for reporting mismatching allocator/deallocator
821 diagnostics. */
823 class mismatching_deallocation : public malloc_diagnostic
825 public:
826 mismatching_deallocation (const malloc_state_machine &sm, tree arg,
827 const deallocator_set *expected_deallocators,
828 const deallocator *actual_dealloc)
829 : malloc_diagnostic (sm, arg),
830 m_expected_deallocators (expected_deallocators),
831 m_actual_dealloc (actual_dealloc)
834 const char *get_kind () const final override
836 return "mismatching_deallocation";
839 int get_controlling_option () const final override
841 return OPT_Wanalyzer_mismatching_deallocation;
844 bool emit (diagnostic_emission_context &ctxt) final override
846 auto_diagnostic_group d;
847 ctxt.add_cwe (762); /* CWE-762: Mismatched Memory Management Routines. */
848 if (const deallocator *expected_dealloc
849 = m_expected_deallocators->maybe_get_single ())
850 return ctxt.warn ("%qE should have been deallocated with %qs"
851 " but was deallocated with %qs",
852 m_arg, expected_dealloc->m_name,
853 m_actual_dealloc->m_name);
854 else
855 return ctxt.warn ("%qs called on %qE returned from a mismatched"
856 " allocation function",
857 m_actual_dealloc->m_name, m_arg);
860 label_text describe_state_change (const evdesc::state_change &change)
861 final override
863 if (unchecked_p (change.m_new_state))
865 m_alloc_event = change.m_event_id;
866 if (const deallocator *expected_dealloc
867 = m_expected_deallocators->maybe_get_single ())
868 return change.formatted_print ("allocated here"
869 " (expects deallocation with %qs)",
870 expected_dealloc->m_name);
871 else
872 return change.formatted_print ("allocated here");
874 return malloc_diagnostic::describe_state_change (change);
877 label_text describe_final_event (const evdesc::final_event &ev) final override
879 if (m_alloc_event.known_p ())
881 if (const deallocator *expected_dealloc
882 = m_expected_deallocators->maybe_get_single ())
883 return ev.formatted_print
884 ("deallocated with %qs here;"
885 " allocation at %@ expects deallocation with %qs",
886 m_actual_dealloc->m_name, &m_alloc_event,
887 expected_dealloc->m_name);
888 else
889 return ev.formatted_print
890 ("deallocated with %qs here;"
891 " allocated at %@",
892 m_actual_dealloc->m_name, &m_alloc_event);
894 return ev.formatted_print ("deallocated with %qs here",
895 m_actual_dealloc->m_name);
898 private:
899 diagnostic_event_id_t m_alloc_event;
900 const deallocator_set *m_expected_deallocators;
901 const deallocator *m_actual_dealloc;
904 /* Concrete subclass for reporting double-free diagnostics. */
906 class double_free : public malloc_diagnostic
908 public:
909 double_free (const malloc_state_machine &sm, tree arg, const char *funcname)
910 : malloc_diagnostic (sm, arg), m_funcname (funcname)
913 const char *get_kind () const final override { return "double_free"; }
915 int get_controlling_option () const final override
917 return OPT_Wanalyzer_double_free;
920 bool emit (diagnostic_emission_context &ctxt) final override
922 auto_diagnostic_group d;
923 ctxt.add_cwe (415); /* CWE-415: Double Free. */
924 return ctxt.warn ("double-%qs of %qE", m_funcname, m_arg);
927 label_text describe_state_change (const evdesc::state_change &change)
928 final override
930 if (freed_p (change.m_new_state))
932 m_first_free_event = change.m_event_id;
933 return change.formatted_print ("first %qs here", m_funcname);
935 return malloc_diagnostic::describe_state_change (change);
938 label_text describe_call_with_state (const evdesc::call_with_state &info)
939 final override
941 if (freed_p (info.m_state))
942 return info.formatted_print
943 ("passing freed pointer %qE in call to %qE from %qE",
944 info.m_expr, info.m_callee_fndecl, info.m_caller_fndecl);
945 return label_text ();
948 label_text describe_final_event (const evdesc::final_event &ev) final override
950 if (m_first_free_event.known_p ())
951 return ev.formatted_print ("second %qs here; first %qs was at %@",
952 m_funcname, m_funcname,
953 &m_first_free_event);
954 return ev.formatted_print ("second %qs here", m_funcname);
957 private:
958 diagnostic_event_id_t m_first_free_event;
959 const char *m_funcname;
962 /* Abstract subclass for describing possible bad uses of NULL.
963 Responsible for describing the call that could return NULL. */
965 class possible_null : public malloc_diagnostic
967 public:
968 possible_null (const malloc_state_machine &sm, tree arg)
969 : malloc_diagnostic (sm, arg)
972 label_text describe_state_change (const evdesc::state_change &change)
973 final override
975 if (change.m_old_state == m_sm.get_start_state ()
976 && unchecked_p (change.m_new_state))
978 m_origin_of_unchecked_event = change.m_event_id;
979 return label_text::borrow ("this call could return NULL");
981 return malloc_diagnostic::describe_state_change (change);
984 label_text describe_return_of_state (const evdesc::return_of_state &info)
985 final override
987 if (unchecked_p (info.m_state))
988 return info.formatted_print ("possible return of NULL to %qE from %qE",
989 info.m_caller_fndecl, info.m_callee_fndecl);
990 return label_text ();
993 protected:
994 diagnostic_event_id_t m_origin_of_unchecked_event;
997 /* Concrete subclass for describing dereference of a possible NULL
998 value. */
1000 class possible_null_deref : public possible_null
1002 public:
1003 possible_null_deref (const malloc_state_machine &sm, tree arg)
1004 : possible_null (sm, arg)
1007 const char *get_kind () const final override { return "possible_null_deref"; }
1009 int get_controlling_option () const final override
1011 return OPT_Wanalyzer_possible_null_dereference;
1014 bool emit (diagnostic_emission_context &ctxt) final override
1016 /* CWE-690: Unchecked Return Value to NULL Pointer Dereference. */
1017 ctxt.add_cwe (690);
1018 return ctxt.warn ("dereference of possibly-NULL %qE", m_arg);
1021 label_text describe_final_event (const evdesc::final_event &ev) final override
1023 if (m_origin_of_unchecked_event.known_p ())
1024 return ev.formatted_print ("%qE could be NULL: unchecked value from %@",
1025 ev.m_expr,
1026 &m_origin_of_unchecked_event);
1027 else
1028 return ev.formatted_print ("%qE could be NULL", ev.m_expr);
1033 /* Return true if FNDECL is a C++ method. */
1035 static bool
1036 method_p (tree fndecl)
1038 return TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE;
1041 /* Return a 1-based description of ARG_IDX (0-based) of FNDECL.
1042 Compare with %P in the C++ FE (implemented in cp/error.cc: parm_to_string
1043 as called from cp_printer). */
1045 static label_text
1046 describe_argument_index (tree fndecl, int arg_idx)
1048 if (method_p (fndecl))
1049 if (arg_idx == 0)
1050 return label_text::borrow ("'this'");
1051 pretty_printer pp;
1052 pp_printf (&pp, "%u", arg_idx + 1 - method_p (fndecl));
1053 return label_text::take (xstrdup (pp_formatted_text (&pp)));
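/* For example (illustrative): for a C++ method "void s::set (void *p)",
   argument index 0 is described as "'this'" and index 1 as "1"; for a
   plain function "void fn (void *p)", index 0 is described as "1".  */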
1056 /* Subroutine for use by possible_null_arg::emit and null_arg::emit.
1057 Issue a note informing that the pertinent argument must be non-NULL. */
1059 static void
1060 inform_nonnull_attribute (tree fndecl, int arg_idx)
1062 label_text arg_desc = describe_argument_index (fndecl, arg_idx);
1063 inform (DECL_SOURCE_LOCATION (fndecl),
1064 "argument %s of %qD must be non-null",
1065 arg_desc.get (), fndecl);
1066 /* Ideally we would use the location of the parm and underline the
1067 attribute also - but we don't have the location_t values at this point
1068 in the middle-end.
1069 For reference, the C and C++ FEs have get_fndecl_argument_location. */
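/* For example (illustrative): given a hypothetical declaration
     extern void fn (int, void *) __attribute__ ((nonnull (2)));
   the note reads roughly "argument 2 of 'fn' must be non-null", located
   at fn's declaration.  */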
1072 /* Concrete subclass for describing passing a possibly-NULL value to a
1073 function marked with __attribute__((nonnull)). */
1075 class possible_null_arg : public possible_null
1077 public:
1078 possible_null_arg (const malloc_state_machine &sm, tree arg,
1079 tree fndecl, int arg_idx)
1080 : possible_null (sm, arg),
1081 m_fndecl (fndecl), m_arg_idx (arg_idx)
1084 const char *get_kind () const final override { return "possible_null_arg"; }
1086 bool subclass_equal_p (const pending_diagnostic &base_other)
1087 const final override
1089 const possible_null_arg &sub_other
1090 = (const possible_null_arg &)base_other;
1091 return (same_tree_p (m_arg, sub_other.m_arg)
1092 && m_fndecl == sub_other.m_fndecl
1093 && m_arg_idx == sub_other.m_arg_idx);
1096 int get_controlling_option () const final override
1098 return OPT_Wanalyzer_possible_null_argument;
1101 bool emit (diagnostic_emission_context &ctxt) final override
1103 /* CWE-690: Unchecked Return Value to NULL Pointer Dereference. */
1104 auto_diagnostic_group d;
1105 ctxt.add_cwe (690);
1106 bool warned
1107 = ctxt.warn ("use of possibly-NULL %qE where non-null expected",
1108 m_arg);
1109 if (warned)
1110 inform_nonnull_attribute (m_fndecl, m_arg_idx);
1111 return warned;
1114 label_text describe_final_event (const evdesc::final_event &ev) final override
1116 label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
1117 label_text result;
1118 if (m_origin_of_unchecked_event.known_p ())
1119 result = ev.formatted_print ("argument %s (%qE) from %@ could be NULL"
1120 " where non-null expected",
1121 arg_desc.get (), ev.m_expr,
1122 &m_origin_of_unchecked_event);
1123 else
1124 result = ev.formatted_print ("argument %s (%qE) could be NULL"
1125 " where non-null expected",
1126 arg_desc.get (), ev.m_expr);
1127 return result;
1130 private:
1131 tree m_fndecl;
1132 int m_arg_idx;
1135 /* Concrete subclass for describing a dereference of a NULL value. */
1137 class null_deref : public malloc_diagnostic
1139 public:
1140 null_deref (const malloc_state_machine &sm, tree arg)
1141 : malloc_diagnostic (sm, arg) {}
1143 const char *get_kind () const final override { return "null_deref"; }
1145 int get_controlling_option () const final override
1147 return OPT_Wanalyzer_null_dereference;
1150 bool terminate_path_p () const final override { return true; }
1152 bool emit (diagnostic_emission_context &ctxt) final override
1154 /* CWE-476: NULL Pointer Dereference. */
1155 ctxt.add_cwe (476);
1156 return ctxt.warn ("dereference of NULL %qE", m_arg);
1159 label_text describe_return_of_state (const evdesc::return_of_state &info)
1160 final override
1162 if (info.m_state == m_sm.m_null)
1163 return info.formatted_print ("return of NULL to %qE from %qE",
1164 info.m_caller_fndecl, info.m_callee_fndecl);
1165 return label_text ();
1168 label_text describe_final_event (const evdesc::final_event &ev) final override
1170 return ev.formatted_print ("dereference of NULL %qE", ev.m_expr);
1173 /* Implementation of pending_diagnostic::supercedes_p for
1174 null-deref.
1176 We want null-deref to supercede use-of-uninitialized-value,
1177 so that if we have these at the same stmt, we don't emit
1178 a use-of-uninitialized, just the null-deref. */
1180 bool supercedes_p (const pending_diagnostic &other) const final override
1182 if (other.use_of_uninit_p ())
1183 return true;
1185 return false;
1189 /* Concrete subclass for describing passing a NULL value to a
1190 function marked with __attribute__((nonnull)). */
1192 class null_arg : public malloc_diagnostic
1194 public:
1195 null_arg (const malloc_state_machine &sm, tree arg,
1196 tree fndecl, int arg_idx)
1197 : malloc_diagnostic (sm, arg),
1198 m_fndecl (fndecl), m_arg_idx (arg_idx)
1201 const char *get_kind () const final override { return "null_arg"; }
1203 bool subclass_equal_p (const pending_diagnostic &base_other)
1204 const final override
1206 const null_arg &sub_other
1207 = (const null_arg &)base_other;
1208 return (same_tree_p (m_arg, sub_other.m_arg)
1209 && m_fndecl == sub_other.m_fndecl
1210 && m_arg_idx == sub_other.m_arg_idx);
1213 int get_controlling_option () const final override
1215 return OPT_Wanalyzer_null_argument;
1218 bool terminate_path_p () const final override { return true; }
1220 bool emit (diagnostic_emission_context &ctxt) final override
1222 /* CWE-476: NULL Pointer Dereference. */
1223 auto_diagnostic_group d;
1224 ctxt.add_cwe (476);
1226 bool warned;
1227 if (zerop (m_arg))
1228 warned = ctxt.warn ("use of NULL where non-null expected");
1229 else
1230 warned = ctxt.warn ("use of NULL %qE where non-null expected",
1231 m_arg);
1232 if (warned)
1233 inform_nonnull_attribute (m_fndecl, m_arg_idx);
1234 return warned;
1237 label_text describe_final_event (const evdesc::final_event &ev) final override
1239 label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
1240 label_text result;
1241 if (zerop (ev.m_expr))
1242 result = ev.formatted_print ("argument %s NULL where non-null expected",
1243 arg_desc.get ());
1244 else
1245 result = ev.formatted_print ("argument %s (%qE) NULL"
1246 " where non-null expected",
1247 arg_desc.get (), ev.m_expr);
1248 return result;
1251 private:
1252 tree m_fndecl;
1253 int m_arg_idx;
1256 class use_after_free : public malloc_diagnostic
1258 public:
1259 use_after_free (const malloc_state_machine &sm, tree arg,
1260 const deallocator *deallocator)
1261 : malloc_diagnostic (sm, arg),
1262 m_deallocator (deallocator)
1264 gcc_assert (deallocator);
1267 const char *get_kind () const final override { return "use_after_free"; }
1269 int get_controlling_option () const final override
1271 return OPT_Wanalyzer_use_after_free;
1274 bool emit (diagnostic_emission_context &ctxt) final override
1276 /* CWE-416: Use After Free. */
1277 ctxt.add_cwe (416);
1278 return ctxt.warn ("use after %<%s%> of %qE",
1279 m_deallocator->m_name, m_arg);
1282 label_text describe_state_change (const evdesc::state_change &change)
1283 final override
1285 if (freed_p (change.m_new_state))
1287 m_free_event = change.m_event_id;
1288 switch (m_deallocator->m_wording)
1290 default:
1291 case WORDING_REALLOCATED:
1292 gcc_unreachable ();
1293 case WORDING_FREED:
1294 return label_text::borrow ("freed here");
1295 case WORDING_DELETED:
1296 return label_text::borrow ("deleted here");
1297 case WORDING_DEALLOCATED:
1298 return label_text::borrow ("deallocated here");
1301 return malloc_diagnostic::describe_state_change (change);
1304 label_text describe_final_event (const evdesc::final_event &ev) final override
1306 const char *funcname = m_deallocator->m_name;
1307 if (m_free_event.known_p ())
1308 switch (m_deallocator->m_wording)
1310 default:
1311 case WORDING_REALLOCATED:
1312 gcc_unreachable ();
1313 case WORDING_FREED:
1314 return ev.formatted_print ("use after %<%s%> of %qE; freed at %@",
1315 funcname, ev.m_expr, &m_free_event);
1316 case WORDING_DELETED:
1317 return ev.formatted_print ("use after %<%s%> of %qE; deleted at %@",
1318 funcname, ev.m_expr, &m_free_event);
1319 case WORDING_DEALLOCATED:
1320 return ev.formatted_print ("use after %<%s%> of %qE;"
1321 " deallocated at %@",
1322 funcname, ev.m_expr, &m_free_event);
1324 else
1325 return ev.formatted_print ("use after %<%s%> of %qE",
1326 funcname, ev.m_expr);
1329 /* Implementation of pending_diagnostic::supercedes_p for
1330 use_after_free.
1332 We want use-after-free to supercede use-of-uninitialized-value,
1333 so that if we have these at the same stmt, we don't emit
1334 a use-of-uninitialized, just the use-after-free.
1335 (this is because we fully purge information about freed
1336 buffers when we free them to avoid state explosions, so
1337 that if they are accessed after the free, it looks like
1338 they are uninitialized). */
1340 bool supercedes_p (const pending_diagnostic &other) const final override
1342 if (other.use_of_uninit_p ())
1343 return true;
1345 return false;
1348 private:
1349 diagnostic_event_id_t m_free_event;
1350 const deallocator *m_deallocator;
1353 class malloc_leak : public malloc_diagnostic
1355 public:
1356 malloc_leak (const malloc_state_machine &sm, tree arg)
1357 : malloc_diagnostic (sm, arg) {}
1359 const char *get_kind () const final override { return "malloc_leak"; }
1361 int get_controlling_option () const final override
1363 return OPT_Wanalyzer_malloc_leak;
1366 bool emit (diagnostic_emission_context &ctxt) final override
1368 /* "CWE-401: Missing Release of Memory after Effective Lifetime". */
1369 ctxt.add_cwe (401);
1370 if (m_arg)
1371 return ctxt.warn ("leak of %qE", m_arg);
1372 else
1373 return ctxt.warn ("leak of %qs", "<unknown>");
1376 label_text describe_state_change (const evdesc::state_change &change)
1377 final override
1379 if (unchecked_p (change.m_new_state)
1380 || (start_p (change.m_old_state) && nonnull_p (change.m_new_state)))
1382 m_alloc_event = change.m_event_id;
1383 return label_text::borrow ("allocated here");
1385 return malloc_diagnostic::describe_state_change (change);
1388 label_text describe_final_event (const evdesc::final_event &ev) final override
1390 if (ev.m_expr)
1392 if (m_alloc_event.known_p ())
1393 return ev.formatted_print ("%qE leaks here; was allocated at %@",
1394 ev.m_expr, &m_alloc_event);
1395 else
1396 return ev.formatted_print ("%qE leaks here", ev.m_expr);
1398 else
1400 if (m_alloc_event.known_p ())
1401 return ev.formatted_print ("%qs leaks here; was allocated at %@",
1402 "<unknown>", &m_alloc_event);
1403 else
1404 return ev.formatted_print ("%qs leaks here", "<unknown>");
1408 private:
1409 diagnostic_event_id_t m_alloc_event;
1412 class free_of_non_heap : public malloc_diagnostic
1414 public:
1415 free_of_non_heap (const malloc_state_machine &sm, tree arg,
1416 const region *freed_reg,
1417 const char *funcname)
1418 : malloc_diagnostic (sm, arg), m_freed_reg (freed_reg), m_funcname (funcname)
1422 const char *get_kind () const final override { return "free_of_non_heap"; }
1424 bool subclass_equal_p (const pending_diagnostic &base_other) const
1425 final override
1427 const free_of_non_heap &other = (const free_of_non_heap &)base_other;
1428 return (same_tree_p (m_arg, other.m_arg)
1429 && m_freed_reg == other.m_freed_reg);
1432 int get_controlling_option () const final override
1434 return OPT_Wanalyzer_free_of_non_heap;
1437 bool emit (diagnostic_emission_context &ctxt) final override
1439 auto_diagnostic_group d;
1440 ctxt.add_cwe (590); /* CWE-590: Free of Memory not on the Heap. */
1441 switch (get_memory_space ())
1443 default:
1444 case MEMSPACE_HEAP:
1445 gcc_unreachable ();
1446 case MEMSPACE_UNKNOWN:
1447 case MEMSPACE_CODE:
1448 case MEMSPACE_GLOBALS:
1449 case MEMSPACE_READONLY_DATA:
1450 return ctxt.warn ("%<%s%> of %qE which points to memory"
1451 " not on the heap",
1452 m_funcname, m_arg);
1453 break;
1454 case MEMSPACE_STACK:
1455 return ctxt.warn ("%<%s%> of %qE which points to memory"
1456 " on the stack",
1457 m_funcname, m_arg);
1458 break;
1462 label_text describe_state_change (const evdesc::state_change &)
1463 final override
1465 return label_text::borrow ("pointer is from here");
1468 label_text describe_final_event (const evdesc::final_event &ev) final override
1470 return ev.formatted_print ("call to %qs here", m_funcname);
1473 void mark_interesting_stuff (interesting_t *interest) final override
1475 if (m_freed_reg)
1476 interest->add_region_creation (m_freed_reg);
1479 private:
1480 enum memory_space get_memory_space () const
1482 if (m_freed_reg)
1483 return m_freed_reg->get_memory_space ();
1484 else
1485 return MEMSPACE_UNKNOWN;
1488 const region *m_freed_reg;
1489 const char *m_funcname;
1492 /* Concrete pending_diagnostic subclass for -Wanalyzer-deref-before-check. */
1494 class deref_before_check : public malloc_diagnostic
1496 public:
1497 deref_before_check (const malloc_state_machine &sm, tree arg)
1498 : malloc_diagnostic (sm, arg),
1499 m_deref_enode (NULL),
1500 m_deref_expr (NULL),
1501 m_check_enode (NULL)
1503 gcc_assert (arg);
1506 const char *get_kind () const final override { return "deref_before_check"; }
1508 int get_controlling_option () const final override
1510 return OPT_Wanalyzer_deref_before_check;
1513 bool emit (diagnostic_emission_context &ctxt) final override
1515 /* Don't emit the warning if we can't show where the deref
1516 and the check occur. */
1517 if (!m_deref_enode)
1518 return false;
1519 if (!m_check_enode)
1520 return false;
1521 /* Only emit the warning for intraprocedural cases. */
1522 const program_point &deref_point = m_deref_enode->get_point ();
1523 const program_point &check_point = m_check_enode->get_point ();
1525 if (!program_point::effectively_intraprocedural_p (deref_point,
1526 check_point))
1527 return false;
1529 /* Reject the warning if the check occurs within a macro definition.
1530 This avoids false positives for such code as:
1532 #define throw_error \
1533 do { \
1534 if (p) \
1535 cleanup (p); \
1536 return; \
1537 } while (0)
1539 if (p->idx >= n)
1540 throw_error ();
1542 where the usage of "throw_error" implicitly adds a check
1543 on 'p'.
1545 We do warn when the check is in a macro expansion if we can get
1546 at the location of the condition and it isn't part of the
1547 definition, so that we warn for checks such as:
1548 if (words[0][0] == '@')
1549 return;
1550 g_assert(words[0] != NULL); <--- here
1551 Unfortunately we don't have locations for individual gimple
1552 arguments, so in:
1553 g_assert (ptr);
1554 we merely have a gimple_cond
1555 if (p_2(D) == 0B)
1556 with no way of getting at the location of the condition separately
1557 from that of the gimple_cond (where the "if" is within the macro
1558 definition). We reject the warning for such cases.
1560 We do warn when the *deref* occurs in a macro, since this can be
1561 a source of real bugs; see e.g. PR 77425. */
1562 location_t check_loc = m_check_enode->get_point ().get_location ();
1563 if (linemap_location_from_macro_definition_p (line_table, check_loc))
1564 return false;
1566 /* Reject if m_deref_expr is sufficiently different from m_arg
1567 for cases where the dereference is spelled differently from
1568 the check, which is probably two different ways to get the
1569 same svalue, and thus not worth reporting. */
1570 if (!m_deref_expr)
1571 return false;
1572 if (!sufficiently_similar_p (m_deref_expr, m_arg))
1573 return false;
1575 /* Reject the warning if the deref's BB doesn't dominate that
1576 of the check, so that we don't warn e.g. for shared cleanup
1577 code that checks a pointer for NULL, when that code is sometimes
1578 used before a deref and sometimes after.
1579 Using the dominance code requires setting cfun. */
1580 auto_cfun sentinel (m_deref_enode->get_function ());
1581 calculate_dominance_info (CDI_DOMINATORS);
1582 if (!dominated_by_p (CDI_DOMINATORS,
1583 m_check_enode->get_supernode ()->m_bb,
1584 m_deref_enode->get_supernode ()->m_bb))
1585 return false;
1587 return ctxt.warn ("check of %qE for NULL after already"
1588 " dereferencing it",
1589 m_arg);
1592 label_text describe_state_change (const evdesc::state_change &change)
1593 final override
1595 if (change.m_old_state == m_sm.get_start_state ()
1596 && assumed_non_null_p (change.m_new_state))
1598 m_first_deref_event = change.m_event_id;
1599 m_deref_enode = change.m_event.get_exploded_node ();
1600 m_deref_expr = change.m_expr;
1601 return change.formatted_print ("pointer %qE is dereferenced here",
1602 m_arg);
1604 return malloc_diagnostic::describe_state_change (change);
1607 label_text describe_final_event (const evdesc::final_event &ev) final override
1609 m_check_enode = ev.m_event.get_exploded_node ();
1610 if (m_first_deref_event.known_p ())
1611 return ev.formatted_print ("pointer %qE is checked for NULL here but"
1612 " it was already dereferenced at %@",
1613 m_arg, &m_first_deref_event);
1614 else
1615 return ev.formatted_print ("pointer %qE is checked for NULL here but"
1616 " it was already dereferenced",
1617 m_arg);
1620 private:
1621 static bool sufficiently_similar_p (tree expr_a, tree expr_b)
1623 pretty_printer *pp_a = global_dc->printer->clone ();
1624 pretty_printer *pp_b = global_dc->printer->clone ();
1625 pp_printf (pp_a, "%qE", expr_a);
1626 pp_printf (pp_b, "%qE", expr_b);
1627 bool result = (strcmp (pp_formatted_text (pp_a), pp_formatted_text (pp_b))
1628 == 0);
1629 delete pp_a;
1630 delete pp_b;
1631 return result;
1634 diagnostic_event_id_t m_first_deref_event;
1635 const exploded_node *m_deref_enode;
1636 tree m_deref_expr;
1637 const exploded_node *m_check_enode;
1640 /* struct allocation_state : public state_machine::state. */
1642 /* Implementation of state_machine::state::dump_to_pp vfunc
1643 for allocation_state: append the API that this allocation is
1644 associated with. */
1646 void
1647 allocation_state::dump_to_pp (pretty_printer *pp) const
1649 state_machine::state::dump_to_pp (pp);
1650 if (m_deallocators)
1652 pp_string (pp, " (");
1653 m_deallocators->dump_to_pp (pp);
1654 pp_character (pp, ')');
1658 /* Given an allocation_state for a deallocator_set, get the "nonnull" state
1659 for the corresponding allocator(s). */
1661 const allocation_state *
1662 allocation_state::get_nonnull () const
1664 gcc_assert (m_deallocators);
1665 return as_a_allocation_state (m_deallocators->m_nonnull);
1668 /* struct assumed_non_null_state : public allocation_state. */
1670 void
1671 assumed_non_null_state::dump_to_pp (pretty_printer *pp) const
1673 allocation_state::dump_to_pp (pp);
1674 pp_string (pp, " (in ");
1675 m_frame->dump_to_pp (pp, true);
1676 pp_character (pp, ')');
1679 /* malloc_state_machine's ctor. */
1681 malloc_state_machine::malloc_state_machine (logger *logger)
1682 : state_machine ("malloc", logger),
1683 m_free (this, "free", WORDING_FREED),
1684 m_scalar_delete (this, "delete", WORDING_DELETED),
1685 m_vector_delete (this, "delete[]", WORDING_DELETED),
1686 m_realloc (this, "realloc", WORDING_REALLOCATED)
1688 gcc_assert (m_start->get_id () == 0);
1689 m_null = add_state ("null", RS_FREED, NULL, NULL);
1690 m_non_heap = add_state ("non-heap", RS_NON_HEAP, NULL, NULL);
1691 m_stop = add_state ("stop", RS_STOP, NULL, NULL);
1694 malloc_state_machine::~malloc_state_machine ()
1696 unsigned i;
1697 custom_deallocator_set *set;
1698 FOR_EACH_VEC_ELT (m_dynamic_sets, i, set)
1699 delete set;
1700 custom_deallocator *d;
1701 FOR_EACH_VEC_ELT (m_dynamic_deallocators, i, d)
1702 delete d;
1705 state_machine::state_t
1706 malloc_state_machine::add_state (const char *name, enum resource_state rs,
1707 const deallocator_set *deallocators,
1708 const deallocator *deallocator)
1710 return add_custom_state (new allocation_state (name, alloc_state_id (),
1711 rs, deallocators,
1712 deallocator));
1715 /* If ALLOCATOR_FNDECL has any "__attribute__((malloc(FOO)))",
1716 return a custom_deallocator_set for them, consolidating them
1717 to ensure uniqueness of the sets.
1719 Return NULL if it has no such attributes. */
1721 const custom_deallocator_set *
1722 malloc_state_machine::
1723 get_or_create_custom_deallocator_set (tree allocator_fndecl)
1725 /* Early rejection of decls without attributes. */
1726 tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
1727 if (!attrs)
1728 return NULL;
1730 /* Otherwise, call maybe_create_custom_deallocator_set,
1731 memoizing the result. */
1732 if (custom_deallocator_set **slot
1733 = m_custom_deallocator_set_cache.get (allocator_fndecl))
1734 return *slot;
1735 custom_deallocator_set *set
1736 = maybe_create_custom_deallocator_set (allocator_fndecl);
1737 m_custom_deallocator_set_cache.put (allocator_fndecl, set);
1738 return set;
1741 /* Given ALLOCATOR_FNDECL, a FUNCTION_DECL with attributes,
1742 look for any "__attribute__((malloc(FOO)))" and return a
1743 custom_deallocator_set for them, consolidating them
1744 to ensure uniqueness of the sets.
1746 Return NULL if it has no such attributes.
1748 Subroutine of get_or_create_custom_deallocator_set which
1749 memoizes the result. */
1751 custom_deallocator_set *
1752 malloc_state_machine::
1753 maybe_create_custom_deallocator_set (tree allocator_fndecl)
1755 tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
1756 gcc_assert (attrs);
1758 /* Look for instances of __attribute__((malloc(FOO))). */
1759 auto_vec<const deallocator *> deallocator_vec;
1760 for (tree allocs = attrs;
1761 (allocs = lookup_attribute ("malloc", allocs));
1762 allocs = TREE_CHAIN (allocs))
1764 tree args = TREE_VALUE (allocs);
1765 if (!args)
1766 continue;
1767 if (TREE_VALUE (args))
1769 const deallocator *d
1770 = get_or_create_deallocator (TREE_VALUE (args));
1771 deallocator_vec.safe_push (d);
1775 /* If there weren't any deallocators, bail. */
1776 if (deallocator_vec.length () == 0)
1777 return NULL;
1779 /* Consolidate, so that we reuse existing deallocator_set
1780 instances. */
1781 deallocator_vec.qsort (deallocator::cmp_ptr_ptr);
1782 custom_deallocator_set **slot
1783 = m_custom_deallocator_set_map.get (&deallocator_vec);
1784 if (slot)
1785 return *slot;
1786 custom_deallocator_set *set
1787 = new custom_deallocator_set (this, &deallocator_vec, WORDING_DEALLOCATED);
1788 m_custom_deallocator_set_map.put (&set->m_deallocator_vec, set);
1789 m_dynamic_sets.safe_push (set);
1790 return set;
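/* For example (illustrative): given two hypothetical allocators

     extern void release_a (void *);
     extern void release_b (void *);

     __attribute__ ((malloc (release_a), malloc (release_b)))
     extern void *alloc_1 (void);

     __attribute__ ((malloc (release_b), malloc (release_a)))
     extern void *alloc_2 (void);

   both attribute lists sort to the same vector of deallocators, so the
   lookup above finds the same entry for both and the two allocators share
   a single custom_deallocator_set (and hence a single "unchecked" /
   "nonnull" state pair).  */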
1793 /* Get the deallocator for DEALLOCATOR_FNDECL, creating it if necessary. */
1795 const deallocator *
1796 malloc_state_machine::get_or_create_deallocator (tree deallocator_fndecl)
1798 deallocator **slot = m_deallocator_map.get (deallocator_fndecl);
1799 if (slot)
1800 return *slot;
1802 /* Reuse "free". */
1803 deallocator *d;
1804 if (is_named_call_p (deallocator_fndecl, "free")
1805 || is_std_named_call_p (deallocator_fndecl, "free")
1806 || is_named_call_p (deallocator_fndecl, "__builtin_free"))
1807 d = &m_free.m_deallocator;
1808 else
1810 custom_deallocator *cd
1811 = new custom_deallocator (this, deallocator_fndecl,
1812 WORDING_DEALLOCATED);
1813 m_dynamic_deallocators.safe_push (cd);
1814 d = cd;
1816 m_deallocator_map.put (deallocator_fndecl, d);
1817 return d;
1820 /* Get the "assumed-non-null" state for assumptions made within FRAME,
1821 creating it if necessary. */
1823 state_machine::state_t
1824 malloc_state_machine::
1825 get_or_create_assumed_non_null_state_for_frame (const frame_region *frame)
1827 if (state_t *slot = m_assumed_non_null.get (frame))
1828 return *slot;
1829 state_machine::state *new_state
1830 = new assumed_non_null_state ("assumed-non-null", alloc_state_id (), frame);
1831 add_custom_state (new_state);
1832 m_assumed_non_null.put (frame, new_state);
1833 return new_state;
1836 /* Try to identify the function declaration either by name or as a known malloc
1837 builtin. */
1839 static bool
1840 known_allocator_p (const_tree fndecl, const gcall *call)
1842 /* Either it is a function we know by name and number of arguments... */
1843 if (is_named_call_p (fndecl, "malloc", call, 1)
1844 || is_named_call_p (fndecl, "calloc", call, 2)
1845 || is_std_named_call_p (fndecl, "malloc", call, 1)
1846 || is_std_named_call_p (fndecl, "calloc", call, 2)
1847 || is_named_call_p (fndecl, "strdup", call, 1)
1848 || is_named_call_p (fndecl, "strndup", call, 2))
1849 return true;
1851 /* ... or it is a builtin allocator that allocates objects freed with
1852 __builtin_free. */
1853 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1854 switch (DECL_FUNCTION_CODE (fndecl))
1856 case BUILT_IN_MALLOC:
1857 case BUILT_IN_CALLOC:
1858 case BUILT_IN_STRDUP:
1859 case BUILT_IN_STRNDUP:
1860 return true;
1861 default:
1862 break;
1865 return false;
1868 /* If PTR's nullness is not known, transition it to the "assumed-non-null"
1869 state for the current frame. */
1871 void
1872 malloc_state_machine::maybe_assume_non_null (sm_context *sm_ctxt,
1873 tree ptr,
1874 const gimple *stmt) const
1876 const region_model *old_model = sm_ctxt->get_old_region_model ();
1877 if (!old_model)
1878 return;
1880 tree null_ptr_cst = build_int_cst (TREE_TYPE (ptr), 0);
1881 tristate known_non_null
1882 = old_model->eval_condition (ptr, NE_EXPR, null_ptr_cst, NULL);
1883 if (known_non_null.is_unknown ())
1885 /* Cast away const-ness for cache-like operations. */
1886 malloc_state_machine *mut_this
1887 = const_cast <malloc_state_machine *> (this);
1888 state_t next_state
1889 = mut_this->get_or_create_assumed_non_null_state_for_frame
1890 (old_model->get_current_frame ());
1891 sm_ctxt->set_next_state (stmt, ptr, next_state);
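/* For example (illustrative): in

     void test (struct foo *p)
     {
       p->field = 42;    <-- nullness of p unknown: p becomes "assumed-non-null"
       if (p)            <-- checking p only after the dereference
         use (p);
     }

   the write through p puts it into the per-frame "assumed-non-null" state,
   so the later NULL check can be flagged via -Wanalyzer-deref-before-check.
   "struct foo" and "use" are hypothetical.  */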
1895 /* Implementation of state_machine::on_stmt vfunc for malloc_state_machine. */
1897 bool
1898 malloc_state_machine::on_stmt (sm_context *sm_ctxt,
1899 const supernode *node,
1900 const gimple *stmt) const
1902 if (const gcall *call = dyn_cast <const gcall *> (stmt))
1903 if (tree callee_fndecl = sm_ctxt->get_fndecl_for_call (call))
1905 if (known_allocator_p (callee_fndecl, call))
1907 on_allocator_call (sm_ctxt, call, &m_free);
1908 return true;
1911 if (!is_placement_new_p (call))
1913 bool returns_nonnull = !TREE_NOTHROW (callee_fndecl)
1914 && flag_exceptions;
1915 if (is_named_call_p (callee_fndecl, "operator new"))
1916 on_allocator_call (sm_ctxt, call,
1917 &m_scalar_delete, returns_nonnull);
1918 else if (is_named_call_p (callee_fndecl, "operator new []"))
1919 on_allocator_call (sm_ctxt, call,
1920 &m_vector_delete, returns_nonnull);
1923 if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1924 || is_named_call_p (callee_fndecl, "operator delete", call, 2))
1926 on_deallocator_call (sm_ctxt, node, call,
1927 &m_scalar_delete.m_deallocator, 0);
1928 return true;
1930 else if (is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1932 on_deallocator_call (sm_ctxt, node, call,
1933 &m_vector_delete.m_deallocator, 0);
1934 return true;
        if (is_named_call_p (callee_fndecl, "alloca", call, 1)
            || is_named_call_p (callee_fndecl, "__builtin_alloca", call, 1))
          {
            tree lhs = gimple_call_lhs (call);
            if (lhs)
              sm_ctxt->on_transition (node, stmt, lhs, m_start, m_non_heap);
            return true;
          }

        if (is_named_call_p (callee_fndecl, "free", call, 1)
            || is_std_named_call_p (callee_fndecl, "free", call, 1)
            || is_named_call_p (callee_fndecl, "__builtin_free", call, 1))
          {
            on_deallocator_call (sm_ctxt, node, call,
                                 &m_free.m_deallocator, 0);
            return true;
          }

        if (is_named_call_p (callee_fndecl, "realloc", call, 2)
            || is_named_call_p (callee_fndecl, "__builtin_realloc", call, 2))
          {
            on_realloc_call (sm_ctxt, node, call);
            return true;
          }

        if (unaffected_by_call_p (callee_fndecl))
          return true;
        /* Cast away const-ness for cache-like operations.  */
        malloc_state_machine *mutable_this
          = const_cast <malloc_state_machine *> (this);

        /* Handle interesting attributes of the callee_fndecl,
           or prioritize those of the builtin that callee_fndecl is expected
           to be.
           Might want this to be controlled by a flag.  */
        tree fndecl = callee_fndecl;
        /* If the call is recognized as a builtin known_function, use that
           builtin's function_decl.  */
        if (const region_model *old_model = sm_ctxt->get_old_region_model ())
          if (const builtin_known_function *builtin_kf
              = old_model->get_builtin_kf (call))
            fndecl = builtin_kf->builtin_decl ();

        /* Handle "__attribute__((malloc(FOO)))".  */
        if (const deallocator_set *deallocators
              = mutable_this->get_or_create_custom_deallocator_set (fndecl))
          {
            tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (fndecl));
            bool returns_nonnull
              = lookup_attribute ("returns_nonnull", attrs);
            on_allocator_call (sm_ctxt, call, deallocators, returns_nonnull);
          }
        {
          /* Handle "__attribute__((nonnull))".  */
          tree fntype = TREE_TYPE (fndecl);
          bitmap nonnull_args = get_nonnull_args (fntype);
          if (nonnull_args)
            {
              for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
                    continue;
                  /* If we have nonnull-args, check either all pointer
                     arguments, or just the specified ones.  */
                  if (bitmap_empty_p (nonnull_args)
                      || bitmap_bit_p (nonnull_args, i))
                    {
                      state_t state = sm_ctxt->get_state (stmt, arg);
                      /* Can't use a switch as the states are non-const.  */
                      /* Do use the fndecl that caused the warning so that the
                         misused attributes are printed and the user is not
                         confused.  */
                      if (unchecked_p (state))
                        {
                          tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
                          sm_ctxt->warn (node, stmt, arg,
                                         make_unique<possible_null_arg>
                                           (*this, diag_arg, fndecl, i));
                          const allocation_state *astate
                            = as_a_allocation_state (state);
                          sm_ctxt->set_next_state (stmt, arg,
                                                   astate->get_nonnull ());
                        }
                      else if (state == m_null)
                        {
                          tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
                          sm_ctxt->warn (node, stmt, arg,
                                         make_unique<null_arg>
                                           (*this, diag_arg, fndecl, i));
                          sm_ctxt->set_next_state (stmt, arg, m_stop);
                        }
                      else if (state == m_start)
                        maybe_assume_non_null (sm_ctxt, arg, stmt);
                    }
                }
              BITMAP_FREE (nonnull_args);
            }
        }
        /* Check for this after nonnull, so that if we have both
           then we transition to "freed", rather than "checked".  */
        unsigned dealloc_argno = fndecl_dealloc_argno (fndecl);
        if (dealloc_argno != UINT_MAX)
          {
            const deallocator *d
              = mutable_this->get_or_create_deallocator (fndecl);
            on_deallocator_call (sm_ctxt, node, call, d, dealloc_argno);
          }
      }
  /* Look for pointers explicitly being compared against zero
     that are in state assumed_non_null, i.e. we already dereferenced
     them.
     We have to do this check here, rather than in on_condition,
     because we add a constraint that the pointer is non-null when
     dereferencing it, and this makes apply_constraints_for_gcond
     find known-true and known-false conditions; on_condition is only
     called when adding new constraints.  */
  if (const gcond *cond_stmt = dyn_cast <const gcond *> (stmt))
    {
      enum tree_code op = gimple_cond_code (cond_stmt);
      if (op == EQ_EXPR || op == NE_EXPR)
        {
          tree lhs = gimple_cond_lhs (cond_stmt);
          tree rhs = gimple_cond_rhs (cond_stmt);
          if (any_pointer_p (lhs)
              && any_pointer_p (rhs)
              && zerop (rhs))
            {
              state_t state = sm_ctxt->get_state (stmt, lhs);
              if (assumed_non_null_p (state))
                maybe_complain_about_deref_before_check
                  (sm_ctxt, node,
                   stmt,
                   (const assumed_non_null_state *)state,
                   lhs);
            }
        }
    }
  if (tree lhs = sm_ctxt->is_zero_assignment (stmt))
    if (any_pointer_p (lhs))
      on_zero_assignment (sm_ctxt, stmt, lhs);
  /* Handle dereferences.  */
  for (unsigned i = 0; i < gimple_num_ops (stmt); i++)
    {
      tree op = gimple_op (stmt, i);
      if (!op)
        continue;
      if (TREE_CODE (op) == COMPONENT_REF)
        op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) == MEM_REF)
        {
          tree arg = TREE_OPERAND (op, 0);

          state_t state = sm_ctxt->get_state (stmt, arg);
          if (state == m_start)
            maybe_assume_non_null (sm_ctxt, arg, stmt);
          else if (unchecked_p (state))
            {
              tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
              sm_ctxt->warn (node, stmt, arg,
                             make_unique<possible_null_deref> (*this,
                                                               diag_arg));
              const allocation_state *astate = as_a_allocation_state (state);
              sm_ctxt->set_next_state (stmt, arg, astate->get_nonnull ());
            }
          else if (state == m_null)
            {
              tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
              sm_ctxt->warn (node, stmt, arg,
                             make_unique<null_deref> (*this, diag_arg));
              sm_ctxt->set_next_state (stmt, arg, m_stop);
            }
          else if (freed_p (state))
            {
              tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
              const allocation_state *astate = as_a_allocation_state (state);
              sm_ctxt->warn (node, stmt, arg,
                             make_unique<use_after_free>
                               (*this, diag_arg, astate->m_deallocator));
              sm_ctxt->set_next_state (stmt, arg, m_stop);
            }
        }
    }

  return false;
}
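
/* For example (an illustrative sketch), in:

     int *p = (int *)malloc (sizeof (int));
     *p = 42;        // "p" is still in the "unchecked" state here

   the MEM_REF in the second statement triggers a possible_null_deref
   warning, since malloc may have returned NULL; after warning, "p" is
   treated as non-null to avoid duplicate reports.  Similarly,
   dereferencing a pointer in the "freed" state yields a use_after_free
   warning.  */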

/* Given a check against null of PTR in assumed-non-null state STATE,
   potentially add a deref_before_check warning to SM_CTXT.  */

void
malloc_state_machine::
maybe_complain_about_deref_before_check (sm_context *sm_ctxt,
                                         const supernode *node,
                                         const gimple *stmt,
                                         const assumed_non_null_state *state,
                                         tree ptr) const
{
  const region_model *model = sm_ctxt->get_old_region_model ();
  if (!model)
    return;

  /* Don't complain if the current frame (where the check is occurring) is
     deeper than the frame in which the "not null" assumption was made.
     This suppresses false positives for cases like:

       void foo (struct s *p)
       {
         int val = p->some_field; // deref here
         shared_helper (p);
       }

     where "shared_helper" has:

       void shared_helper (struct s *p)
       {
         if (!p) // check here
           return;
         // etc
       }

     since the check in "shared_helper" is OK.  */
  const frame_region *checked_in_frame = model->get_current_frame ();
  const frame_region *assumed_nonnull_in_frame = state->m_frame;
  if (checked_in_frame->get_index () > assumed_nonnull_in_frame->get_index ())
    return;

  /* Don't complain if STMT was inlined from another function, to avoid
     similar false positives involving shared helper functions.  */
  if (stmt->location)
    {
      inlining_info info (stmt->location);
      if (info.get_extra_frames () > 0)
        return;
    }

  tree diag_ptr = sm_ctxt->get_diagnostic_tree (ptr);
  if (diag_ptr)
    sm_ctxt->warn
      (node, stmt, ptr,
       make_unique<deref_before_check> (*this, diag_ptr));
  sm_ctxt->set_next_state (stmt, ptr, m_stop);
}
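
/* Conversely (an illustrative sketch), a case that is reported:

     void test (struct s *p)
     {
       int val = p->some_field; // deref here
       if (!p)                  // check here, in the same frame
         return;
     }

   The dereference put "p" into the assumed-non-null state for "test"'s
   frame, so the later NULL check in that same frame is flagged as a
   deref-before-check.  */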

/* Handle a call to an allocator.
   RETURNS_NONNULL is true if CALL is to a fndecl known to have
   __attribute__((returns_nonnull)).  */

void
malloc_state_machine::on_allocator_call (sm_context *sm_ctxt,
                                         const gcall *call,
                                         const deallocator_set *deallocators,
                                         bool returns_nonnull) const
{
  tree lhs = gimple_call_lhs (call);
  if (lhs)
    {
      if (sm_ctxt->get_state (call, lhs) == m_start)
        sm_ctxt->set_next_state (call, lhs,
                                 (returns_nonnull
                                  ? deallocators->m_nonnull
                                  : deallocators->m_unchecked));
    }
  else
    {
      /* TODO: report leak.  */
    }
}
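
/* For example (an illustrative sketch):

     void *p = malloc (64);

   transitions the lhs "p" from "start" to the "unchecked" state associated
   with the "free" deallocator-set; for an allocator declared with
   __attribute__((returns_nonnull)) the lhs would instead go straight to
   the "nonnull" state.  */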

/* Handle deallocations of non-heap pointers.
   non-heap -> stop, with warning.  */

void
malloc_state_machine::handle_free_of_non_heap (sm_context *sm_ctxt,
                                               const supernode *node,
                                               const gcall *call,
                                               tree arg,
                                               const deallocator *d) const
{
  tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
  const region *freed_reg = NULL;
  if (const program_state *old_state = sm_ctxt->get_old_program_state ())
    {
      const region_model *old_model = old_state->m_region_model;
      const svalue *ptr_sval = old_model->get_rvalue (arg, NULL);
      freed_reg = old_model->deref_rvalue (ptr_sval, arg, NULL);
    }
  sm_ctxt->warn (node, call, arg,
                 make_unique<free_of_non_heap>
                   (*this, diag_arg, freed_reg, d->m_name));
  sm_ctxt->set_next_state (call, arg, m_stop);
}
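
/* For example (an illustrative sketch):

     void test (void)
     {
       char buf[16];
       free (buf);   // "buf" is on the stack, not the heap
     }

   leads to a free_of_non_heap diagnostic, and the pointer is moved to the
   "stop" state so that no further complaints are made about it.  */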

void
malloc_state_machine::on_deallocator_call (sm_context *sm_ctxt,
                                           const supernode *node,
                                           const gcall *call,
                                           const deallocator *d,
                                           unsigned argno) const
{
  if (argno >= gimple_call_num_args (call))
    return;
  tree arg = gimple_call_arg (call, argno);

  state_t state = sm_ctxt->get_state (call, arg);

  /* start/assumed_non_null/unchecked/nonnull -> freed.  */
  if (state == m_start || assumed_non_null_p (state))
    sm_ctxt->set_next_state (call, arg, d->m_freed);
  else if (unchecked_p (state) || nonnull_p (state))
    {
      const allocation_state *astate = as_a_allocation_state (state);
      gcc_assert (astate->m_deallocators);
      if (!astate->m_deallocators->contains_p (d))
        {
          /* Wrong allocator.  */
          tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
          sm_ctxt->warn (node, call, arg,
                         make_unique<mismatching_deallocation>
                           (*this, diag_arg,
                            astate->m_deallocators,
                            d));
        }
      sm_ctxt->set_next_state (call, arg, d->m_freed);
    }

  /* Keep state "null" as-is, rather than transitioning to "freed";
     we don't want to complain about double-free of NULL.  */
  else if (state == d->m_freed)
    {
      /* freed -> stop, with warning.  */
      tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
      sm_ctxt->warn (node, call, arg,
                     make_unique<double_free> (*this, diag_arg, d->m_name));
      sm_ctxt->set_next_state (call, arg, m_stop);
    }
  else if (state == m_non_heap)
    {
      /* non-heap -> stop, with warning.  */
      handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
    }
}
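
/* For example (an illustrative sketch):

     int *p = (int *)malloc (sizeof (int));
     free (p);
     free (p);     // double_free: "p" is already in the "freed" state

   and:

     int *q = new int (42);
     free (q);     // mismatching_deallocation: "q" expects "delete"

   After the diagnostic, the pointer is treated as freed (or stopped), so
   cascading reports on the same path are avoided.  */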

/* Handle a call to "realloc".
   Check for free of non-heap or mismatching allocators,
   transitioning to the "stop" state for such cases.

   Otherwise, kf_realloc::impl_call_post will later
   get called (which will handle other sm-state transitions
   when the state is bifurcated).  */

void
malloc_state_machine::on_realloc_call (sm_context *sm_ctxt,
                                       const supernode *node,
                                       const gcall *call) const
{
  const unsigned argno = 0;
  const deallocator *d = &m_realloc;

  tree arg = gimple_call_arg (call, argno);

  state_t state = sm_ctxt->get_state (call, arg);

  if (unchecked_p (state) || nonnull_p (state))
    {
      const allocation_state *astate = as_a_allocation_state (state);
      gcc_assert (astate->m_deallocators);
      if (!astate->m_deallocators->contains_p (&m_free.m_deallocator))
        {
          /* Wrong allocator.  */
          tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
          sm_ctxt->warn (node, call, arg,
                         make_unique<mismatching_deallocation>
                           (*this, diag_arg,
                            astate->m_deallocators, d));
          sm_ctxt->set_next_state (call, arg, m_stop);
          if (path_context *path_ctxt = sm_ctxt->get_path_context ())
            path_ctxt->terminate_path ();
        }
    }
  else if (state == m_free.m_deallocator.m_freed)
    {
      /* freed -> stop, with warning.  */
      tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
      sm_ctxt->warn (node, call, arg,
                     make_unique<double_free> (*this, diag_arg, "free"));
      sm_ctxt->set_next_state (call, arg, m_stop);
      if (path_context *path_ctxt = sm_ctxt->get_path_context ())
        path_ctxt->terminate_path ();
    }
  else if (state == m_non_heap)
    {
      /* non-heap -> stop, with warning.  */
      handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
      if (path_context *path_ctxt = sm_ctxt->get_path_context ())
        path_ctxt->terminate_path ();
    }
}
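
/* For example (an illustrative sketch):

     char *p = new char[16];
     p = (char *)realloc (p, 32);   // mismatching_deallocation: "p" was
                                    // allocated with "new[]", not "malloc"

   The modelling of the success/failure outcomes of a valid realloc call
   is handled separately by kf_realloc::impl_call_post.  */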

/* Implementation of state_machine::on_phi vfunc for malloc_state_machine.  */

void
malloc_state_machine::on_phi (sm_context *sm_ctxt,
                              const supernode *node ATTRIBUTE_UNUSED,
                              const gphi *phi,
                              tree rhs) const
{
  if (zerop (rhs))
    {
      tree lhs = gimple_phi_result (phi);
      on_zero_assignment (sm_ctxt, phi, lhs);
    }
}

/* Implementation of state_machine::on_condition vfunc for
   malloc_state_machine.
   Potentially transition state 'unchecked' to 'nonnull' or to 'null'.  */

void
malloc_state_machine::on_condition (sm_context *sm_ctxt,
                                    const supernode *node ATTRIBUTE_UNUSED,
                                    const gimple *stmt,
                                    const svalue *lhs,
                                    enum tree_code op,
                                    const svalue *rhs) const
{
  if (!rhs->all_zeroes_p ())
    return;

  if (!any_pointer_p (lhs))
    return;
  if (!any_pointer_p (rhs))
    return;

  if (op == NE_EXPR)
    {
      log ("got 'ARG != 0' match");
      state_t s = sm_ctxt->get_state (stmt, lhs);
      if (unchecked_p (s))
        {
          const allocation_state *astate = as_a_allocation_state (s);
          sm_ctxt->set_next_state (stmt, lhs, astate->get_nonnull ());
        }
    }
  else if (op == EQ_EXPR)
    {
      log ("got 'ARG == 0' match");
      state_t s = sm_ctxt->get_state (stmt, lhs);
      if (unchecked_p (s))
        sm_ctxt->set_next_state (stmt, lhs, m_null);
    }
}
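
/* For example (an illustrative sketch):

     int *p = (int *)malloc (sizeof (int));   // "p": unchecked
     if (p)
       *p = 42;    // on this edge the NE_EXPR constraint moves "p" to
                   // "nonnull", so no possible-NULL-deref is reported
     else
       return;     // on this edge the EQ_EXPR constraint moves "p" to "null"
*/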

/* Implementation of state_machine::on_pop_frame vfunc for
   malloc_state_machine.
   Clear any "assumed-non-null" state where the assumption happened in
   FRAME_REG.  */

void
malloc_state_machine::on_pop_frame (sm_state_map *smap,
                                    const frame_region *frame_reg) const
{
  hash_set<const svalue *> svals_to_clear;
  for (auto kv : *smap)
    {
      const svalue *sval = kv.first;
      state_t state = kv.second.m_state;
      if (assumed_non_null_p (state))
        {
          const assumed_non_null_state *assumed_state
            = (const assumed_non_null_state *)state;
          if (frame_reg == assumed_state->m_frame)
            svals_to_clear.add (sval);
        }
    }
  for (auto sval : svals_to_clear)
    smap->clear_any_state (sval);
}

/* Implementation of state_machine::can_purge_p vfunc for
   malloc_state_machine.
   Don't allow purging of pointers in state 'unchecked' or 'nonnull'
   (to avoid false leak reports).  */

bool
malloc_state_machine::can_purge_p (state_t s) const
{
  enum resource_state rs = get_rs (s);
  return rs != RS_UNCHECKED && rs != RS_NONNULL;
}

/* Implementation of state_machine::on_leak vfunc for malloc_state_machine
   (for complaining about leaks of pointers in state 'unchecked' and
   'nonnull').  */

std::unique_ptr<pending_diagnostic>
malloc_state_machine::on_leak (tree var) const
{
  return make_unique<malloc_leak> (*this, var);
}

/* Implementation of state_machine::reset_when_passed_to_unknown_fn_p vfunc
   for malloc_state_machine.  */

bool
malloc_state_machine::reset_when_passed_to_unknown_fn_p (state_t s,
                                                         bool is_mutable) const
{
  /* An on-stack ptr doesn't stop being stack-allocated when passed to an
     unknown fn.  */
  if (s == m_non_heap)
    return false;

  /* Otherwise, pointers passed as non-const can be freed.  */
  return is_mutable;
}

/* Implementation of state_machine::maybe_get_merged_states_nonequal vfunc
   for malloc_state_machine.

   Support discarding "assumed-non-null" states when merging with
   the start state.  */

state_machine::state_t
malloc_state_machine::maybe_get_merged_states_nonequal (state_t state_a,
                                                        state_t state_b) const
{
  if (assumed_non_null_p (state_a) && state_b == m_start)
    return m_start;
  if (state_a == m_start && assumed_non_null_p (state_b))
    return m_start;
  return NULL;
}

/* Return true if calls to FNDECL are known to not affect this sm-state.  */

bool
malloc_state_machine::unaffected_by_call_p (tree fndecl)
{
  /* A set of functions that are known to not affect allocation
     status, even if we haven't fully modelled the rest of their
     behavior yet.  */
  static const char * const funcnames[] = {
    /* This array must be kept sorted.  */
    "strsep",
  };
  const size_t count = ARRAY_SIZE (funcnames);
  function_set fs (funcnames, count);

  if (fs.contains_decl_p (fndecl))
    return true;

  return false;
}

/* Shared logic for handling GIMPLE_ASSIGNs and GIMPLE_PHIs that
   assign zero to LHS.  */

void
malloc_state_machine::on_zero_assignment (sm_context *sm_ctxt,
                                          const gimple *stmt,
                                          tree lhs) const
{
  state_t s = sm_ctxt->get_state (stmt, lhs);
  enum resource_state rs = get_rs (s);
  if (rs == RS_START
      || rs == RS_UNCHECKED
      || rs == RS_NONNULL
      || rs == RS_FREED)
    sm_ctxt->set_next_state (stmt, lhs, m_null);
}
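
/* For example (an illustrative sketch):

     void *p = malloc (64);   // "p": unchecked
     free (p);                // "p": freed
     p = NULL;                // "p": null - a later free (p) is then a
                              //   no-op rather than a double-free report

   Assigning zero moves the pointer to the "null" state, which is what keeps
   the common "free then clear" idiom free of false positives.  */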

/* Special-case hook for handling realloc, for the "success with move to
   a new buffer" case, marking OLD_PTR_SVAL as freed and NEW_PTR_SVAL as
   non-null.

   This is similar to on_deallocator_call and on_allocator_call,
   but the checks happen in on_realloc_call, and by splitting the states.  */

void
malloc_state_machine::
on_realloc_with_move (region_model *model,
                      sm_state_map *smap,
                      const svalue *old_ptr_sval,
                      const svalue *new_ptr_sval,
                      const extrinsic_state &ext_state) const
{
  smap->set_state (model, old_ptr_sval,
                   m_free.m_deallocator.m_freed,
                   NULL, ext_state);

  smap->set_state (model, new_ptr_sval,
                   m_free.m_nonnull,
                   NULL, ext_state);
}
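
/* For example (an illustrative sketch):

     char *p = (char *)malloc (16);
     char *q = (char *)realloc (p, 4096);

   On the "success with move" outcome of the realloc, the old svalue bound
   to "p" is marked as freed (so a later use of "p" is a use-after-free)
   and the svalue bound to "q" is marked as non-null.  */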

/* Hook for get_or_create_region_for_heap_alloc for the case when we want
   PTR_SVAL to mark a newly created region as non-null on the malloc
   state machine.  */
void
malloc_state_machine::transition_ptr_sval_non_null (region_model *model,
                                                    sm_state_map *smap,
                                                    const svalue *new_ptr_sval,
                                                    const extrinsic_state &ext_state) const
{
  smap->set_state (model, new_ptr_sval, m_free.m_nonnull, NULL, ext_state);
}

} // anonymous namespace

/* Internal interface to this file.  */

state_machine *
make_malloc_state_machine (logger *logger)
{
  return new malloc_state_machine (logger);
}

/* Special-case hook for handling realloc, for use by
   kf_realloc::impl_call_post::success_with_move::update_model.  */

void
region_model::on_realloc_with_move (const call_details &cd,
                                    const svalue *old_ptr_sval,
                                    const svalue *new_ptr_sval)
{
  region_model_context *ctxt = cd.get_ctxt ();
  if (!ctxt)
    return;
  const extrinsic_state *ext_state = ctxt->get_ext_state ();
  if (!ext_state)
    return;

  sm_state_map *smap;
  const state_machine *sm;
  unsigned sm_idx;
  if (!ctxt->get_malloc_map (&smap, &sm, &sm_idx))
    return;

  gcc_assert (smap);
  gcc_assert (sm);

  const malloc_state_machine &malloc_sm
    = (const malloc_state_machine &)*sm;

  malloc_sm.on_realloc_with_move (this,
                                  smap,
                                  old_ptr_sval,
                                  new_ptr_sval,
                                  *ext_state);
}

/* Moves PTR_SVAL from the start state to the assumed-non-null state, for use
   by region_model::get_or_create_region_for_heap_alloc.  */
void
region_model::transition_ptr_sval_non_null (region_model_context *ctxt,
                                            const svalue *ptr_sval)
{
  if (!ctxt)
    return;
  const extrinsic_state *ext_state = ctxt->get_ext_state ();
  if (!ext_state)
    return;

  sm_state_map *smap;
  const state_machine *sm;
  unsigned sm_idx;
  if (!ctxt->get_malloc_map (&smap, &sm, &sm_idx))
    return;

  gcc_assert (smap);
  gcc_assert (sm);

  const malloc_state_machine &malloc_sm = (const malloc_state_machine &)*sm;

  malloc_sm.transition_ptr_sval_non_null (this, smap, ptr_sval, *ext_state);
}

} // namespace ana

#endif /* #if ENABLE_ANALYZER */