analyzer: deal with -fshort-enums
[official-gcc.git] / gcc / analyzer / store.h
blobcf10fa3b0108de6158e29117326e2a7a5aa47511
1 /* Classes for modeling the state of memory.
2 Copyright (C) 2020-2023 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #ifndef GCC_ANALYZER_STORE_H
22 #define GCC_ANALYZER_STORE_H
24 /* Implementation of the region-based ternary model described in:
25 "A Memory Model for Static Analysis of C Programs"
26 (Zhongxing Xu, Ted Kremenek, and Jian Zhang)
27 http://lcs.ios.ac.cn/~xuzb/canalyze/memmodel.pdf */
29 /* The store models memory as a collection of "clusters", where regions
30 are partitioned into clusters via their base region.
32 For example, given:
33 int a, b, c;
34 struct coord { double x; double y; } verts[3];
35 then "verts[0].y" and "verts[1].x" both have "verts" as their base region.
36 Each of a, b, c, and verts will have their own clusters, so that we
37 know that writes to e.g. "verts[1].x" don't affect e.g. "a".
39 Within each cluster we store a map of bindings to values, where the
40 binding keys can be either concrete or symbolic.
42 Concrete bindings affect a specific range of bits relative to the start
43 of the base region of the cluster, whereas symbolic bindings affect
44 a specific subregion within the cluster.
46 Consider (from the symbolic-1.c testcase):
48 char arr[1024];
49 arr[2] = a; (1)
50 arr[3] = b; (2)
51 After (1) and (2), the cluster for "arr" has concrete bindings
52 for bits 16-23 and for bits 24-31, with svalues "INIT_VAL(a)"
53 and "INIT_VAL(b)" respectively:
54 cluster: {bits 16-23: "INIT_VAL(a)",
55 bits 24-31: "INIT_VAL(b)";
56 flags: {}}
57 Attempting to query unbound subregions e.g. arr[4] will
58 return "UNINITIALIZED".
59 "a" and "b" are each in their own clusters, with no explicit
60 bindings, and thus implicitly have value INIT_VAL(a) and INIT_VAL(b).
62 arr[3] = c; (3)
63 After (3), the concrete binding for bits 24-31 is replaced with the
64 svalue "INIT_VAL(c)":
65 cluster: {bits 16-23: "INIT_VAL(a)", (from before)
66 bits 24-31: "INIT_VAL(c)"; (updated)
67 flags: {}}
69 arr[i] = d; (4)
70 After (4), we lose the concrete bindings and replace them with a
71 symbolic binding for "arr[i]", with svalue "INIT_VAL(d)". We also
72 mark the cluster as having been "symbolically touched": future
73 attempts to query the values of subregions other than "arr[i]",
74 such as "arr[3]" are "UNKNOWN", since we don't know if the write
75 to arr[i] affected them.
76 cluster: {symbolic_key(arr[i]): "INIT_VAL(d)";
77 flags: {TOUCHED}}
79 arr[j] = e; (5)
80 After (5), we lose the symbolic binding for "arr[i]" since we could
81 have overwritten it, and add a symbolic binding for "arr[j]".
82 cluster: {symbolic_key(arr[j]): "INIT_VAL(e)"; (different symbolic
83 flags: {TOUCHED}} binding)
85 arr[3] = f; (6)
86 After (6), we lose the symbolic binding for "arr[j]" since we could
87 have overwritten it, and gain a concrete binding for bits 24-31
88 again, this time with svalue "INIT_VAL(f)":
89 cluster: {bits 24-31: "INIT_VAL(f)";
90 flags: {TOUCHED}}
91 The cluster is still flagged as touched, so that we know that
92 accesses to other elements are "UNKNOWN" rather than
93 "UNINITIALIZED".
95 Handling symbolic regions requires us to handle aliasing.
97 In the first example above, each of a, b, c and verts are non-symbolic
98 base regions and so their clusters are "concrete clusters", whereas given:
99 struct coord *p, *q;
100 then "*p" and "*q" are symbolic base regions, and thus "*p" and "*q"
101 have "symbolic clusters".
103 In the above, "verts[i].x" will have a symbolic *binding* within a
104 concrete cluster for "verts", whereas "*p" is a symbolic *cluster*.
106 Writes to concrete clusters can't affect other concrete clusters,
107 but can affect symbolic clusters; e.g. after:
108 verts[0].x = 42;
109 we bind 42 in the cluster for "verts", but the clusters for "b" and "c"
110 can't be affected. Any symbolic clusters for *p and for *q can be
111 affected, since *p and *q could alias verts.
113 Writes to a symbolic cluster can affect other clusters, both
114 concrete and symbolic; e.g. after:
115 p->x = 17;
116 we bind 17 within the cluster for "*p". The concrete clusters for a, b,
117 c, and verts could be affected, depending on whether *p aliases them.
118 Similarly, the symbolic cluster to *q could be affected. */
120 namespace ana {
122 /* A class for keeping track of aspects of a program_state that we don't
123 know about, to avoid false positives about leaks.
125 Consider:
127 p->field = malloc (1024);
128 q->field = NULL;
130 where we don't know whether or not p and q point to the same memory,
131 and:
133 p->field = malloc (1024);
134 unknown_fn (p);
136 In both cases, the svalue for the address of the allocated buffer
137 goes from being bound to p->field to not having anything explicitly bound
138 to it.
140 Given that we conservatively discard bindings due to possible aliasing or
141 calls to unknown function, the store loses references to svalues,
142 but these svalues could still be live. We don't want to warn about
143 them leaking - they're effectively in a "maybe live" state.
145 This "maybe live" information is somewhat transient.
147 We don't want to store this "maybe live" information in the program_state,
148 region_model, or store, since we don't want to bloat these objects (and
149 potentially bloat the exploded_graph with more nodes).
150 However, we can't store it in the region_model_context, as these context
151 objects sometimes don't last long enough to be around when comparing the
152 old vs the new state.
154 This class is a way to track a set of such svalues, so that we can
155 temporarily capture that they are in a "maybe live" state whilst
156 comparing old and new states. */
class uncertainty_t
{
public:
  typedef hash_set<const svalue *>::iterator iterator;

  /* Record that SVAL might still be bound somewhere, e.g. after its
     binding was conservatively dropped due to possible aliasing.  */
  void on_maybe_bound_sval (const svalue *sval)
  {
    m_maybe_bound_svals.add (sval);
  }

  /* Record that SVAL has mutable sm-state at a call to an unknown
     function.  */
  void on_mutable_sval_at_unknown_call (const svalue *sval)
  {
    m_mutable_at_unknown_call_svals.add (sval);
  }

  /* Return true if SVAL is in either "maybe live" set, i.e. we can't
     be sure of its sm-state.  */
  bool unknown_sm_state_p (const svalue *sval)
  {
    return (m_maybe_bound_svals.contains (sval)
	    || m_mutable_at_unknown_call_svals.contains (sval));
  }

  void dump_to_pp (pretty_printer *pp, bool simple) const;
  void dump (bool simple) const;

  /* Iteration over the "maybe bound" set.  */
  iterator begin_maybe_bound_svals () const
  {
    return m_maybe_bound_svals.begin ();
  }
  iterator end_maybe_bound_svals () const
  {
    return m_maybe_bound_svals.end ();
  }

private:
  /* svalues that might or might not still be bound.  */
  hash_set<const svalue *> m_maybe_bound_svals;

  /* svalues that have mutable sm-state at unknown calls.  */
  hash_set<const svalue *> m_mutable_at_unknown_call_svals;
};
199 class byte_range;
200 class concrete_binding;
201 class symbolic_binding;
203 /* Abstract base class for describing ranges of bits within a binding_map
204 that can have svalues bound to them. */
class binding_key
{
public:
  virtual ~binding_key () {}

  /* Is this a concrete range of bits (as opposed to a symbolic
     subregion)?  */
  virtual bool concrete_p () const = 0;
  bool symbolic_p () const { return !concrete_p (); }

  /* Get the (consolidated) key for describing region R.  */
  static const binding_key *make (store_manager *mgr, const region *r);

  virtual void dump_to_pp (pretty_printer *pp, bool simple) const = 0;
  void dump (bool simple) const;
  label_text get_desc (bool simple=true) const;

  /* Comparators for use when sorting keys, e.g. for stable dumps.  */
  static int cmp_ptrs (const void *, const void *);
  static int cmp (const binding_key *, const binding_key *);

  /* Downcasts; the base class returns NULL, subclasses override the
     matching one to return "this".  */
  virtual const concrete_binding *dyn_cast_concrete_binding () const
  { return NULL; }
  virtual const symbolic_binding *dyn_cast_symbolic_binding () const
  { return NULL; }
};
228 /* A concrete range of bits. */
struct bit_range
{
  bit_range (bit_offset_t start_bit_offset, bit_size_t size_in_bits)
  : m_start_bit_offset (start_bit_offset),
    m_size_in_bits (size_in_bits)
  {}

  void dump_to_pp (pretty_printer *pp) const;
  void dump () const;

  /* Is this a zero-width range?  */
  bool empty_p () const
  {
    return m_size_in_bits == 0;
  }

  bit_offset_t get_start_bit_offset () const
  {
    return m_start_bit_offset;
  }
  /* Offset of the first bit *after* this range (i.e. exclusive end).  */
  bit_offset_t get_next_bit_offset () const
  {
    return m_start_bit_offset + m_size_in_bits;
  }
  /* Offset of the final bit within this range (inclusive end);
     the range must be non-empty.  */
  bit_offset_t get_last_bit_offset () const
  {
    gcc_assert (!empty_p ());
    return get_next_bit_offset () - 1;
  }

  /* Is OFFSET within [start, next)?  */
  bool contains_p (bit_offset_t offset) const
  {
    return (offset >= get_start_bit_offset ()
	    && offset < get_next_bit_offset ());
  }

  bool contains_p (const bit_range &other, bit_range *out) const;

  bool operator== (const bit_range &other) const
  {
    return (m_start_bit_offset == other.m_start_bit_offset
	    && m_size_in_bits == other.m_size_in_bits);
  }

  /* Do the half-open intervals overlap at all?  */
  bool intersects_p (const bit_range &other) const
  {
    return (get_start_bit_offset () < other.get_next_bit_offset ()
	    && other.get_start_bit_offset () < get_next_bit_offset ());
  }
  bool intersects_p (const bit_range &other,
		     bit_range *out_this,
		     bit_range *out_other) const;

  static int cmp (const bit_range &br1, const bit_range &br2);

  /* Shift this range down by OFFSET bits.  */
  bit_range operator- (bit_offset_t offset) const;

  /* Attempt to build a bit_range from the contiguous set bits in MASK.  */
  static bool from_mask (unsigned HOST_WIDE_INT mask, bit_range *out);

  /* Convert to a byte_range; fails if not byte-aligned.  */
  bool as_byte_range (byte_range *out) const;

  bit_offset_t m_start_bit_offset;
  bit_size_t m_size_in_bits;
};
294 /* A concrete range of bytes. */
struct byte_range
{
  byte_range (byte_offset_t start_byte_offset, byte_size_t size_in_bytes)
  : m_start_byte_offset (start_byte_offset),
    m_size_in_bytes (size_in_bytes)
  {}

  void dump_to_pp (pretty_printer *pp) const;
  void dump () const;

  /* Is this a zero-width range?  */
  bool empty_p () const
  {
    return m_size_in_bytes == 0;
  }

  /* Is OFFSET within [start, next)?  */
  bool contains_p (byte_offset_t offset) const
  {
    return (offset >= get_start_byte_offset ()
	    && offset < get_next_byte_offset ());
  }
  bool contains_p (const byte_range &other, byte_range *out) const;

  bool operator== (const byte_range &other) const
  {
    return (m_start_byte_offset == other.m_start_byte_offset
	    && m_size_in_bytes == other.m_size_in_bytes);
  }

  bool intersects_p (const byte_range &other,
		     byte_size_t *out_num_overlap_bytes) const;
  bool exceeds_p (const byte_range &other,
		  byte_range *out_overhanging_byte_range) const;
  bool falls_short_of_p (byte_offset_t offset,
			 byte_range *out_fall_short_bytes) const;

  byte_offset_t get_start_byte_offset () const
  {
    return m_start_byte_offset;
  }
  /* Offset of the first byte *after* this range (exclusive end).  */
  byte_offset_t get_next_byte_offset () const
  {
    return m_start_byte_offset + m_size_in_bytes;
  }
  /* Offset of the final byte (inclusive end); must be non-empty.  */
  byte_offset_t get_last_byte_offset () const
  {
    gcc_assert (!empty_p ());
    return m_start_byte_offset + m_size_in_bytes - 1;
  }

  /* Conversions to the bit-level equivalents, scaling by
     BITS_PER_UNIT.  */
  bit_range as_bit_range () const
  {
    return bit_range (m_start_byte_offset * BITS_PER_UNIT,
		      m_size_in_bytes * BITS_PER_UNIT);
  }
  bit_offset_t get_start_bit_offset () const
  {
    return m_start_byte_offset * BITS_PER_UNIT;
  }
  bit_offset_t get_next_bit_offset () const
  {
    return get_next_byte_offset () * BITS_PER_UNIT;
  }

  static int cmp (const byte_range &br1, const byte_range &br2);

  byte_offset_t m_start_byte_offset;
  byte_size_t m_size_in_bytes;
};
368 /* Concrete subclass of binding_key, for describing a non-empty
369 concrete range of bits within the binding_map (e.g. "bits 8-15"). */
class concrete_binding : public binding_key
{
public:
  /* This class is its own key for the purposes of consolidation.  */
  typedef concrete_binding key_t;

  /* The bit range must be non-empty.  */
  concrete_binding (bit_offset_t start_bit_offset, bit_size_t size_in_bits)
  : m_bit_range (start_bit_offset, size_in_bits)
  {
    gcc_assert (!m_bit_range.empty_p ());
  }
  bool concrete_p () const final override { return true; }

  hashval_t hash () const
  {
    inchash::hash hstate;
    hstate.add_wide_int (m_bit_range.m_start_bit_offset);
    hstate.add_wide_int (m_bit_range.m_size_in_bits);
    return hstate.end ();
  }
  bool operator== (const concrete_binding &other) const
  {
    return m_bit_range == other.m_bit_range;
  }

  void dump_to_pp (pretty_printer *pp, bool simple) const final override;

  const concrete_binding *dyn_cast_concrete_binding () const final override
  { return this; }

  const bit_range &get_bit_range () const { return m_bit_range; }
  /* Convert to a byte_range; fails if not byte-aligned.  */
  bool get_byte_range (byte_range *out) const;

  bit_offset_t get_start_bit_offset () const
  {
    return m_bit_range.m_start_bit_offset;
  }
  bit_size_t get_size_in_bits () const
  {
    return m_bit_range.m_size_in_bits;
  }
  /* Return the next bit offset after the end of this binding.  */
  bit_offset_t get_next_bit_offset () const
  {
    return m_bit_range.get_next_bit_offset ();
  }

  bool overlaps_p (const concrete_binding &other) const;

  static int cmp_ptr_ptr (const void *, const void *);

  /* Support for use as a hash_table entry (via default_hash_traits
     below): negative start offsets serve as the "deleted" and "empty"
     sentinels.  */
  void mark_deleted () { m_bit_range.m_start_bit_offset = -1; }
  void mark_empty () { m_bit_range.m_start_bit_offset = -2; }
  bool is_deleted () const { return m_bit_range.m_start_bit_offset == -1; }
  bool is_empty () const { return m_bit_range.m_start_bit_offset == -2; }

private:
  bit_range m_bit_range;
};
431 } // namespace ana
/* Support is_a<> / dyn_cast<> on binding_key: a binding_key is a
   concrete_binding iff concrete_p ().  */

template <>
template <>
inline bool
is_a_helper <const ana::concrete_binding *>::test (const ana::binding_key *key)
{
  return key->concrete_p ();
}
/* Hash traits for concrete_binding, using its own hash/equality members.
   A zero start offset is a valid binding, so "empty" can't be
   all-zeroes (mark_empty uses -2 instead).  */

template <> struct default_hash_traits<ana::concrete_binding>
: public member_function_hash_traits<ana::concrete_binding>
{
  static const bool empty_zero_p = false;
};
447 namespace ana {
449 /* Concrete subclass of binding_key, for describing a symbolic set of
450 bits within the binding_map in terms of a region (e.g. "arr[i]"). */
class symbolic_binding : public binding_key
{
public:
  /* This class is its own key for the purposes of consolidation.  */
  typedef symbolic_binding key_t;

  symbolic_binding (const region *region) : m_region (region) {}
  bool concrete_p () const final override { return false; }

  /* Hash on the (consolidated) region pointer.  */
  hashval_t hash () const
  {
    return (intptr_t)m_region;
  }
  bool operator== (const symbolic_binding &other) const
  {
    return m_region == other.m_region;
  }

  void dump_to_pp (pretty_printer *pp, bool simple) const final override;

  const symbolic_binding *dyn_cast_symbolic_binding () const final override
  { return this; }

  const region *get_region () const { return m_region; }

  static int cmp_ptr_ptr (const void *, const void *);

  /* Support for use as a hash_table entry (via default_hash_traits
     below): NULL is the "empty" sentinel, (const region *)1 is
     "deleted".  */
  void mark_deleted () { m_region = reinterpret_cast<const region *> (1); }
  void mark_empty () { m_region = NULL; }
  bool is_deleted () const
  { return m_region == reinterpret_cast<const region *> (1); }
  bool is_empty () const { return m_region == NULL; }

private:
  const region *m_region;
};
489 } // namespace ana
/* Hash traits for symbolic_binding, using its own hash/equality members.
   The "empty" sentinel is the NULL region, i.e. all-zeroes.  */

template <> struct default_hash_traits<ana::symbolic_binding>
: public member_function_hash_traits<ana::symbolic_binding>
{
  static const bool empty_zero_p = true;
};
497 namespace ana {
499 /* A mapping from binding_keys to svalues, for use by binding_cluster
500 and compound_svalue. */
class binding_map
{
public:
  typedef hash_map <const binding_key *, const svalue *> map_t;
  typedef map_t::iterator iterator_t;

  binding_map () : m_map () {}
  binding_map (const binding_map &other);
  binding_map& operator=(const binding_map &other);

  bool operator== (const binding_map &other) const;
  bool operator!= (const binding_map &other) const
  {
    return !(*this == other);
  }

  hashval_t hash () const;

  /* Get the svalue bound to KEY, or NULL if none.  */
  const svalue *get (const binding_key *key) const
  {
    const svalue **slot = const_cast<map_t &> (m_map).get (key);
    if (slot)
      return *slot;
    else
      return NULL;
  }
  /* Bind K to V (which must be non-NULL); returns true if K was
     already in the map (hash_map::put's convention).  */
  bool put (const binding_key *k, const svalue *v)
  {
    gcc_assert (v);
    return m_map.put (k, v);
  }

  void remove (const binding_key *k) { m_map.remove (k); }
  void empty () { m_map.empty (); }

  iterator_t begin () const { return m_map.begin (); }
  iterator_t end () const { return m_map.end (); }
  size_t elements () const { return m_map.elements (); }

  void dump_to_pp (pretty_printer *pp, bool simple, bool multiline) const;
  void dump (bool simple) const;

  json::object *to_json () const;

  /* Populate this map from CTOR (a CONSTRUCTOR tree) relative to
     PARENT_REG; returns false on problems.  */
  bool apply_ctor_to_region (const region *parent_reg, tree ctor,
			     region_model_manager *mgr);

  static int cmp (const binding_map &map1, const binding_map &map2);

  /* Remove bindings that overlap DROP_KEY, optionally recording the
     dropped svalues in UNCERTAINTY/MAYBE_LIVE_VALUES so they aren't
     falsely reported as leaked.  */
  void remove_overlapping_bindings (store_manager *mgr,
				    const binding_key *drop_key,
				    uncertainty_t *uncertainty,
				    svalue_set *maybe_live_values,
				    bool always_overlap);

private:
  void get_overlapping_bindings (const binding_key *key,
				 auto_vec<const binding_key *> *out);
  /* Helpers for apply_ctor_to_region.  */
  bool apply_ctor_val_to_range (const region *parent_reg,
				region_model_manager *mgr,
				tree min_index, tree max_index,
				tree val);
  bool apply_ctor_pair_to_child_region (const region *parent_reg,
					region_model_manager *mgr,
					tree index, tree val);

  map_t m_map;
};
571 /* Concept: BindingVisitor, for use by binding_cluster::for_each_binding
572 and store::for_each_binding.
574 Should implement:
575 void on_binding (const binding_key *key, const svalue *&sval);
578 /* All of the bindings within a store for regions that share the same
579 base region. */
class binding_cluster
{
public:
  friend class store;

  typedef hash_map <const binding_key *, const svalue *> map_t;
  typedef map_t::iterator iterator_t;

  binding_cluster (const region *base_region);
  binding_cluster (const binding_cluster &other);
  binding_cluster& operator=(const binding_cluster &other);

  bool operator== (const binding_cluster &other) const;
  bool operator!= (const binding_cluster &other) const
  {
    return !(*this == other);
  }

  hashval_t hash () const;

  /* Is the base region of this cluster symbolic (e.g. "*p"), as
     opposed to concrete (e.g. "verts")?  */
  bool symbolic_p () const;

  const region *get_base_region () const { return m_base_region; }

  void dump_to_pp (pretty_printer *pp, bool simple, bool multiline) const;
  void dump (bool simple) const;

  void validate () const;

  json::object *to_json () const;

  /* Bind an svalue to the key for the given region.  */
  void bind (store_manager *mgr, const region *, const svalue *);

  void clobber_region (store_manager *mgr, const region *reg);
  void purge_region (store_manager *mgr, const region *reg);
  void fill_region (store_manager *mgr, const region *reg, const svalue *sval);
  void zero_fill_region (store_manager *mgr, const region *reg);
  /* Conservatively mark REG_TO_BIND as unknown, dropping bindings that
     could overlap REG_FOR_OVERLAP; dropped svalues are recorded via
     UNCERTAINTY/MAYBE_LIVE_VALUES to avoid false leak reports.  */
  void mark_region_as_unknown (store_manager *mgr,
			       const region *reg_to_bind,
			       const region *reg_for_overlap,
			       uncertainty_t *uncertainty,
			       svalue_set *maybe_live_values);
  void purge_state_involving (const svalue *sval,
			      region_model_manager *sval_mgr);

  /* Lookup variants, from most exact to most forgiving.  */
  const svalue *get_binding (store_manager *mgr, const region *reg) const;
  const svalue *get_binding_recursive (store_manager *mgr,
				       const region *reg) const;
  const svalue *get_any_binding (store_manager *mgr,
				 const region *reg) const;
  const svalue *maybe_get_compound_binding (store_manager *mgr,
					    const region *reg) const;

  void remove_overlapping_bindings (store_manager *mgr, const region *reg,
				    uncertainty_t *uncertainty,
				    svalue_set *maybe_live_values);

  /* Call CB on each bound svalue in this cluster.  */
  template <typename T>
  void for_each_value (void (*cb) (const svalue *sval, T user_data),
		       T user_data) const
  {
    for (map_t::iterator iter = m_map.begin (); iter != m_map.end (); ++iter)
      cb ((*iter).second, user_data);
  }

  /* Attempt to merge CLUSTER_A and CLUSTER_B into OUT_CLUSTER when
     merging states; returns false if they can't be merged.  */
  static bool can_merge_p (const binding_cluster *cluster_a,
			   const binding_cluster *cluster_b,
			   binding_cluster *out_cluster,
			   store *out_store,
			   store_manager *mgr,
			   model_merger *merger);
  void make_unknown_relative_to (const binding_cluster *other_cluster,
				 store *out_store,
				 store_manager *mgr);

  void mark_as_escaped ();
  /* Handle the effects on this cluster of a call to an unknown
     function, or of an asm stmt.  */
  void on_unknown_fncall (const gcall *call, store_manager *mgr,
			  const conjured_purge &p);
  void on_asm (const gasm *stmt, store_manager *mgr,
	       const conjured_purge &p);

  bool escaped_p () const;
  bool touched_p () const { return m_touched; }

  bool redundant_p () const;
  bool empty_p () const { return m_map.elements () == 0; }

  void get_representative_path_vars (const region_model *model,
				     svalue_set *visited,
				     const region *base_reg,
				     const svalue *sval,
				     auto_vec<path_var> *out_pvs) const;

  const svalue *maybe_get_simple_value (store_manager *mgr) const;

  /* Call V.on_binding on each (key, svalue) pair in this cluster;
     see the BindingVisitor concept above.  */
  template <typename BindingVisitor>
  void for_each_binding (BindingVisitor &v) const
  {
    for (map_t::iterator iter = m_map.begin (); iter != m_map.end (); ++iter)
      {
	const binding_key *key = (*iter).first;
	const svalue *&sval = (*iter).second;
	v.on_binding (key, sval);
      }
  }

  iterator_t begin () const { return m_map.begin (); }
  iterator_t end () const { return m_map.end (); }

  const binding_map &get_map () const { return m_map; }

private:
  const svalue *get_any_value (const binding_key *key) const;
  void bind_compound_sval (store_manager *mgr,
			   const region *reg,
			   const compound_svalue *compound_sval);
  void bind_key (const binding_key *key, const svalue *sval);

  const region *m_base_region;

  binding_map m_map;

  /* Has a pointer to this cluster "escaped" into a part of the program
     we don't know about (via a call to a function with an unknown body,
     or by being passed in as a pointer param of a "top-level" function call).
     Such regions could be overwritten when other such functions are called,
     even if the region is no longer reachable by pointers that we are
     tracking.  */
  bool m_escaped;

  /* Has this cluster been written to via a symbolic binding?
     If so, then we don't know anything about unbound subregions,
     so we can't use initial_svalue, treat them as uninitialized, or
     inherit values from a parent region.  */
  bool m_touched;
};
718 /* The mapping from regions to svalues.
719 This is actually expressed by subdividing into clusters, to better
720 handle aliasing. */
class store
{
public:
  typedef hash_map <const region *, binding_cluster *> cluster_map_t;

  store ();
  store (const store &other);
  ~store ();

  store &operator= (const store &other);

  bool operator== (const store &other) const;
  bool operator!= (const store &other) const
  {
    return !(*this == other);
  }

  hashval_t hash () const;

  void dump_to_pp (pretty_printer *pp, bool summarize, bool multiline,
		   store_manager *mgr) const;
  void dump (bool simple) const;
  void summarize_to_pp (pretty_printer *pp, bool simple) const;

  void validate () const;

  json::object *to_json () const;

  const svalue *get_any_binding (store_manager *mgr, const region *reg) const;

  bool called_unknown_fn_p () const { return m_called_unknown_fn; }

  /* Bind RHS_SVAL to LHS_REG, handling possible aliasing between
     clusters; dropped bindings are recorded via UNCERTAINTY.  */
  void set_value (store_manager *mgr, const region *lhs_reg,
		  const svalue *rhs_sval,
		  uncertainty_t *uncertainty);
  void clobber_region (store_manager *mgr, const region *reg);
  void purge_region (store_manager *mgr, const region *reg);
  void fill_region (store_manager *mgr, const region *reg, const svalue *sval);
  void zero_fill_region (store_manager *mgr, const region *reg);
  void mark_region_as_unknown (store_manager *mgr, const region *reg,
			       uncertainty_t *uncertainty,
			       svalue_set *maybe_live_values);
  void purge_state_involving (const svalue *sval,
			      region_model_manager *sval_mgr);

  /* Cluster lookup by base region.  */
  const binding_cluster *get_cluster (const region *base_reg) const;
  binding_cluster *get_cluster (const region *base_reg);
  binding_cluster *get_or_create_cluster (const region *base_reg);
  void purge_cluster (const region *base_reg);

  /* Call CB on each base region that has a cluster.  */
  template <typename T>
  void for_each_cluster (void (*cb) (const region *base_reg, T user_data),
			 T user_data) const
  {
    for (cluster_map_t::iterator iter = m_cluster_map.begin ();
	 iter != m_cluster_map.end (); ++iter)
      cb ((*iter).first, user_data);
  }

  /* Attempt to merge STORE_A and STORE_B into OUT_STORE when merging
     program states; returns false if they can't be merged.  */
  static bool can_merge_p (const store *store_a, const store *store_b,
			   store *out_store, store_manager *mgr,
			   model_merger *merger);

  void mark_as_escaped (const region *base_reg);
  void on_unknown_fncall (const gcall *call, store_manager *mgr,
			  const conjured_purge &p);
  bool escaped_p (const region *reg) const;

  void get_representative_path_vars (const region_model *model,
				     svalue_set *visited,
				     const svalue *sval,
				     auto_vec<path_var> *out_pvs) const;

  cluster_map_t::iterator begin () const { return m_cluster_map.begin (); }
  cluster_map_t::iterator end () const { return m_cluster_map.end (); }

  /* Can BASE_REG_A and BASE_REG_B alias each other?  */
  tristate eval_alias (const region *base_reg_a,
		       const region *base_reg_b) const;

  /* Call V.on_binding on every binding in every cluster;
     see the BindingVisitor concept above.  */
  template <typename BindingVisitor>
  void for_each_binding (BindingVisitor &v)
  {
    for (cluster_map_t::iterator iter = m_cluster_map.begin ();
	 iter != m_cluster_map.end (); ++iter)
      (*iter).second->for_each_binding (v);
  }

  void canonicalize (store_manager *mgr);
  void loop_replay_fixup (const store *other_store,
			  region_model_manager *mgr);

  /* Support for replaying the effects of a call summary.  */
  void replay_call_summary (call_summary_replay &r,
			    const store &summary);
  void replay_call_summary_cluster (call_summary_replay &r,
				    const store &summary,
				    const region *base_reg);
  void on_maybe_live_values (const svalue_set &maybe_live_values);

private:
  void remove_overlapping_bindings (store_manager *mgr, const region *reg,
				    uncertainty_t *uncertainty);
  tristate eval_alias_1 (const region *base_reg_a,
			 const region *base_reg_b) const;

  cluster_map_t m_cluster_map;

  /* If this is true, then unknown code has been called, and so
     any global variable that isn't currently modelled by the store
     has unknown state, rather than being in an "initial state".
     This is to avoid having to mark (and thus explicitly track)
     every global when an unknown function is called; instead, they
     can be tracked implicitly.  */
  bool m_called_unknown_fn;
};
837 /* A class responsible for owning and consolidating binding keys
838 (both concrete and symbolic).
839 Key instances are immutable as far as clients are concerned, so they
840 are provided as "const" ptrs. */
class store_manager
{
public:
  store_manager (region_model_manager *mgr) : m_mgr (mgr) {}

  logger *get_logger () const;

  /* binding consolidation.  */
  const concrete_binding *
  get_concrete_binding (bit_offset_t start_bit_offset,
			bit_offset_t size_in_bits);
  /* Overload: consolidated key for a bit_range.  */
  const concrete_binding *
  get_concrete_binding (const bit_range &bits)
  {
    return get_concrete_binding (bits.get_start_bit_offset (),
				 bits.m_size_in_bits);
  }
  /* Overload: consolidated key for a byte_range, converted to bits.  */
  const concrete_binding *
  get_concrete_binding (const byte_range &bytes)
  {
    bit_range bits = bytes.as_bit_range ();
    return get_concrete_binding (bits);
  }
  const symbolic_binding *
  get_symbolic_binding (const region *region);

  region_model_manager *get_svalue_manager () const
  {
    return m_mgr;
  }

  void log_stats (logger *logger, bool show_objs) const;

private:
  region_model_manager *m_mgr;
  /* Owned, consolidated binding keys, so that clients can work with
     "const" pointers and compare keys by pointer identity.  */
  consolidation_map<concrete_binding> m_concrete_binding_key_mgr;
  consolidation_map<symbolic_binding> m_symbolic_binding_key_mgr;
};
881 } // namespace ana
883 #endif /* GCC_ANALYZER_STORE_H */