Disable tests for strdup/strndup on __hpux__
[official-gcc.git] / gcc / analyzer / store.h
blob2b603654946d492c2153768d75083b909be296f4
1 /* Classes for modeling the state of memory.
2 Copyright (C) 2020-2024 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #ifndef GCC_ANALYZER_STORE_H
22 #define GCC_ANALYZER_STORE_H
24 /* Implementation of the region-based ternary model described in:
25 "A Memory Model for Static Analysis of C Programs"
26 (Zhongxing Xu, Ted Kremenek, and Jian Zhang)
27 http://lcs.ios.ac.cn/~xuzb/canalyze/memmodel.pdf */
29 /* The store models memory as a collection of "clusters", where regions
30 are partitioned into clusters via their base region.
32 For example, given:
33 int a, b, c;
34 struct coord { double x; double y; } verts[3];
35 then "verts[0].y" and "verts[1].x" both have "verts" as their base region.
36 Each of a, b, c, and verts will have their own clusters, so that we
37 know that writes to e.g. "verts[1].x" don't affect e.g. "a".
39 Within each cluster we store a map of bindings to values, where the
40 binding keys can be either concrete or symbolic.
42 Concrete bindings affect a specific range of bits relative to the start
43 of the base region of the cluster, whereas symbolic bindings affect
44 a specific subregion within the cluster.
46 Consider (from the symbolic-1.c testcase):
48 char arr[1024];
49 arr[2] = a; (1)
50 arr[3] = b; (2)
51 After (1) and (2), the cluster for "arr" has concrete bindings
52 for bits 16-23 and for bits 24-31, with svalues "INIT_VAL(a)"
53 and "INIT_VAL(b)" respectively:
54 cluster: {bits 16-23: "INIT_VAL(a)",
55 bits 24-31: "INIT_VAL(b)";
56 flags: {}}
57 Attempting to query unbound subregions e.g. arr[4] will
58 return "UNINITIALIZED".
59 "a" and "b" are each in their own clusters, with no explicit
60 bindings, and thus implicitly have value INIT_VAL(a) and INIT_VAL(b).
62 arr[3] = c; (3)
63 After (3), the concrete binding for bits 24-31 is replaced with the
64 svalue "INIT_VAL(c)":
65 cluster: {bits 16-23: "INIT_VAL(a)", (from before)
66 bits 24-31: "INIT_VAL(c)"; (updated)
67 flags: {}}
69 arr[i] = d; (4)
70 After (4), we lose the concrete bindings and replace them with a
71 symbolic binding for "arr[i]", with svalue "INIT_VAL(d)". We also
72 mark the cluster as having been "symbolically touched": future
73 attempts to query the values of subregions other than "arr[i]",
74 such as "arr[3]" are "UNKNOWN", since we don't know if the write
75 to arr[i] affected them.
76 cluster: {symbolic_key(arr[i]): "INIT_VAL(d)";
77 flags: {TOUCHED}}
79 arr[j] = e; (5)
80 After (5), we lose the symbolic binding for "arr[i]" since we could
81 have overwritten it, and add a symbolic binding for "arr[j]".
82 cluster: {symbolic_key(arr[j]): "INIT_VAL(e)"; (different symbolic
83 flags: {TOUCHED}} binding)
85 arr[3] = f; (6)
86 After (6), we lose the symbolic binding for "arr[j]" since we could
87 have overwritten it, and gain a concrete binding for bits 24-31
88 again, this time with svalue "INIT_VAL(f)":
89 cluster: {bits 24-31: "INIT_VAL(f)";
90 flags: {TOUCHED}}
91 The cluster is still flagged as touched, so that we know that
92 accesses to other elements are "UNKNOWN" rather than
93 "UNINITIALIZED".
95 Handling symbolic regions requires us to handle aliasing.
97 In the first example above, each of a, b, c and verts are non-symbolic
98 base regions and so their clusters are "concrete clusters", whereas given:
99 struct coord *p, *q;
100 then "*p" and "*q" are symbolic base regions, and thus "*p" and "*q"
101 have "symbolic clusters".
103 In the above, "verts[i].x" will have a symbolic *binding* within a
104 concrete cluster for "verts", whereas "*p" is a symbolic *cluster*.
106 Writes to concrete clusters can't affect other concrete clusters,
107 but can affect symbolic clusters; e.g. after:
108 verts[0].x = 42;
109 we bind 42 in the cluster for "verts", but the clusters for "b" and "c"
110 can't be affected. Any symbolic clusters for *p and for *q can be
111 affected, as *p and *q could alias verts.
113 Writes to a symbolic cluster can affect other clusters, both
114 concrete and symbolic; e.g. after:
115 p->x = 17;
116 we bind 17 within the cluster for "*p". The concrete clusters for a, b,
117 c, and verts could be affected, depending on whether *p aliases them.
118 Similarly, the symbolic cluster to *q could be affected. */
120 namespace ana {
122 /* A class for keeping track of aspects of a program_state that we don't
123 know about, to avoid false positives about leaks.
125 Consider:
127 p->field = malloc (1024);
128 q->field = NULL;
130 where we don't know whether or not p and q point to the same memory,
131 and:
133 p->field = malloc (1024);
134 unknown_fn (p);
136 In both cases, the svalue for the address of the allocated buffer
137 goes from being bound to p->field to not having anything explicitly bound
138 to it.
140 Given that we conservatively discard bindings due to possible aliasing or
141 calls to unknown function, the store loses references to svalues,
142 but these svalues could still be live. We don't want to warn about
143 them leaking - they're effectively in a "maybe live" state.
145 This "maybe live" information is somewhat transient.
147 We don't want to store this "maybe live" information in the program_state,
148 region_model, or store, since we don't want to bloat these objects (and
149 potentially bloat the exploded_graph with more nodes).
150 However, we can't store it in the region_model_context, as these context
151 objects sometimes don't last long enough to be around when comparing the
152 old vs the new state.
154 This class is a way to track a set of such svalues, so that we can
155 temporarily capture that they are in a "maybe live" state whilst
156 comparing old and new states. */
158 class uncertainty_t
160 public:
161 typedef hash_set<const svalue *>::iterator iterator;
163 void on_maybe_bound_sval (const svalue *sval)
165 m_maybe_bound_svals.add (sval);
167 void on_mutable_sval_at_unknown_call (const svalue *sval)
169 m_mutable_at_unknown_call_svals.add (sval);
172 bool unknown_sm_state_p (const svalue *sval)
174 return (m_maybe_bound_svals.contains (sval)
175 || m_mutable_at_unknown_call_svals.contains (sval));
178 void dump_to_pp (pretty_printer *pp, bool simple) const;
179 void dump (bool simple) const;
181 iterator begin_maybe_bound_svals () const
183 return m_maybe_bound_svals.begin ();
185 iterator end_maybe_bound_svals () const
187 return m_maybe_bound_svals.end ();
190 private:
192 /* svalues that might or might not still be bound. */
193 hash_set<const svalue *> m_maybe_bound_svals;
195 /* svalues that have mutable sm-state at unknown calls. */
196 hash_set<const svalue *> m_mutable_at_unknown_call_svals;
class byte_range;
class concrete_binding;
class symbolic_binding;

/* Abstract base class for describing ranges of bits within a binding_map
   that can have svalues bound to them.  */

class binding_key
{
public:
  virtual ~binding_key () {}

  /* Is this a concrete_binding (a specific range of bits)?  */
  virtual bool concrete_p () const = 0;

  /* Symbolic is simply the complement of concrete.  */
  bool symbolic_p () const { return !concrete_p (); }

  /* Get the (consolidated) key for accessing region R via MGR.  */
  static const binding_key *make (store_manager *mgr, const region *r);

  /* Debugging support.  */
  virtual void dump_to_pp (pretty_printer *pp, bool simple) const = 0;
  void dump (bool simple) const;
  label_text get_desc (bool simple=true) const;

  /* qsort-style comparators, for imposing a stable ordering on keys.  */
  static int cmp_ptrs (const void *, const void *);
  static int cmp (const binding_key *, const binding_key *);

  /* Downcasts; each subclass overrides the matching one to return
     "this", so a NULL result means "not that kind of key".  */
  virtual const concrete_binding *dyn_cast_concrete_binding () const
  { return NULL; }
  virtual const symbolic_binding *dyn_cast_symbolic_binding () const
  { return NULL; }
};
/* A concrete range of bits, expressed as a start offset and a size
   (both relative to the start of some base region).  */

struct bit_range
{
  bit_range (bit_offset_t start_bit_offset, bit_size_t size_in_bits)
  : m_start_bit_offset (start_bit_offset),
    m_size_in_bits (size_in_bits)
  {}

  /* Debugging support.  */
  void dump_to_pp (pretty_printer *pp) const;
  void dump () const;

  json::object *to_json () const;

  /* A range of zero bits is "empty".  */
  bool empty_p () const
  {
    return m_size_in_bits == 0;
  }

  bit_offset_t get_start_bit_offset () const
  {
    return m_start_bit_offset;
  }

  /* The first bit offset *after* this range (exclusive end).  */
  bit_offset_t get_next_bit_offset () const
  {
    return m_start_bit_offset + m_size_in_bits;
  }

  /* The last bit offset within this range (inclusive end);
     only valid for non-empty ranges.  */
  bit_offset_t get_last_bit_offset () const
  {
    gcc_assert (!empty_p ());
    return get_next_bit_offset () - 1;
  }

  /* Does OFFSET fall within [start, next)?  */
  bool contains_p (bit_offset_t offset) const
  {
    return (offset >= get_start_bit_offset ()
	    && offset < get_next_bit_offset ());
  }

  bool contains_p (const bit_range &other, bit_range *out) const;

  bool operator== (const bit_range &other) const
  {
    return (m_start_bit_offset == other.m_start_bit_offset
	    && m_size_in_bits == other.m_size_in_bits);
  }

  /* Do the two half-open ranges overlap?  */
  bool intersects_p (const bit_range &other) const
  {
    return (get_start_bit_offset () < other.get_next_bit_offset ()
	    && other.get_start_bit_offset () < get_next_bit_offset ());
  }
  bool intersects_p (const bit_range &other,
		     bit_size_t *out_num_overlap_bits) const;
  bool intersects_p (const bit_range &other,
		     bit_range *out_this,
		     bit_range *out_other) const;

  bool exceeds_p (const bit_range &other,
		  bit_range *out_overhanging_bit_range) const;

  bool falls_short_of_p (bit_offset_t offset,
			 bit_range *out_fall_short_bits) const;

  /* qsort-style comparator.  */
  static int cmp (const bit_range &br1, const bit_range &br2);

  bit_range operator- (bit_offset_t offset) const;

  static bool from_mask (unsigned HOST_WIDE_INT mask, bit_range *out);

  /* Attempt to express this range in whole bytes; presumably fails
     when offset/size are not byte-aligned — see store.cc.  */
  bool as_byte_range (byte_range *out) const;

  bit_offset_t m_start_bit_offset;
  bit_size_t m_size_in_bits;
};
/* A concrete range of bytes, expressed as a start offset and a size
   (the byte-level analogue of bit_range).  */

struct byte_range
{
  byte_range (byte_offset_t start_byte_offset, byte_size_t size_in_bytes)
  : m_start_byte_offset (start_byte_offset),
    m_size_in_bytes (size_in_bytes)
  {}

  /* Debugging support.  */
  void dump_to_pp (pretty_printer *pp) const;
  void dump () const;

  json::object *to_json () const;

  /* A range of zero bytes is "empty".  */
  bool empty_p () const
  {
    return m_size_in_bytes == 0;
  }

  /* Does OFFSET fall within [start, next)?  */
  bool contains_p (byte_offset_t offset) const
  {
    return (offset >= get_start_byte_offset ()
	    && offset < get_next_byte_offset ());
  }
  bool contains_p (const byte_range &other, byte_range *out) const;

  bool operator== (const byte_range &other) const
  {
    return (m_start_byte_offset == other.m_start_byte_offset
	    && m_size_in_bytes == other.m_size_in_bytes);
  }

  byte_offset_t get_start_byte_offset () const
  {
    return m_start_byte_offset;
  }

  /* The first byte offset *after* this range (exclusive end).  */
  byte_offset_t get_next_byte_offset () const
  {
    return m_start_byte_offset + m_size_in_bytes;
  }

  /* The last byte offset within this range (inclusive end);
     only valid for non-empty ranges.  */
  byte_offset_t get_last_byte_offset () const
  {
    gcc_assert (!empty_p ());
    return m_start_byte_offset + m_size_in_bytes - 1;
  }

  /* Conversions to the bit-level representation.  */
  bit_range as_bit_range () const
  {
    return bit_range (m_start_byte_offset * BITS_PER_UNIT,
		      m_size_in_bytes * BITS_PER_UNIT);
  }
  bit_offset_t get_start_bit_offset () const
  {
    return m_start_byte_offset * BITS_PER_UNIT;
  }
  bit_offset_t get_next_bit_offset () const
  {
    return get_next_byte_offset () * BITS_PER_UNIT;
  }

  /* qsort-style comparator.  */
  static int cmp (const byte_range &br1, const byte_range &br2);

  byte_offset_t m_start_byte_offset;
  byte_size_t m_size_in_bytes;
};
/* Concrete subclass of binding_key, for describing a non-empty
   concrete range of bits within the binding_map (e.g. "bits 8-15").  */

class concrete_binding : public binding_key
{
public:
  /* This class is its own key for the purposes of consolidation.  */
  typedef concrete_binding key_t;

  concrete_binding (bit_offset_t start_bit_offset, bit_size_t size_in_bits)
  : m_bit_range (start_bit_offset, size_in_bits)
  {
    /* Empty bindings are not allowed (and sizes -1/-2 are reserved as
       hash-table markers; see mark_deleted/mark_empty below).  */
    gcc_assert (m_bit_range.m_size_in_bits > 0);
  }

  bool concrete_p () const final override { return true; }

  hashval_t hash () const
  {
    inchash::hash hstate;
    hstate.add_wide_int (m_bit_range.m_start_bit_offset);
    hstate.add_wide_int (m_bit_range.m_size_in_bits);
    return hstate.end ();
  }
  bool operator== (const concrete_binding &other) const
  {
    return m_bit_range == other.m_bit_range;
  }

  void dump_to_pp (pretty_printer *pp, bool simple) const final override;

  const concrete_binding *dyn_cast_concrete_binding () const final override
  { return this; }

  const bit_range &get_bit_range () const { return m_bit_range; }
  bool get_byte_range (byte_range *out) const;

  bit_offset_t get_start_bit_offset () const
  {
    return m_bit_range.m_start_bit_offset;
  }
  bit_size_t get_size_in_bits () const
  {
    return m_bit_range.m_size_in_bits;
  }

  /* Return the next bit offset after the end of this binding.  */
  bit_offset_t get_next_bit_offset () const
  {
    return m_bit_range.get_next_bit_offset ();
  }

  bool overlaps_p (const concrete_binding &other) const;

  /* qsort-style comparator.  */
  static int cmp_ptr_ptr (const void *, const void *);

  /* Support for being a hash-table key: sizes -1 and -2 encode the
     "deleted" and "empty" slot markers respectively (both are
     impossible for real bindings, which must have size > 0).  */
  void mark_deleted () { m_bit_range.m_size_in_bits = -1; }
  void mark_empty () { m_bit_range.m_size_in_bits = -2; }
  bool is_deleted () const { return m_bit_range.m_size_in_bits == -1; }
  bool is_empty () const { return m_bit_range.m_size_in_bits == -2; }

private:
  bit_range m_bit_range;
};
434 } // namespace ana
/* Support is_a <const ana::concrete_binding *> (KEY) etc. on
   binding_key pointers.  */

template <>
template <>
inline bool
is_a_helper <const ana::concrete_binding *>::test (const ana::binding_key *key)
{
  return key->concrete_p ();
}
/* Hash-traits for concrete_binding: hashing/equality come from the
   class's own member functions.  The all-zeroes bit pattern is a valid
   value (offset 0 size 0 is rejected only by the ctor assert), so it
   cannot double as the "empty" marker.  */

template <> struct default_hash_traits<ana::concrete_binding>
: public member_function_hash_traits<ana::concrete_binding>
{
  static const bool empty_zero_p = false;
};
450 namespace ana {
/* Concrete subclass of binding_key, for describing a symbolic set of
   bits within the binding_map in terms of a region (e.g. "arr[i]").  */

class symbolic_binding : public binding_key
{
public:
  /* This class is its own key for the purposes of consolidation.  */
  typedef symbolic_binding key_t;

  symbolic_binding (const region *region) : m_region (region) {}

  bool concrete_p () const final override { return false; }

  /* Hash on the region's identity (regions are consolidated
     elsewhere, so pointer identity suffices).  */
  hashval_t hash () const
  {
    return (intptr_t)m_region;
  }
  bool operator== (const symbolic_binding &other) const
  {
    return m_region == other.m_region;
  }

  void dump_to_pp (pretty_printer *pp, bool simple) const final override;

  const symbolic_binding *dyn_cast_symbolic_binding () const final override
  { return this; }

  const region *get_region () const { return m_region; }

  /* qsort-style comparator.  */
  static int cmp_ptr_ptr (const void *, const void *);

  /* Support for being a hash-table key: pointer value 1 encodes the
     "deleted" slot marker, NULL encodes "empty".  */
  void mark_deleted () { m_region = reinterpret_cast<const region *> (1); }
  void mark_empty () { m_region = NULL; }
  bool is_deleted () const
  { return m_region == reinterpret_cast<const region *> (1); }
  bool is_empty () const { return m_region == NULL; }

private:
  const region *m_region;
};
492 } // namespace ana
/* Hash-traits for symbolic_binding: an all-zeroes value (NULL region)
   is exactly the "empty" marker (see mark_empty above), so zeroed
   storage is a valid empty slot.  */

template <> struct default_hash_traits<ana::symbolic_binding>
: public member_function_hash_traits<ana::symbolic_binding>
{
  static const bool empty_zero_p = true;
};
500 namespace ana {
/* A mapping from binding_keys to svalues, for use by binding_cluster
   and compound_svalue.  */

class binding_map
{
public:
  typedef hash_map <const binding_key *, const svalue *> map_t;
  typedef map_t::iterator iterator_t;

  binding_map () : m_map () {}
  binding_map (const binding_map &other);
  binding_map& operator=(const binding_map &other);

  bool operator== (const binding_map &other) const;
  bool operator!= (const binding_map &other) const
  {
    return !(*this == other);
  }

  hashval_t hash () const;

  /* Get the svalue bound to KEY, or NULL if no binding exists.
     (const_cast is needed because hash_map::get is non-const.)  */
  const svalue *get (const binding_key *key) const
  {
    const svalue **slot = const_cast<map_t &> (m_map).get (key);
    if (slot)
      return *slot;
    else
      return NULL;
  }

  /* Bind K to V (V must be non-NULL); returns hash_map::put's result
     (i.e. whether K was already present).  */
  bool put (const binding_key *k, const svalue *v)
  {
    gcc_assert (v);
    return m_map.put (k, v);
  }

  void remove (const binding_key *k) { m_map.remove (k); }
  void empty () { m_map.empty (); }

  iterator_t begin () const { return m_map.begin (); }
  iterator_t end () const { return m_map.end (); }
  size_t elements () const { return m_map.elements (); }

  /* Debugging support.  */
  void dump_to_pp (pretty_printer *pp, bool simple, bool multiline) const;
  void dump (bool simple) const;

  json::object *to_json () const;

  /* Populate this map from a CONSTRUCTOR tree for PARENT_REG.  */
  bool apply_ctor_to_region (const region *parent_reg, tree ctor,
			     region_model_manager *mgr);

  /* qsort-style comparator.  */
  static int cmp (const binding_map &map1, const binding_map &map2);

  void remove_overlapping_bindings (store_manager *mgr,
				    const binding_key *drop_key,
				    uncertainty_t *uncertainty,
				    svalue_set *maybe_live_values,
				    bool always_overlap);

private:
  void get_overlapping_bindings (const binding_key *key,
				 auto_vec<const binding_key *> *out);
  bool apply_ctor_val_to_range (const region *parent_reg,
				region_model_manager *mgr,
				tree min_index, tree max_index,
				tree val);
  bool apply_ctor_pair_to_child_region (const region *parent_reg,
					region_model_manager *mgr,
					tree index, tree val);

  map_t m_map;
};
/* Concept: BindingVisitor, for use by binding_cluster::for_each_binding
   and store::for_each_binding.

   Should implement:
     void on_binding (const binding_key *key, const svalue *&sval);  */

/* All of the bindings within a store for regions that share the same
   base region.  */

class binding_cluster
{
public:
  friend class store;

  typedef hash_map <const binding_key *, const svalue *> map_t;
  typedef map_t::iterator iterator_t;

  binding_cluster (const region *base_region);
  binding_cluster (const binding_cluster &other);
  binding_cluster& operator=(const binding_cluster &other);

  bool operator== (const binding_cluster &other) const;
  bool operator!= (const binding_cluster &other) const
  {
    return !(*this == other);
  }

  hashval_t hash () const;

  /* Is the base region of this cluster symbolic (e.g. "*p")?  */
  bool symbolic_p () const;

  const region *get_base_region () const { return m_base_region; }

  /* Debugging support.  */
  void dump_to_pp (pretty_printer *pp, bool simple, bool multiline) const;
  void dump (bool simple) const;

  void validate () const;

  json::object *to_json () const;

  /* Binding and unbinding of regions within this cluster.  */
  void bind (store_manager *mgr, const region *, const svalue *);

  void clobber_region (store_manager *mgr, const region *reg);
  void purge_region (store_manager *mgr, const region *reg);
  void fill_region (store_manager *mgr, const region *reg, const svalue *sval);
  void zero_fill_region (store_manager *mgr, const region *reg);
  void mark_region_as_unknown (store_manager *mgr,
			       const region *reg_to_bind,
			       const region *reg_for_overlap,
			       uncertainty_t *uncertainty,
			       svalue_set *maybe_live_values);
  void purge_state_involving (const svalue *sval,
			      region_model_manager *sval_mgr);

  /* Lookup of bindings.  */
  const svalue *get_binding (store_manager *mgr, const region *reg) const;
  const svalue *get_binding_recursive (store_manager *mgr,
				       const region *reg) const;
  const svalue *get_any_binding (store_manager *mgr,
				 const region *reg) const;
  const svalue *maybe_get_compound_binding (store_manager *mgr,
					    const region *reg) const;

  void remove_overlapping_bindings (store_manager *mgr, const region *reg,
				    uncertainty_t *uncertainty,
				    svalue_set *maybe_live_values);

  /* Call CB (sval, USER_DATA) for every bound svalue in this cluster.  */
  template <typename T>
  void for_each_value (void (*cb) (const svalue *sval, T user_data),
		       T user_data) const
  {
    for (map_t::iterator iter = m_map.begin (); iter != m_map.end (); ++iter)
      cb ((*iter).second, user_data);
  }

  /* Support for merging states (see the comment at the top of this
     header on clusters).  */
  static bool can_merge_p (const binding_cluster *cluster_a,
			   const binding_cluster *cluster_b,
			   binding_cluster *out_cluster,
			   store *out_store,
			   store_manager *mgr,
			   model_merger *merger);
  void make_unknown_relative_to (const binding_cluster *other_cluster,
				 store *out_store,
				 store_manager *mgr);

  void mark_as_escaped ();
  void on_unknown_fncall (const gcall *call, store_manager *mgr,
			  const conjured_purge &p);
  void on_asm (const gasm *stmt, store_manager *mgr,
	       const conjured_purge &p);

  bool escaped_p () const;
  bool touched_p () const { return m_touched; }

  bool redundant_p () const;
  bool empty_p () const { return m_map.elements () == 0; }

  void get_representative_path_vars (const region_model *model,
				     svalue_set *visited,
				     const region *base_reg,
				     const svalue *sval,
				     auto_vec<path_var> *out_pvs) const;

  const svalue *maybe_get_simple_value (store_manager *mgr) const;

  /* Call V.on_binding for each (key, svalue) pair in this cluster;
     see the BindingVisitor concept above.  */
  template <typename BindingVisitor>
  void for_each_binding (BindingVisitor &v) const
  {
    for (map_t::iterator iter = m_map.begin (); iter != m_map.end (); ++iter)
      {
	const binding_key *key = (*iter).first;
	const svalue *&sval = (*iter).second;
	v.on_binding (key, sval);
      }
  }

  iterator_t begin () const { return m_map.begin (); }
  iterator_t end () const { return m_map.end (); }

  const binding_map &get_map () const { return m_map; }

private:
  const svalue *get_any_value (const binding_key *key) const;
  void bind_compound_sval (store_manager *mgr,
			   const region *reg,
			   const compound_svalue *compound_sval);
  void bind_key (const binding_key *key, const svalue *sval);

  const region *m_base_region;

  binding_map m_map;

  /* Has a pointer to this cluster "escaped" into a part of the program
     we don't know about (via a call to a function with an unknown body,
     or by being passed in as a pointer param of a "top-level" function call).
     Such regions could be overwritten when other such functions are called,
     even if the region is no longer reachable by pointers that we are
     tracking.  */
  bool m_escaped;

  /* Has this cluster been written to via a symbolic binding?
     If so, then we don't know anything about unbound subregions,
     so we can't use initial_svalue, treat them as uninitialized, or
     inherit values from a parent region.  */
  bool m_touched;
};
/* The mapping from regions to svalues.
   This is actually expressed by subdividing into clusters, to better
   handle aliasing.  */

class store
{
public:
  typedef hash_map <const region *, binding_cluster *> cluster_map_t;

  store ();
  store (const store &other);
  ~store ();

  store &operator= (const store &other);

  bool operator== (const store &other) const;
  bool operator!= (const store &other) const
  {
    return !(*this == other);
  }

  hashval_t hash () const;

  /* Debugging support.  */
  void dump_to_pp (pretty_printer *pp, bool summarize, bool multiline,
		   store_manager *mgr) const;
  void dump (bool simple) const;
  void summarize_to_pp (pretty_printer *pp, bool simple) const;

  void validate () const;

  json::object *to_json () const;

  const svalue *get_any_binding (store_manager *mgr, const region *reg) const;

  bool called_unknown_fn_p () const { return m_called_unknown_fn; }

  /* Writes: these delegate to the cluster of the relevant base region
     (and may have to conservatively touch aliasing clusters).  */
  void set_value (store_manager *mgr, const region *lhs_reg,
		  const svalue *rhs_sval,
		  uncertainty_t *uncertainty);
  void clobber_region (store_manager *mgr, const region *reg);
  void purge_region (store_manager *mgr, const region *reg);
  void fill_region (store_manager *mgr, const region *reg, const svalue *sval);
  void zero_fill_region (store_manager *mgr, const region *reg);
  void mark_region_as_unknown (store_manager *mgr, const region *reg,
			       uncertainty_t *uncertainty,
			       svalue_set *maybe_live_values);
  void purge_state_involving (const svalue *sval,
			      region_model_manager *sval_mgr);

  /* Cluster access by base region.  */
  const binding_cluster *get_cluster (const region *base_reg) const;
  binding_cluster *get_cluster (const region *base_reg);
  binding_cluster *get_or_create_cluster (const region *base_reg);
  void purge_cluster (const region *base_reg);

  /* Call CB (base_reg, USER_DATA) for each cluster's base region.  */
  template <typename T>
  void for_each_cluster (void (*cb) (const region *base_reg, T user_data),
			 T user_data) const
  {
    for (cluster_map_t::iterator iter = m_cluster_map.begin ();
	 iter != m_cluster_map.end (); ++iter)
      cb ((*iter).first, user_data);
  }

  static bool can_merge_p (const store *store_a, const store *store_b,
			   store *out_store, store_manager *mgr,
			   model_merger *merger);

  void mark_as_escaped (const region *base_reg);
  void on_unknown_fncall (const gcall *call, store_manager *mgr,
			  const conjured_purge &p);
  bool escaped_p (const region *reg) const;

  void get_representative_path_vars (const region_model *model,
				     svalue_set *visited,
				     const svalue *sval,
				     auto_vec<path_var> *out_pvs) const;

  cluster_map_t::iterator begin () const { return m_cluster_map.begin (); }
  cluster_map_t::iterator end () const { return m_cluster_map.end (); }

  /* Can the two base regions alias each other?  (tristate, since for
     symbolic regions the answer may be unknown.)  */
  tristate eval_alias (const region *base_reg_a,
		       const region *base_reg_b) const;

  /* Call V.on_binding for every binding in every cluster;
     see the BindingVisitor concept above.  */
  template <typename BindingVisitor>
  void for_each_binding (BindingVisitor &v)
  {
    for (cluster_map_t::iterator iter = m_cluster_map.begin ();
	 iter != m_cluster_map.end (); ++iter)
      (*iter).second->for_each_binding (v);
  }

  void canonicalize (store_manager *mgr);
  void loop_replay_fixup (const store *other_store,
			  region_model_manager *mgr);

  /* Support for replaying call summaries.  */
  void replay_call_summary (call_summary_replay &r,
			    const store &summary);
  void replay_call_summary_cluster (call_summary_replay &r,
				    const store &summary,
				    const region *base_reg);
  void on_maybe_live_values (const svalue_set &maybe_live_values);

private:
  void remove_overlapping_bindings (store_manager *mgr, const region *reg,
				    uncertainty_t *uncertainty);
  tristate eval_alias_1 (const region *base_reg_a,
			 const region *base_reg_b) const;

  cluster_map_t m_cluster_map;

  /* If this is true, then unknown code has been called, and so
     any global variable that isn't currently modelled by the store
     has unknown state, rather than being in an "initial state".
     This is to avoid having to mark (and thus explicitly track)
     every global when an unknown function is called; instead, they
     can be tracked implicitly.  */
  bool m_called_unknown_fn;
};
/* A class responsible for owning and consolidating binding keys
   (both concrete and symbolic).
   Key instances are immutable as far as clients are concerned, so they
   are provided as "const" ptrs.  */

class store_manager
{
public:
  store_manager (region_model_manager *mgr) : m_mgr (mgr) {}

  logger *get_logger () const;

  /* binding consolidation.  */
  const concrete_binding *
  get_concrete_binding (bit_offset_t start_bit_offset,
			bit_offset_t size_in_bits);
  const concrete_binding *
  get_concrete_binding (const bit_range &bits)
  {
    return get_concrete_binding (bits.get_start_bit_offset (),
				 bits.m_size_in_bits);
  }
  const concrete_binding *
  get_concrete_binding (const byte_range &bytes)
  {
    /* Convenience overload: convert to bits and consolidate.  */
    bit_range bits = bytes.as_bit_range ();
    return get_concrete_binding (bits);
  }
  const symbolic_binding *
  get_symbolic_binding (const region *region);

  region_model_manager *get_svalue_manager () const
  {
    return m_mgr;
  }

  void log_stats (logger *logger, bool show_objs) const;

private:
  region_model_manager *m_mgr;
  consolidation_map<concrete_binding> m_concrete_binding_key_mgr;
  consolidation_map<symbolic_binding> m_symbolic_binding_key_mgr;
};
884 } // namespace ana
886 #endif /* GCC_ANALYZER_STORE_H */