gcc/analyzer/region-model.cc
/* Classes for modeling the state of memory.
   Copyright (C) 2019-2023 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#define INCLUDE_MEMORY
#include "system.h"
#include "coretypes.h"
#include "make-unique.h"
#include "tree.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "diagnostic-core.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "diagnostic-color.h"
#include "diagnostic-metadata.h"
#include "bitmap.h"
#include "selftest.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "cfg.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"
#include "diagnostic-event-id.h"
#include "analyzer/sm.h"
#include "analyzer/pending-diagnostic.h"
#include "analyzer/region-model-reachability.h"
#include "analyzer/analyzer-selftests.h"
#include "analyzer/program-state.h"
#include "analyzer/call-summary.h"
#include "stor-layout.h"
#include "attribs.h"
#include "tree-object-size.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "tree-ssa-operands.h"
#include "ssa-iterators.h"
#include "calls.h"
#include "is-a.h"
#include "gcc-rich-location.h"
#include "analyzer/checker-event.h"
#include "analyzer/checker-path.h"
#if ENABLE_ANALYZER

namespace ana {

/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  */

void
dump_tree (pretty_printer *pp, tree t)
{
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}

/* Dump T to PP in language-independent form in quotes, for
   debugging/logging/dumping purposes.  */

void
dump_quoted_tree (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_tree (pp, t);
  pp_end_quote (pp, pp_show_color (pp));
}

/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
   calls within other pp_printf calls.

   default_tree_printer handles 'T' and some other codes by calling
     dump_generic_node (pp, t, 0, TDF_SLIM, 0);
   dump_generic_node calls pp_printf in various places, leading to
   garbled output.

   Ideally pp_printf could be made to be reentrant, but in the meantime
   this function provides a workaround.  */

void
print_quoted_type (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}
/* class region_to_value_map.  */

/* Assignment operator for region_to_value_map.  */

region_to_value_map &
region_to_value_map::operator= (const region_to_value_map &other)
{
  m_hash_map.empty ();
  for (auto iter : other.m_hash_map)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      m_hash_map.put (reg, sval);
    }
  return *this;
}

/* Equality operator for region_to_value_map.  */

bool
region_to_value_map::operator== (const region_to_value_map &other) const
{
  if (m_hash_map.elements () != other.m_hash_map.elements ())
    return false;

  for (auto iter : *this)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      const svalue * const *other_slot = other.get (reg);
      if (other_slot == NULL)
	return false;
      if (sval != *other_slot)
	return false;
    }

  return true;
}
/* Dump this object to PP.  */

void
region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
				 bool multiline) const
{
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (i > 0)
	pp_string (pp, ", ");
      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");
}

/* Dump this object to stderr.  */

DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
  pp_flush (&pp);
}
/* Attempt to merge THIS with OTHER, writing the result
   to OUT.

   For now, write (region, value) mappings that are in common between THIS
   and OTHER to OUT, effectively taking the intersection.

   Reject merger of different values.  */

bool
region_to_value_map::can_merge_with_p (const region_to_value_map &other,
				       region_to_value_map *out) const
{
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      const svalue * const * other_slot = other.get (iter_reg);
      if (other_slot)
	{
	  if (iter_sval == *other_slot)
	    out->put (iter_reg, iter_sval);
	  else
	    return false;
	}
    }
  return true;
}
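/* For example (a sketch): merging
     {REG_A: sval_1, REG_B: sval_2}
   with
     {REG_A: sval_1, REG_C: sval_3}
   writes just {REG_A: sval_1} to OUT, whereas merging
     {REG_A: sval_1} with {REG_A: sval_2}
   is rejected, since REG_A is bound to different values.  */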
/* Purge any state involving SVAL.  */

void
region_to_value_map::purge_state_involving (const svalue *sval)
{
  auto_vec<const region *> to_purge;
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
	to_purge.safe_push (iter_reg);
    }
  for (auto iter : to_purge)
    m_hash_map.remove (iter);
}
/* class region_model.  */

/* Ctor for region_model: construct an "empty" model.  */

region_model::region_model (region_model_manager *mgr)
: m_mgr (mgr), m_store (), m_current_frame (NULL),
  m_dynamic_extents ()
{
  m_constraints = new constraint_manager (mgr);
}

/* region_model's copy ctor.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_dynamic_extents (other.m_dynamic_extents)
{
}

/* region_model's dtor.  */

region_model::~region_model ()
{
  delete m_constraints;
}

/* region_model's assignment operator.  */

region_model &
region_model::operator= (const region_model &other)
{
  /* m_mgr is const.  */
  gcc_assert (m_mgr == other.m_mgr);

  m_store = other.m_store;

  delete m_constraints;
  m_constraints = new constraint_manager (*other.m_constraints);

  m_current_frame = other.m_current_frame;

  m_dynamic_extents = other.m_dynamic_extents;

  return *this;
}

/* Equality operator for region_model.

   Amongst other things this directly compares the stores and the constraint
   managers, so for this to be meaningful both this and OTHER should
   have been canonicalized.  */

bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  if (m_dynamic_extents != other.m_dynamic_extents)
    return false;

  gcc_checking_assert (hash () == other.hash ());

  return true;
}
/* Generate a hash value for this region_model.  */

hashval_t
region_model::hash () const
{
  hashval_t result = m_store.hash ();
  result ^= m_constraints->hash ();
  return result;
}

/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
			  bool multiline) const
{
  /* Dump stack.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
	pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
		      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}

/* Dump a representation of this model to FILE.  */

void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  dump (stderr, simple, true);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::debug () const
{
  dump (true);
}

/* Assert that this object is valid.  */

void
region_model::validate () const
{
  m_store.validate ();
}

/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}

/* Return true if this region_model is in canonical form.  */

bool
region_model::canonicalized_p () const
{
  region_model copy (*this);
  copy.canonicalize ();
  return *this == copy;
}
/* See the comment for store::loop_replay_fixup.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}

/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values.  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
			     const region *src_region)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region)
  {}

  const char *get_kind () const final override { return "poisoned_value_diagnostic"; }

  bool use_of_uninit_p () const final override
  {
    return m_pkind == POISON_KIND_UNINIT;
  }

  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
	    && m_pkind == other.m_pkind
	    && m_src_region == other.m_src_region);
  }

  int get_controlling_option () const final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return OPT_Wanalyzer_use_of_uninitialized_value;
      case POISON_KIND_FREED:
	return OPT_Wanalyzer_use_after_free;
      case POISON_KIND_POPPED_STACK:
	return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
      }
  }

  bool emit (rich_location *rich_loc) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	{
	  diagnostic_metadata m;
	  m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
	  return warning_meta (rich_loc, m, get_controlling_option (),
			       "use of uninitialized value %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_FREED:
	{
	  diagnostic_metadata m;
	  m.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return warning_meta (rich_loc, m, get_controlling_option (),
			       "use after %<free%> of %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_POPPED_STACK:
	{
	  /* TODO: which CWE?  */
	  return warning_at
	    (rich_loc, get_controlling_option (),
	     "dereferencing pointer %qE to within stale stack frame",
	     m_expr);
	}
	break;
      }
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return ev.formatted_print ("use of uninitialized value %qE here",
				   m_expr);
      case POISON_KIND_FREED:
	return ev.formatted_print ("use after %<free%> of %qE here",
				   m_expr);
      case POISON_KIND_POPPED_STACK:
	return ev.formatted_print
	  ("dereferencing pointer %qE to within stale stack frame",
	   m_expr);
      }
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

private:
  tree m_expr;
  enum poison_kind m_pkind;
  const region *m_src_region;
};
/* A subclass of pending_diagnostic for complaining about shifts
   by negative counts.  */

class shift_count_negative_diagnostic
: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
{
public:
  shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
  : m_assign (assign), m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_negative_diagnostic";
  }

  bool operator== (const shift_count_negative_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_negative;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "shift by negative count (%qE)", m_count_cst);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by negative amount here (%qE)",
			       m_count_cst);
  }

private:
  const gassign *m_assign;
  tree m_count_cst;
};

/* A subclass of pending_diagnostic for complaining about shifts
   by counts >= the width of the operand type.  */

class shift_count_overflow_diagnostic
: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
{
public:
  shift_count_overflow_diagnostic (const gassign *assign,
				   int operand_precision,
				   tree count_cst)
  : m_assign (assign), m_operand_precision (operand_precision),
    m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_overflow_diagnostic";
  }

  bool operator== (const shift_count_overflow_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && m_operand_precision == other.m_operand_precision
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_overflow;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "shift by count (%qE) >= precision of type (%qi)",
		       m_count_cst, m_operand_precision);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by count %qE here", m_count_cst);
  }

private:
  const gassign *m_assign;
  int m_operand_precision;
  tree m_count_cst;
};
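/* For example (a sketch, assuming 32-bit int):

     y = x << -1;   // shift_count_negative_diagnostic
     y = x << 32;   // shift_count_overflow_diagnostic

   Both are flagged from the LSHIFT_EXPR/RSHIFT_EXPR handling in
   region_model::get_gassign_result below, per INT34-C.  */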
/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL.  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
				  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      return NULL;

    case POINTER_PLUS_EXPR:
      {
	/* e.g. "_1 = a_10(D) + 12;" */
	tree ptr = rhs1;
	tree offset = gimple_assign_rhs2 (assign);

	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	/* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
	   is an integer of type sizetype".  */
	offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					ptr_sval, offset_sval);
	return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
	/* e.g. "_1 = p_2(D) - q_3(D);".  */
	tree rhs2 = gimple_assign_rhs2 (assign);
	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	// TODO: perhaps fold to zero if they're known to be equal?

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    /* Assignments of the form
	 set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL:/* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary ops.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
	return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (TREE_TYPE (lhs) == boolean_type_node)
	  {
	    /* Consider constraints between svalues.  */
	    tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
	    if (t.is_known ())
	      return m_mgr->get_or_create_constant_svalue
		(t.is_true () ? boolean_true_node : boolean_false_node);
	  }

	/* Otherwise, generate a symbolic binary op.  */
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
	/* Binary ops.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
	  {
	    /* "INT34-C. Do not shift an expression by a negative number of
	       bits or by greater than or equal to the number of bits that
	       exist in the operand."  */
	    if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
	      if (TREE_CODE (rhs2_cst) == INTEGER_CST)
		{
		  if (tree_int_cst_sgn (rhs2_cst) < 0)
		    ctxt->warn
		      (make_unique<shift_count_negative_diagnostic>
			 (assign, rhs2_cst));
		  else if (compare_tree_int (rhs2_cst,
					     TYPE_PRECISION (TREE_TYPE (rhs1)))
			   >= 0)
		    ctxt->warn
		      (make_unique<shift_count_overflow_diagnostic>
			 (assign,
			  int (TYPE_PRECISION (TREE_TYPE (rhs1))),
			  rhs2_cst));
		}
	  }

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}
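/* For example (a sketch): for
     _1 = a_2 + b_3;
   the result is a PLUS_EXPR binop_svalue over the svalues of a_2 and b_3,
   whereas for a boolean-typed comparison such as
     _4 = a_2 > b_3;
   an already-known constraint between the operands folds the result
   directly to the constant svalue for boolean_true_node or
   boolean_false_node.  */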
/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   of the form:
     ((A OR-IF B) OR-IF C)
   and:
     ((A AND-IF B) AND-IF C)
   where evaluating B is redundant, but could involve simple accesses of
   uninitialized locals.

   When optimization is turned on the FE can immediately fold compound
   conditionals.  Specifically, c_parser_condition parses this condition:
     ((A OR-IF B) OR-IF C)
   and calls c_fully_fold on the condition.
   Within c_fully_fold, fold_truth_andor is called, which bails when
   optimization is off, but if any optimization is turned on can convert the
     ((A OR-IF B) OR-IF C)
   into:
     ((A OR B) OR-IF C)
   for sufficiently simple B
   i.e. the inner OR-IF becomes an OR.
   At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
   giving this for the inner condition:
     tmp = A | B;
     if (tmp)
   thus effectively synthesizing a redundant access of B when optimization
   is turned on, when compared to:
     if (A) goto L1; else goto L4;
     L1: if (B) goto L2; else goto L4;
     L2: if (C) goto L3; else goto L4;
   for the unoptimized case.

   Return true if CTXT appears to be handling such a short-circuitable stmt,
   such as the def-stmt for B for the:
     tmp = A | B;
   case above, for the case where A is true and thus B would have been
   short-circuited without optimization, using MODEL for the value of A.  */

static bool
within_short_circuited_stmt_p (const region_model *model,
			       const gassign *assign_stmt)
{
  /* We must have an assignment to a temporary of _Bool type.  */
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_TYPE (lhs) != boolean_type_node)
    return false;
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  if (SSA_NAME_VAR (lhs) != NULL_TREE)
    return false;

  /* The temporary bool must be used exactly once: as the second arg of
     a BIT_IOR_EXPR or BIT_AND_EXPR.  */
  use_operand_p use_op;
  gimple *use_stmt;
  if (!single_imm_use (lhs, &use_op, &use_stmt))
    return false;
  const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
  if (!use_assign)
    return false;
  enum tree_code op = gimple_assign_rhs_code (use_assign);
  if (!(op == BIT_IOR_EXPR || op == BIT_AND_EXPR))
    return false;
  if (!(gimple_assign_rhs1 (use_assign) != lhs
	&& gimple_assign_rhs2 (use_assign) == lhs))
    return false;

  /* The first arg of the bitwise stmt must have a known value in MODEL
     that implies that the value of the second arg doesn't matter, i.e.
     1 for bitwise or, 0 for bitwise and.  */
  tree other_arg = gimple_assign_rhs1 (use_assign);
  /* Use a NULL ctxt here to avoid generating warnings.  */
  const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
  tree other_arg_cst = other_arg_sval->maybe_get_constant ();
  if (!other_arg_cst)
    return false;
  switch (op)
    {
    default:
      gcc_unreachable ();
    case BIT_IOR_EXPR:
      if (zerop (other_arg_cst))
	return false;
      break;
    case BIT_AND_EXPR:
      if (!zerop (other_arg_cst))
	return false;
      break;
    }

  /* All tests passed.  We appear to be in a stmt that generates a boolean
     temporary with a value that won't matter.  */
  return true;
}
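/* As a concrete illustration (a sketch):

     int b;               // uninitialized
     int a = get ();
     if (a || b)
       ...

   With optimization on, the condition can be gimplified to
     tmp = a | b;
     if (tmp != 0)
   so the def-stmt for tmp reads "b" even when "a" is nonzero.  If MODEL
   knows "a" is a nonzero constant, the value of "b" cannot matter, and
   the would-be -Wanalyzer-use-of-uninitialized-value warning is
   discarded.  */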
/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   seen with -ftrivial-auto-var-init=.

   -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.

   If the address of the var is taken, gimplification will give us
   something like:

     _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
     len = _1;

   The result of DEFERRED_INIT will be an uninit value; we don't
   want to emit a false positive for "len = _1;"

   Return true if ASSIGN_STMT is such a stmt.  */

static bool
due_to_ifn_deferred_init_p (const gassign *assign_stmt)
{
  /* We must have an assignment to a decl from an SSA name that's the
     result of a IFN_DEFERRED_INIT call.  */
  if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
    return false;
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_CODE (lhs) != VAR_DECL)
    return false;
  tree rhs = gimple_assign_rhs1 (assign_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
  const gcall *call = dyn_cast <const gcall *> (def_stmt);
  if (!call)
    return false;
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return true;
  return false;
}
/* Check for SVAL being poisoned, adding a warning to CTXT.
   Return SVAL, or, if a warning is added, another value, to avoid
   repeatedly complaining about the same poisoned value in followup code.
   SRC_REGION is a hint about where SVAL came from, and can be NULL.  */

const svalue *
region_model::check_for_poison (const svalue *sval,
				tree expr,
				const region *src_region,
				region_model_context *ctxt) const
{
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();

      /* Ignore uninitialized uses of empty types; there's nothing
	 to initialize.  */
      if (pkind == POISON_KIND_UNINIT
	  && sval->get_type ()
	  && is_empty_type (sval->get_type ()))
	return sval;

      if (pkind == POISON_KIND_UNINIT)
	if (const gimple *curr_stmt = ctxt->get_stmt ())
	  if (const gassign *assign_stmt
		= dyn_cast <const gassign *> (curr_stmt))
	    {
	      /* Special case to avoid certain false positives.  */
	      if (within_short_circuited_stmt_p (this, assign_stmt))
		return sval;

	      /* Special case to avoid false positive on
		 -ftrivial-auto-var-init=.  */
	      if (due_to_ifn_deferred_init_p (assign_stmt))
		return sval;
	    }

      /* If we have an SSA name for a temporary, we don't want to print
	 '<unknown>'.
	 Poisoned values are shared by type, and so we can't reconstruct
	 the tree other than via the def stmts, using
	 fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      if (src_region == NULL && pkind == POISON_KIND_UNINIT)
	src_region = get_region_for_poisoned_expr (expr);
      if (ctxt->warn (make_unique<poisoned_value_diagnostic> (diag_arg,
							      pkind,
							      src_region)))
	{
	  /* We only want to report use of a poisoned value at the first
	     place it gets used; return an unknown value to avoid generating
	     a chain of followup warnings.  */
	  sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
	}

      return sval;
    }

  return sval;
}
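/* For example (a sketch):

     int i;
     return i;   // "use of uninitialized value 'i'": first use warns

   After warning, the poisoned svalue for "i" is replaced by an unknown
   svalue, so followup uses along the same path don't warn again.  */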
/* Attempt to get a region for describing EXPR, the source of region of
   a poisoned_svalue for use in a poisoned_value_diagnostic.
   Return NULL if there is no good region to use.  */

const region *
region_model::get_region_for_poisoned_expr (tree expr) const
{
  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree decl = SSA_NAME_VAR (expr);
      if (decl && DECL_P (decl))
	expr = decl;
      else
	return NULL;
    }
  return get_lvalue (expr, NULL);
}
/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      check_for_poison (sval, expr, NULL, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
	if (0)
	  sorry_at (assign->location, "unhandled assignment op: %qs",
		    get_tree_code_name (op));
	const svalue *unknown_sval
	  = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
	set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
	if (TREE_CLOBBER_P (rhs1))
	  {
	    /* e.g. "x ={v} {CLOBBER};"  */
	    clobber_region (lhs_reg);
	  }
	else
	  {
	    /* Any CONSTRUCTOR that survives to this point is either
	       just a zero-init of everything, or a vector.  */
	    if (!CONSTRUCTOR_NO_CLEARING (rhs1))
	      zero_fill_region (lhs_reg);
	    unsigned ix;
	    tree index;
	    tree val;
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
	      {
		gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
		if (!index)
		  index = build_int_cst (integer_type_node, ix);
		gcc_assert (TREE_CODE (index) == INTEGER_CST);
		const svalue *index_sval
		  = m_mgr->get_or_create_constant_svalue (index);
		gcc_assert (index_sval);
		const region *sub_reg
		  = m_mgr->get_element_region (lhs_reg,
					       TREE_TYPE (val),
					       index_sval);
		const svalue *val_sval = get_rvalue (val, ctxt);
		set_value (sub_reg, val_sval, ctxt);
	      }
	  }
      }
      break;

    case STRING_CST:
      {
	/* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
			   ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}
/* Handle the pre-sm-state part of STMT, modifying this object in-place.
   Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
   side effects.  */

void
region_model::on_stmt_pre (const gimple *stmt,
			   bool *out_unknown_side_effects,
			   region_model_context *ctxt)
{
  switch (gimple_code (stmt))
    {
    default:
      /* No-op for now.  */
      break;

    case GIMPLE_ASSIGN:
      {
	const gassign *assign = as_a <const gassign *> (stmt);
	on_assignment (assign, ctxt);
      }
      break;

    case GIMPLE_ASM:
      {
	const gasm *asm_stmt = as_a <const gasm *> (stmt);
	on_asm_stmt (asm_stmt, ctxt);
      }
      break;

    case GIMPLE_CALL:
      {
	/* Track whether we have a gcall to a function that's not recognized
	   by anything, for which we don't have a function body, or for which
	   we don't know the fndecl.  */
	const gcall *call = as_a <const gcall *> (stmt);
	*out_unknown_side_effects = on_call_pre (call, ctxt);
      }
      break;

    case GIMPLE_RETURN:
      {
	const greturn *return_ = as_a <const greturn *> (stmt);
	on_return (return_, ctxt);
      }
      break;
    }
}

/* Ensure that all arguments at the call described by CD are checked
   for poisoned values, by calling get_rvalue on each argument.  */

void
region_model::check_call_args (const call_details &cd) const
{
  for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
    cd.get_arg_svalue (arg_idx);
}
/* Return true if CD is known to be a call to a function with
   __attribute__((const)).  */

static bool
const_fn_p (const call_details &cd)
{
  tree fndecl = cd.get_fndecl_for_call ();
  if (!fndecl)
    return false;
  gcc_assert (DECL_P (fndecl));
  return TREE_READONLY (fndecl);
}

/* If this CD is known to be a call to a function with
   __attribute__((const)), attempt to get a const_fn_result_svalue
   based on the arguments, or return NULL otherwise.  */

static const svalue *
maybe_get_const_fn_result (const call_details &cd)
{
  if (!const_fn_p (cd))
    return NULL;

  unsigned num_args = cd.num_args ();
  if (num_args > const_fn_result_svalue::MAX_INPUTS)
    /* Too many arguments.  */
    return NULL;

  auto_vec<const svalue *> inputs (num_args);
  for (unsigned arg_idx = 0; arg_idx < num_args; arg_idx++)
    {
      const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
      if (!arg_sval->can_have_associated_state_p ())
	return NULL;
      inputs.quick_push (arg_sval);
    }

  region_model_manager *mgr = cd.get_manager ();
  const svalue *sval
    = mgr->get_or_create_const_fn_result_svalue (cd.get_lhs_type (),
						 cd.get_fndecl_for_call (),
						 inputs);
  return sval;
}
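/* For example (a sketch, "square" being a hypothetical decl):

     extern int square (int) __attribute__((const));
     int a = square (x);
     int b = square (x);

   Both calls map to the same const_fn_result_svalue (same fndecl, same
   input svalues), so the analyzer can treat a and b as equal without
   knowing the function's body.  */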
/* Update this model for an outcome of a call that returns a specific
   integer constant.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_int_cst_return (const call_details &cd,
					 int retval,
					 bool unmergeable)
{
  if (!cd.get_lhs_type ())
    return;
  if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
    return;
  const svalue *result
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
  if (unmergeable)
    result = m_mgr->get_or_create_unmergeable (result);
  set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
}

/* Update this model for an outcome of a call that returns zero.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_zero_return (const call_details &cd,
				      bool unmergeable)
{
  update_for_int_cst_return (cd, 0, unmergeable);
}

/* Update this model for an outcome of a call that returns non-zero.  */

void
region_model::update_for_nonzero_return (const call_details &cd)
{
  if (!cd.get_lhs_type ())
    return;
  if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
    return;
  const svalue *zero
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
  const svalue *result
    = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
  add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
}
/* Subroutine of region_model::maybe_get_copy_bounds.
   The Linux kernel commonly uses
     min_t([unsigned] long, VAR, sizeof(T));
   to set an upper bound on the size of a copy_to_user.
   Attempt to simplify such sizes by trying to get the upper bound as a
   constant.
   Return the simplified svalue if possible, or NULL otherwise.  */

static const svalue *
maybe_simplify_upper_bound (const svalue *num_bytes_sval,
			    region_model_manager *mgr)
{
  tree type = num_bytes_sval->get_type ();
  while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
    num_bytes_sval = raw;
  if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
    if (binop_sval->get_op () == MIN_EXPR)
      if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
	{
	  return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
	  /* TODO: we might want to also capture the constraint
	     when recording the diagnostic, or note that we're using
	     the upper bound.  */
	}
  return NULL;
}
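/* For example (a sketch): a copy size computed as
     sz_7 = MIN_EXPR <len_5, 128>;
   is a MIN_EXPR binop_svalue whose second argument is the constant 128,
   so it gets conservatively simplified to 128 as an upper bound.  */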
/* Attempt to get an upper bound for the size of a copy when simulating a
   copy function.

   NUM_BYTES_SVAL is the symbolic value for the size of the copy.
   Use it if it's constant, otherwise try to simplify it.  Failing
   that, use the size of SRC_REG if constant.

   Return a symbolic value for an upper limit on the number of bytes
   copied, or NULL if no such value could be determined.  */

const svalue *
region_model::maybe_get_copy_bounds (const region *src_reg,
				     const svalue *num_bytes_sval)
{
  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  if (const svalue *simplified
	= maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
    num_bytes_sval = simplified;

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* For now, try just guessing the size as the capacity of the
     base region of the src.
     This is a hack; we might get too large a value.  */
  const region *src_base_reg = src_reg->get_base_region ();
  num_bytes_sval = get_capacity (src_base_reg);

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* Non-constant: give up.  */
  return NULL;
}
/* Get any known_function for FNDECL for call CD.

   The call must match all assumptions made by the known_function (such as
   e.g. "argument 1's type must be a pointer type").

   Return NULL if no known_function is found, or it does not match the
   assumption(s).  */

const known_function *
region_model::get_known_function (tree fndecl, const call_details &cd) const
{
  known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
  return known_fn_mgr->get_match (fndecl, cd);
}

/* Get any known_function for IFN, or NULL.  */

const known_function *
region_model::get_known_function (enum internal_fn ifn) const
{
  known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
  return known_fn_mgr->get_internal_fn (ifn);
}
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the first half.

   Updates to the region_model that should be made *before* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_post.

   Return true if the function call has unknown side effects (it wasn't
   recognized and we don't have a body for it, or are unable to tell which
   fndecl it is).  */

bool
region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
{
  call_details cd (call, this, ctxt);

  bool unknown_side_effects = false;

  /* Special-case for IFN_DEFERRED_INIT.
     We want to report uninitialized variables with -fanalyzer (treating
     -ftrivial-auto-var-init= as purely a mitigation feature).
     Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
     lhs of the call, so that it is still uninitialized from the point of
     view of the analyzer.  */
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return false;

  /* Get svalues for all of the arguments at the callsite, to ensure that we
     complain about any uninitialized arguments.  This might lead to
     duplicates if any of the handling below also looks up the svalues,
     but the deduplication code should deal with that.  */
  if (ctxt)
    check_call_args (cd);

  tree callee_fndecl = get_fndecl_for_call (call, ctxt);

  /* Some of the cases below update the lhs of the call based on the
     return value, but not all.  Provide a default value, which may
     get overwritten below.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const region *lhs_region = get_lvalue (lhs, ctxt);
      const svalue *sval = maybe_get_const_fn_result (cd);
      if (!sval)
	{
	  if (callee_fndecl
	      && lookup_attribute ("malloc", DECL_ATTRIBUTES (callee_fndecl)))
	    {
	      const region *new_reg
		= get_or_create_region_for_heap_alloc (NULL, ctxt);
	      mark_region_as_unknown (new_reg, NULL);
	      sval = m_mgr->get_ptr_svalue (cd.get_lhs_type (), new_reg);
	    }
	  else
	    /* For the common case of functions without __attribute__((const)),
	       use a conjured value, and purge any prior state involving that
	       value (in case this is in a loop).  */
	    sval = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
							 lhs_region,
							 conjured_purge (this,
									 ctxt));
	}
      set_value (lhs_region, sval, ctxt);
    }

  if (gimple_call_internal_p (call))
    if (const known_function *kf
	  = get_known_function (gimple_call_internal_fn (call)))
      {
	kf->impl_call_pre (cd);
	return false;
      }

  if (callee_fndecl)
    {
      int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);

      if (const known_function *kf = get_known_function (callee_fndecl, cd))
	{
	  kf->impl_call_pre (cd);
	  return false;
	}
      else if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
	       && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
	{
	  if (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
	    unknown_side_effects = true;
	}
      else if (!fndecl_has_gimple_body_p (callee_fndecl)
	       && (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
	       && !fndecl_built_in_p (callee_fndecl))
	unknown_side_effects = true;
    }
  else
    unknown_side_effects = true;

  return unknown_side_effects;
}
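/* For example (a sketch, "my_alloc" being a hypothetical decl):

     extern void *my_alloc (size_t) __attribute__((malloc));
     void *p = my_alloc (n);

   Lacking a body or a known_function match, the lhs is bound to a pointer
   to a fresh heap-allocated region of unknown content, and on_call_pre
   reports unknown side effects, so on_call_post will purge reachable
   state via handle_unrecognized_call.  */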
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the second half.

   Updates to the region_model that should be made *after* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_pre.

   If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
   to purge state.  */

void
region_model::on_call_post (const gcall *call,
			    bool unknown_side_effects,
			    region_model_context *ctxt)
{
  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      call_details cd (call, this, ctxt);
      if (const known_function *kf = get_known_function (callee_fndecl, cd))
	{
	  kf->impl_call_post (cd);
	  return;
	}
      /* Was this fndecl referenced by
	 __attribute__((malloc(FOO)))?  */
      if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
	{
	  impl_deallocation_call (cd);
	  return;
	}
    }

  if (unknown_side_effects)
    handle_unrecognized_call (call, ctxt);
}
/* Purge state involving SVAL from this region_model, using CTXT
   (if non-NULL) to purge other state in a program_state.

   For example, if we're at the def-stmt of an SSA name, then we need to
   purge any state for svalues that involve that SSA name.  This avoids
   false positives in loops, since a symbolic value referring to the
   SSA name will be referring to the previous value of that SSA name.

   For example, in:
     while ((e = hashmap_iter_next(&iter))) {
       struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
       free (e_strbuf->value);
     }
   at the def-stmt of e_8:
     e_8 = hashmap_iter_next (&iter);
   we should purge the "freed" state of:
     INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
   which is the "e_strbuf->value" value from the previous iteration,
   or we will erroneously report a double-free - the "e_8" within it
   refers to the previous value.  */

void
region_model::purge_state_involving (const svalue *sval,
				     region_model_context *ctxt)
{
  if (!sval->can_have_associated_state_p ())
    return;
  m_store.purge_state_involving (sval, m_mgr);
  m_constraints->purge_state_involving (sval);
  m_dynamic_extents.purge_state_involving (sval);
  if (ctxt)
    ctxt->purge_state_involving (sval);
}
/* A pending_note subclass for adding a note about an
   __attribute__((access, ...)) to a diagnostic.  */

class reason_attr_access : public pending_note_subclass<reason_attr_access>
{
public:
  reason_attr_access (tree callee_fndecl, const attr_access &access)
  : m_callee_fndecl (callee_fndecl),
    m_ptr_argno (access.ptrarg),
    m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
  {
  }

  const char *get_kind () const final override { return "reason_attr_access"; }

  void emit () const final override
  {
    inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
	    "parameter %i of %qD marked with attribute %qs",
	    m_ptr_argno + 1, m_callee_fndecl, m_access_str);
  }

  bool operator== (const reason_attr_access &other) const
  {
    return (m_callee_fndecl == other.m_callee_fndecl
	    && m_ptr_argno == other.m_ptr_argno
	    && !strcmp (m_access_str, other.m_access_str));
  }

private:
  tree m_callee_fndecl;
  unsigned m_ptr_argno;
  const char *m_access_str;
};
/* Check CALL a call to external function CALLEE_FNDECL based on
   any __attribute__ ((access, ....) on the latter, complaining to
   CTXT about any issues.

   Currently we merely call check_region_for_write on any regions
   pointed to by arguments marked with a "write_only" or "read_write"
   attribute.  */

void
region_model::
check_external_function_for_access_attr (const gcall *call,
					 tree callee_fndecl,
					 region_model_context *ctxt) const
{
  gcc_assert (call);
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  if (!fntype)
    return;

  if (!TYPE_ATTRIBUTES (fntype))
    return;

  /* Initialize a map of attribute access specifications for arguments
     to the function call.  */
  rdwr_map rdwr_idx;
  init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));

  unsigned argno = 0;

  for (tree iter = TYPE_ARG_TYPES (fntype); iter;
       iter = TREE_CHAIN (iter), ++argno)
    {
      const attr_access* access = rdwr_idx.get (argno);
      if (!access)
	continue;

      /* Ignore any duplicate entry in the map for the size argument.  */
      if (access->ptrarg != argno)
	continue;

      if (access->mode == access_write_only
	  || access->mode == access_read_write)
	{
	  /* Subclass of decorated_region_model_context that
	     adds a note about the attr access to any saved diagnostics.  */
	  class annotating_ctxt : public note_adding_context
	  {
	  public:
	    annotating_ctxt (tree callee_fndecl,
			     const attr_access &access,
			     region_model_context *ctxt)
	    : note_adding_context (ctxt),
	      m_callee_fndecl (callee_fndecl),
	      m_access (access)
	    {
	    }
	    std::unique_ptr<pending_note> make_note () final override
	    {
	      return make_unique<reason_attr_access>
		(m_callee_fndecl, m_access);
	    }
	  private:
	    tree m_callee_fndecl;
	    const attr_access &m_access;
	  };

	  /* Use this ctxt below so that any diagnostics get the
	     note added to them.  */
	  annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);

	  tree ptr_tree = gimple_call_arg (call, access->ptrarg);
	  const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
	  const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
	  check_region_for_write (reg, &my_ctxt);
	  /* We don't use the size arg for now.  */
	}
    }
}
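/* For example (a sketch, "fill_buf" being a hypothetical decl):

     extern void fill_buf (char *buf, size_t n)
       __attribute__((access (write_only, 1, 2)));

     fill_buf ((char *)"str", 4);

   The write_only attribute on parameter 1 leads to check_region_for_write
   on the string literal's region, and any resulting diagnostic gets a
   reason_attr_access note pointing at the attribute.  */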
/* Handle a call CALL to a function with unknown behavior.

   Traverse the regions in this model, determining what regions are
   reachable from pointer arguments to CALL and from global variables,
   recursively.

   Set all reachable regions to new unknown values and purge sm-state
   from their values, and from values that point to them.  */

void
region_model::handle_unrecognized_call (const gcall *call,
					region_model_context *ctxt)
{
  tree fndecl = get_fndecl_for_call (call, ctxt);

  if (fndecl && ctxt)
    check_external_function_for_access_attr (call, fndecl, ctxt);

  reachable_regions reachable_regs (this);

  /* Determine the reachable regions and their mutability.  */
  {
    /* Add globals and regions that already escaped in previous
       unknown calls.  */
    m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			      &reachable_regs);

    /* Params that are pointers.  */
    tree iter_param_types = NULL_TREE;
    if (fndecl)
      iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
    for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
      {
	/* Track expected param type, where available.  */
	tree param_type = NULL_TREE;
	if (iter_param_types)
	  {
	    param_type = TREE_VALUE (iter_param_types);
	    gcc_assert (param_type);
	    iter_param_types = TREE_CHAIN (iter_param_types);
	  }

	tree parm = gimple_call_arg (call, arg_idx);
	const svalue *parm_sval = get_rvalue (parm, ctxt);
	reachable_regs.handle_parm (parm_sval, param_type);
      }
  }

  uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;

  /* Purge sm-state for the svalues that were reachable,
     both in non-mutable and mutable form.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, false);
    }
  for (svalue_set::iterator iter
	 = reachable_regs.begin_mutable_svals ();
       iter != reachable_regs.end_mutable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, true);
      if (uncertainty)
	uncertainty->on_mutable_sval_at_unknown_call (sval);
    }

  /* Mark any clusters that have escaped.  */
  reachable_regs.mark_escaped_clusters (ctxt);

  /* Update bindings for all clusters that have escaped, whether above,
     or previously.  */
  m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
			     conjured_purge (this, ctxt));

  /* Purge dynamic extents from any regions that have escaped mutably:
     realloc could have been called on them.  */
  for (hash_set<const region *>::iterator
	 iter = reachable_regs.begin_mutable_base_regs ();
       iter != reachable_regs.end_mutable_base_regs ();
       ++iter)
    {
      const region *base_reg = (*iter);
      unset_dynamic_extents (base_reg);
    }
}
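/* For example (a sketch):

     extern void unknown_fn (int *);
     int x = 42;
     unknown_fn (&x);

   The region for "x" is reachable from the pointer argument, so its
   binding is conservatively replaced: after the call, the analyzer no
   longer knows that x == 42, since unknown_fn could have written to it.  */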
/* Traverse the regions in this model, determining what regions are
   reachable from the store and populating *OUT.

   If EXTRA_SVAL is non-NULL, treat it as an additional "root"
   for reachability (for handling return values from functions when
   analyzing return of the only function on the stack).

   If UNCERTAINTY is non-NULL, treat any svalues that were recorded
   within it as being maybe-bound as additional "roots" for reachability.

   Find svalues that haven't leaked.  */

void
region_model::get_reachable_svalues (svalue_set *out,
				     const svalue *extra_sval,
				     const uncertainty_t *uncertainty)
{
  reachable_regions reachable_regs (this);

  /* Add globals and regions that already escaped in previous
     unknown calls.  */
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);

  if (extra_sval)
    reachable_regs.handle_sval (extra_sval);

  if (uncertainty)
    for (uncertainty_t::iterator iter
	   = uncertainty->begin_maybe_bound_svals ();
	 iter != uncertainty->end_maybe_bound_svals (); ++iter)
      reachable_regs.handle_sval (*iter);

  /* Get regions for locals that have explicitly bound values.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Populate *OUT based on the values that were reachable.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    out->add (*iter);
}
/* Update this model for the RETURN_STMT, using CTXT to report any
   diagnostics.  */

void
region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
{
  tree callee = get_current_function ()->decl;
  tree lhs = DECL_RESULT (callee);
  tree rhs = gimple_return_retval (return_stmt);

  if (lhs && rhs)
    {
      const svalue *sval = get_rvalue (rhs, ctxt);
      const region *ret_reg = get_lvalue (lhs, ctxt);
      set_value (ret_reg, sval, ctxt);
    }
}
/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
   ENODE, using CTXT to report any diagnostics.

   This is for the initial direct invocation of setjmp/sigsetjmp (which returns
   0), as opposed to any second return due to longjmp/siglongjmp.  */

void
region_model::on_setjmp (const gcall *call, const exploded_node *enode,
			 region_model_context *ctxt)
{
  const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
  const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
					ctxt);

  /* Create a setjmp_svalue for this call and store it in BUF_REG's
     region.  */
  if (buf_reg)
    {
      setjmp_record r (enode, call);
      const svalue *sval
	= m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
      set_value (buf_reg, sval, ctxt);
    }

  /* Direct calls to setjmp return 0.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const svalue *new_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, new_sval, ctxt);
    }
}
/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
   to a "setjmp" at SETJMP_CALL where the final stack depth should be
   SETJMP_STACK_DEPTH.  Pop any stack frames.  Leak detection is *not*
   done, and should be done by the caller.  */

void
region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
			  int setjmp_stack_depth, region_model_context *ctxt)
{
  /* Evaluate the val, using the frame of the "longjmp".  */
  tree fake_retval = gimple_call_arg (longjmp_call, 1);
  const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);

  /* Pop any frames until we reach the stack depth of the function where
     setjmp was called.  */
  gcc_assert (get_stack_depth () >= setjmp_stack_depth);
  while (get_stack_depth () > setjmp_stack_depth)
    pop_frame (NULL, NULL, ctxt);

  gcc_assert (get_stack_depth () == setjmp_stack_depth);

  /* Assign to LHS of "setjmp" in new_state.  */
  if (tree lhs = gimple_call_lhs (setjmp_call))
    {
      /* Passing 0 as the val to longjmp leads to setjmp returning 1.  */
      const svalue *zero_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
      tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
      /* If we have 0, use 1.  */
      if (eq_zero.is_true ())
	{
	  const svalue *one_sval
	    = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
	  fake_retval_sval = one_sval;
	}
      else
	{
	  /* Otherwise note that the value is nonzero.  */
	  m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
	}

      /* Decorate the return value from setjmp as being unmergeable,
	 so that we don't attempt to merge states with it as zero
	 with states in which it's nonzero, leading to a clean distinction
	 in the exploded_graph between the first return and the second
	 return.  */
      fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);

      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, fake_retval_sval, ctxt);
    }
}
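/* For example (a sketch of the modelled semantics):

     jmp_buf env;
     int r = setjmp (env);   // first, direct return: r is 0 (on_setjmp)
     ...
     longjmp (env, 0);       // rewind: r becomes 1, since C requires a
                             // zero val passed to longjmp to act as 1

   Intervening frames are popped, and the unmergeable return value keeps
   the first and second returns distinct in the exploded_graph.  */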
1942 /* Update this region_model for a phi stmt of the form
1943 LHS = PHI <...RHS...>.
1944 where RHS is for the appropriate edge.
1945 Get state from OLD_STATE so that all of the phi stmts for a basic block
1946 are effectively handled simultaneously. */
1948 void
1949 region_model::handle_phi (const gphi *phi,
1950 tree lhs, tree rhs,
1951 const region_model &old_state,
1952 region_model_context *ctxt)
1954 /* For now, don't bother tracking the .MEM SSA names. */
1955 if (tree var = SSA_NAME_VAR (lhs))
1956 if (TREE_CODE (var) == VAR_DECL)
1957 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
1958 return;
1960 const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
1961 const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
1963 set_value (dst_reg, src_sval, ctxt);
1965 if (ctxt)
1966 ctxt->on_phi (phi, rhs);
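/* For instance (a hypothetical sketch), a loop such as:

     int i = 0;
     while (i < n)
       i++;

   gives the loop header a phi along the lines of:

     i_1 = PHI <0(entry), i_2(latch)>

   and handle_phi is invoked with the RHS selected for the incoming
   edge (0 or i_2), reading it from OLD_STATE so that the phis of a
   basic block do not observe each other's partial updates.  */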
1969 /* Implementation of region_model::get_lvalue; the latter adds type-checking.
1971 Get the id of the region for PV within this region_model,
1972 emitting any diagnostics to CTXT. */
1974 const region *
1975 region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
1977 tree expr = pv.m_tree;
1979 gcc_assert (expr);
1981 switch (TREE_CODE (expr))
1983 default:
1984 return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
1985 dump_location_t ());
1987 case ARRAY_REF:
1989 tree array = TREE_OPERAND (expr, 0);
1990 tree index = TREE_OPERAND (expr, 1);
1992 const region *array_reg = get_lvalue (array, ctxt);
1993 const svalue *index_sval = get_rvalue (index, ctxt);
1994 return m_mgr->get_element_region (array_reg,
1995 TREE_TYPE (TREE_TYPE (array)),
1996 index_sval);
1998 break;
2000 case BIT_FIELD_REF:
2002 tree inner_expr = TREE_OPERAND (expr, 0);
2003 const region *inner_reg = get_lvalue (inner_expr, ctxt);
2004 tree num_bits = TREE_OPERAND (expr, 1);
2005 tree first_bit_offset = TREE_OPERAND (expr, 2);
2006 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2007 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2008 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2009 TREE_INT_CST_LOW (num_bits));
2010 return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
2012 break;
2014 case MEM_REF:
2016 tree ptr = TREE_OPERAND (expr, 0);
2017 tree offset = TREE_OPERAND (expr, 1);
2018 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2019 const svalue *offset_sval = get_rvalue (offset, ctxt);
2020 const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
2021 return m_mgr->get_offset_region (star_ptr,
2022 TREE_TYPE (expr),
2023 offset_sval);
2025 break;
2027 case FUNCTION_DECL:
2028 return m_mgr->get_region_for_fndecl (expr);
2030 case LABEL_DECL:
2031 return m_mgr->get_region_for_label (expr);
2033 case VAR_DECL:
2034 /* Handle globals. */
2035 if (is_global_var (expr))
2036 return m_mgr->get_region_for_global (expr);
2038 /* Fall through. */
2040 case SSA_NAME:
2041 case PARM_DECL:
2042 case RESULT_DECL:
2044 gcc_assert (TREE_CODE (expr) == SSA_NAME
2045 || TREE_CODE (expr) == PARM_DECL
2046 || TREE_CODE (expr) == VAR_DECL
2047 || TREE_CODE (expr) == RESULT_DECL);
2049 int stack_index = pv.m_stack_depth;
2050 const frame_region *frame = get_frame_at_index (stack_index);
2051 gcc_assert (frame);
2052 return frame->get_region_for_local (m_mgr, expr, ctxt);
2055 case COMPONENT_REF:
2057 /* obj.field */
2058 tree obj = TREE_OPERAND (expr, 0);
2059 tree field = TREE_OPERAND (expr, 1);
2060 const region *obj_reg = get_lvalue (obj, ctxt);
2061 return m_mgr->get_field_region (obj_reg, field);
2063 break;
2065 case STRING_CST:
2066 return m_mgr->get_region_for_string (expr);
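/* As a rough illustration of the mapping above (hypothetical
   examples):

     arr[i]     (ARRAY_REF)     -> element_region within arr's region
     *(p + 4)   (MEM_REF)       -> offset_region within the pointee of p
     obj.field  (COMPONENT_REF) -> field_region within obj's region
     local      (VAR_DECL)      -> a region within the frame_region
                                   given by pv.m_stack_depth.  */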
2070 /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
2072 static void
2073 assert_compat_types (tree src_type, tree dst_type)
2075 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2077 #if CHECKING_P
2078 if (!(useless_type_conversion_p (src_type, dst_type)))
2079 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
2080 #endif
2084 /* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
2086 bool
2087 compat_types_p (tree src_type, tree dst_type)
2089 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2090 if (!(useless_type_conversion_p (src_type, dst_type)))
2091 return false;
2092 return true;
2095 /* Get the region for PV within this region_model,
2096 emitting any diagnostics to CTXT. */
2098 const region *
2099 region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
2101 if (pv.m_tree == NULL_TREE)
2102 return NULL;
2104 const region *result_reg = get_lvalue_1 (pv, ctxt);
2105 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
2106 return result_reg;
2109 /* Get the region for EXPR within this region_model (assuming the most
2110 recent stack frame if it's a local). */
2112 const region *
2113 region_model::get_lvalue (tree expr, region_model_context *ctxt) const
2115 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2118 /* Implementation of region_model::get_rvalue; the latter adds type-checking.
2120 Get the value of PV within this region_model,
2121 emitting any diagnostics to CTXT. */
2123 const svalue *
2124 region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
2126 gcc_assert (pv.m_tree);
2128 switch (TREE_CODE (pv.m_tree))
2130 default:
2131 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
2133 case ADDR_EXPR:
2135 /* "&EXPR". */
2136 tree expr = pv.m_tree;
2137 tree op0 = TREE_OPERAND (expr, 0);
2138 const region *expr_reg = get_lvalue (op0, ctxt);
2139 return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
2141 break;
2143 case BIT_FIELD_REF:
2145 tree expr = pv.m_tree;
2146 tree op0 = TREE_OPERAND (expr, 0);
2147 const region *reg = get_lvalue (op0, ctxt);
2148 tree num_bits = TREE_OPERAND (expr, 1);
2149 tree first_bit_offset = TREE_OPERAND (expr, 2);
2150 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2151 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2152 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2153 TREE_INT_CST_LOW (num_bits));
2154 return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
2157 case SSA_NAME:
2158 case VAR_DECL:
2159 case PARM_DECL:
2160 case RESULT_DECL:
2161 case ARRAY_REF:
2163 const region *reg = get_lvalue (pv, ctxt);
2164 return get_store_value (reg, ctxt);
2167 case REALPART_EXPR:
2168 case IMAGPART_EXPR:
2169 case VIEW_CONVERT_EXPR:
2171 tree expr = pv.m_tree;
2172 tree arg = TREE_OPERAND (expr, 0);
2173 const svalue *arg_sval = get_rvalue (arg, ctxt);
2174 const svalue *sval_unaryop
2175 = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
2176 arg_sval);
2177 return sval_unaryop;
2180 case INTEGER_CST:
2181 case REAL_CST:
2182 case COMPLEX_CST:
2183 case VECTOR_CST:
2184 case STRING_CST:
2185 return m_mgr->get_or_create_constant_svalue (pv.m_tree);
2187 case POINTER_PLUS_EXPR:
2189 tree expr = pv.m_tree;
2190 tree ptr = TREE_OPERAND (expr, 0);
2191 tree offset = TREE_OPERAND (expr, 1);
2192 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2193 const svalue *offset_sval = get_rvalue (offset, ctxt);
2194 const svalue *sval_binop
2195 = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
2196 ptr_sval, offset_sval);
2197 return sval_binop;
2200 /* Binary ops. */
2201 case PLUS_EXPR:
2202 case MULT_EXPR:
2204 tree expr = pv.m_tree;
2205 tree arg0 = TREE_OPERAND (expr, 0);
2206 tree arg1 = TREE_OPERAND (expr, 1);
2207 const svalue *arg0_sval = get_rvalue (arg0, ctxt);
2208 const svalue *arg1_sval = get_rvalue (arg1, ctxt);
2209 const svalue *sval_binop
2210 = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
2211 arg0_sval, arg1_sval);
2212 return sval_binop;
2215 case COMPONENT_REF:
2216 case MEM_REF:
2218 const region *ref_reg = get_lvalue (pv, ctxt);
2219 return get_store_value (ref_reg, ctxt);
2221 case OBJ_TYPE_REF:
2223 tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
2224 return get_rvalue (expr, ctxt);
2229 /* Get the value of PV within this region_model,
2230 emitting any diagnostics to CTXT. */
2232 const svalue *
2233 region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
2235 if (pv.m_tree == NULL_TREE)
2236 return NULL;
2238 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
2240 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
2242 result_sval = check_for_poison (result_sval, pv.m_tree, NULL, ctxt);
2244 return result_sval;
2247 /* Get the value of EXPR within this region_model (assuming the most
2248 recent stack frame if it's a local). */
2250 const svalue *
2251 region_model::get_rvalue (tree expr, region_model_context *ctxt) const
2253 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2256 /* Return true if this model is on a path with "main" as the entrypoint
2257 (as opposed to one in which we're merely analyzing a subset of the
2258 path through the code). */
2260 bool
2261 region_model::called_from_main_p () const
2263 if (!m_current_frame)
2264 return false;
2265 /* Determine if the oldest stack frame in this model is for "main". */
2266 const frame_region *frame0 = get_frame_at_index (0);
2267 gcc_assert (frame0);
2268 return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
2271 /* Subroutine of region_model::get_store_value for when REG is (or is within)
2272 a global variable that hasn't been touched since the start of this path
2273 (or was implicitly touched due to a call to an unknown function). */
2275 const svalue *
2276 region_model::get_initial_value_for_global (const region *reg) const
2278 /* Get the decl that REG is for (or is within). */
2279 const decl_region *base_reg
2280 = reg->get_base_region ()->dyn_cast_decl_region ();
2281 gcc_assert (base_reg);
2282 tree decl = base_reg->get_decl ();
2284 /* Special-case: to avoid having to explicitly update all previously
2285 untracked globals when calling an unknown fn, they implicitly have
2286 an unknown value if an unknown call has occurred, unless this is
2287 static to-this-TU and hasn't escaped. Globals that have escaped
2288 are explicitly tracked, so we shouldn't hit this case for them. */
2289 if (m_store.called_unknown_fn_p ()
2290 && TREE_PUBLIC (decl)
2291 && !TREE_READONLY (decl))
2292 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
2294 /* If we are on a path from the entrypoint of "main" and we have a
2295 global decl defined in this TU that hasn't been touched yet, then
2296 the initial value of REG can be taken from the initialization value
2297 of the decl. */
2298 if (called_from_main_p () || TREE_READONLY (decl))
2300 /* Attempt to get the initializer value for base_reg. */
2301 if (const svalue *base_reg_init
2302 = base_reg->get_svalue_for_initializer (m_mgr))
2304 if (reg == base_reg)
2305 return base_reg_init;
2306 else
2308 /* Get the value for REG within base_reg_init. */
2309 binding_cluster c (base_reg);
2310 c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
2311 const svalue *sval
2312 = c.get_any_binding (m_mgr->get_store_manager (), reg);
2313 if (sval)
2315 if (reg->get_type ())
2316 sval = m_mgr->get_or_create_cast (reg->get_type (),
2317 sval);
2318 return sval;
2324 /* Otherwise, return INIT_VAL(REG). */
2325 return m_mgr->get_or_create_initial_value (reg);
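/* For example (a hypothetical sketch): given

     int g = 42;

     int test (void) { return g; }

   on a path whose entrypoint is "main" with no intervening call to an
   unknown function, reading "g" yields the constant 42 from its
   initializer; after an unknown call it yields an unknown_svalue
   (since "g" is non-static and non-const); on paths not known to
   start at "main", it is INIT_VAL(g).  */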
2328 /* Get a value for REG, looking it up in the store, or otherwise falling
2329 back to "initial" or "unknown" values.
2330 Use CTXT to report any warnings associated with reading from REG. */
2332 const svalue *
2333 region_model::get_store_value (const region *reg,
2334 region_model_context *ctxt) const
2336 /* Getting the value of an empty region gives an unknown_svalue. */
2337 if (reg->empty_p ())
2338 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
2340 check_region_for_read (reg, ctxt);
2342 /* Special-case: handle var_decls in the constant pool. */
2343 if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
2344 if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
2345 return sval;
2347 const svalue *sval
2348 = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
2349 if (sval)
2351 if (reg->get_type ())
2352 sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
2353 return sval;
2356 /* Special-case: read at a constant index within a STRING_CST. */
2357 if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
2358 if (tree byte_offset_cst
2359 = offset_reg->get_byte_offset ()->maybe_get_constant ())
2360 if (const string_region *str_reg
2361 = reg->get_parent_region ()->dyn_cast_string_region ())
2363 tree string_cst = str_reg->get_string_cst ();
2364 if (const svalue *char_sval
2365 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2366 byte_offset_cst))
2367 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
2370 /* Special-case: read the initial char of a STRING_CST. */
2371 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
2372 if (const string_region *str_reg
2373 = cast_reg->get_original_region ()->dyn_cast_string_region ())
2375 tree string_cst = str_reg->get_string_cst ();
2376 tree byte_offset_cst = build_int_cst (integer_type_node, 0);
2377 if (const svalue *char_sval
2378 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2379 byte_offset_cst))
2380 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
2383 /* Otherwise we implicitly have the initial value of the region
2384 (if the cluster had been touched, binding_cluster::get_any_binding
2385 would have returned UNKNOWN, and we would already have returned
2386 that above). */
2388 /* Handle globals. */
2389 if (reg->get_base_region ()->get_parent_region ()->get_kind ()
2390 == RK_GLOBALS)
2391 return get_initial_value_for_global (reg);
2393 return m_mgr->get_or_create_initial_value (reg);
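/* The string special cases above mean that e.g. (hypothetically)
   reading "foo"[1], at the constant byte offset 1, folds to the
   constant 'o' rather than to a symbolic value.  */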
2396 /* Return false if REG does not exist, true if it may do.
2397 This is for detecting regions within the stack that don't exist anymore
2398 after frames are popped. */
2400 bool
2401 region_model::region_exists_p (const region *reg) const
2403 /* If within a stack frame, check that the stack frame is live. */
2404 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
2406 /* Check that the current frame is the enclosing frame, or is called
2407 by it. */
2408 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
2409 iter_frame = iter_frame->get_calling_frame ())
2410 if (iter_frame == enclosing_frame)
2411 return true;
2412 return false;
2415 return true;
2418 /* Get a region for referencing PTR_SVAL, creating a region if need be, and
2419 potentially generating warnings via CTXT.
2420 PTR_SVAL must be of pointer type.
2421 PTR_TREE, if non-NULL, can be used when emitting diagnostics. */
2423 const region *
2424 region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
2425 region_model_context *ctxt) const
2427 gcc_assert (ptr_sval);
2428 gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));
2430 /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
2431 as a constraint. This suppresses false positives from
2432 -Wanalyzer-null-dereference for the case where we later have an
2433 if (PTR_SVAL) that would occur if we considered the false branch
2434 and transitioned the malloc state machine from start->null. */
2435 tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
2436 const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
2437 m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
2439 switch (ptr_sval->get_kind ())
2441 default:
2442 break;
2444 case SK_REGION:
2446 const region_svalue *region_sval
2447 = as_a <const region_svalue *> (ptr_sval);
2448 return region_sval->get_pointee ();
2451 case SK_BINOP:
2453 const binop_svalue *binop_sval
2454 = as_a <const binop_svalue *> (ptr_sval);
2455 switch (binop_sval->get_op ())
2457 case POINTER_PLUS_EXPR:
2459 /* If we have a symbolic value expressing pointer arithmetic,
2460 try to convert it to a suitable region. */
2461 const region *parent_region
2462 = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
2463 const svalue *offset = binop_sval->get_arg1 ();
2464 tree type = TREE_TYPE (ptr_sval->get_type ());
2465 return m_mgr->get_offset_region (parent_region, type, offset);
2467 default:
2468 break;
2471 break;
2473 case SK_POISONED:
2475 if (ctxt)
2477 tree ptr = get_representative_tree (ptr_sval);
2478 /* If we can't get a representative tree for PTR_SVAL
2479 (e.g. if it hasn't been bound into the store), then
2480 fall back on PTR_TREE, if non-NULL. */
2481 if (!ptr)
2482 ptr = ptr_tree;
2483 if (ptr)
2485 const poisoned_svalue *poisoned_sval
2486 = as_a <const poisoned_svalue *> (ptr_sval);
2487 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
2488 ctxt->warn (make_unique<poisoned_value_diagnostic>
2489 (ptr, pkind, NULL));
2493 break;
2496 return m_mgr->get_symbolic_region (ptr_sval);
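/* A hypothetical sketch of the false positive suppressed by the
   non-NULL constraint added above:

     int f (int *p)
     {
       int v = *p;
       if (p)
         return v;
       return 0;
     }

   The dereference constrains "p != NULL", so the false branch of
   "if (p)" is rejected as contradictory instead of transitioning the
   malloc state machine to "null" and triggering spurious reports.  */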
2499 /* Attempt to get BITS within any value of REG, as TYPE.
2500 In particular, extract values from compound_svalues for the case
2501 where there's a concrete binding at BITS.
2502 Return an unknown svalue if we can't handle the given case.
2503 Use CTXT to report any warnings associated with reading from REG. */
2505 const svalue *
2506 region_model::get_rvalue_for_bits (tree type,
2507 const region *reg,
2508 const bit_range &bits,
2509 region_model_context *ctxt) const
2511 const svalue *sval = get_store_value (reg, ctxt);
2512 return m_mgr->get_or_create_bits_within (type, bits, sval);
2515 /* A subclass of pending_diagnostic for complaining about writes to
2516 constant regions of memory. */
2518 class write_to_const_diagnostic
2519 : public pending_diagnostic_subclass<write_to_const_diagnostic>
2521 public:
2522 write_to_const_diagnostic (const region *reg, tree decl)
2523 : m_reg (reg), m_decl (decl)
2526 const char *get_kind () const final override
2528 return "write_to_const_diagnostic";
2531 bool operator== (const write_to_const_diagnostic &other) const
2533 return (m_reg == other.m_reg
2534 && m_decl == other.m_decl);
2537 int get_controlling_option () const final override
2539 return OPT_Wanalyzer_write_to_const;
2542 bool emit (rich_location *rich_loc) final override
2544 auto_diagnostic_group d;
2545 bool warned;
2546 switch (m_reg->get_kind ())
2548 default:
2549 warned = warning_at (rich_loc, get_controlling_option (),
2550 "write to %<const%> object %qE", m_decl);
2551 break;
2552 case RK_FUNCTION:
2553 warned = warning_at (rich_loc, get_controlling_option (),
2554 "write to function %qE", m_decl);
2555 break;
2556 case RK_LABEL:
2557 warned = warning_at (rich_loc, get_controlling_option (),
2558 "write to label %qE", m_decl);
2559 break;
2561 if (warned)
2562 inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
2563 return warned;
2566 label_text describe_final_event (const evdesc::final_event &ev) final override
2568 switch (m_reg->get_kind ())
2570 default:
2571 return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
2572 case RK_FUNCTION:
2573 return ev.formatted_print ("write to function %qE here", m_decl);
2574 case RK_LABEL:
2575 return ev.formatted_print ("write to label %qE here", m_decl);
2579 private:
2580 const region *m_reg;
2581 tree m_decl;
2584 /* A subclass of pending_diagnostic for complaining about writes to
2585 string literals. */
2587 class write_to_string_literal_diagnostic
2588 : public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
2590 public:
2591 write_to_string_literal_diagnostic (const region *reg)
2592 : m_reg (reg)
2595 const char *get_kind () const final override
2597 return "write_to_string_literal_diagnostic";
2600 bool operator== (const write_to_string_literal_diagnostic &other) const
2602 return m_reg == other.m_reg;
2605 int get_controlling_option () const final override
2607 return OPT_Wanalyzer_write_to_string_literal;
2610 bool emit (rich_location *rich_loc) final override
2612 return warning_at (rich_loc, get_controlling_option (),
2613 "write to string literal");
2614 /* Ideally we would show the location of the STRING_CST as well,
2615 but it is not available at this point. */
2618 label_text describe_final_event (const evdesc::final_event &ev) final override
2620 return ev.formatted_print ("write to string literal here");
2623 private:
2624 const region *m_reg;
2627 /* Use CTXT to warn if DEST_REG is a region that shouldn't be written to. */
2629 void
2630 region_model::check_for_writable_region (const region* dest_reg,
2631 region_model_context *ctxt) const
2633 /* Fail gracefully if CTXT is NULL. */
2634 if (!ctxt)
2635 return;
2637 const region *base_reg = dest_reg->get_base_region ();
2638 switch (base_reg->get_kind ())
2640 default:
2641 break;
2642 case RK_FUNCTION:
2644 const function_region *func_reg = as_a <const function_region *> (base_reg);
2645 tree fndecl = func_reg->get_fndecl ();
2646 ctxt->warn (make_unique<write_to_const_diagnostic>
2647 (func_reg, fndecl));
2649 break;
2650 case RK_LABEL:
2652 const label_region *label_reg = as_a <const label_region *> (base_reg);
2653 tree label = label_reg->get_label ();
2654 ctxt->warn (make_unique<write_to_const_diagnostic>
2655 (label_reg, label));
2657 break;
2658 case RK_DECL:
2660 const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
2661 tree decl = decl_reg->get_decl ();
2662 /* Warn about writes to const globals.
2663 Don't warn for writes to const locals, and params in particular,
2664 since we would warn in push_frame when setting them up (e.g. the
2665 "this" param is "T* const"). */
2666 if (TREE_READONLY (decl)
2667 && is_global_var (decl))
2668 ctxt->warn (make_unique<write_to_const_diagnostic> (dest_reg, decl));
2670 break;
2671 case RK_STRING:
2672 ctxt->warn (make_unique<write_to_string_literal_diagnostic> (dest_reg));
2673 break;
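/* Hypothetical examples of writes the above would complain about:

     const int c = 42;
     void f1 (void) { *(int *)&c = 1; }        -> -Wanalyzer-write-to-const
     void f2 (void) { *(char *)"str" = 'S'; }  -> -Wanalyzer-write-to-string-literal

   whereas writes to const locals and params are deliberately not
   flagged, per the comment in the RK_DECL case.  */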
2677 /* Get the capacity of REG in bytes. */
2679 const svalue *
2680 region_model::get_capacity (const region *reg) const
2682 switch (reg->get_kind ())
2684 default:
2685 break;
2686 case RK_DECL:
2688 const decl_region *decl_reg = as_a <const decl_region *> (reg);
2689 tree decl = decl_reg->get_decl ();
2690 if (TREE_CODE (decl) == SSA_NAME)
2692 tree type = TREE_TYPE (decl);
2693 tree size = TYPE_SIZE (type);
2694 return get_rvalue (size, NULL);
2696 else
2698 tree size = decl_init_size (decl, false);
2699 if (size)
2700 return get_rvalue (size, NULL);
2703 break;
2704 case RK_SIZED:
2705 /* Look through sized regions to get at the capacity
2706 of the underlying regions. */
2707 return get_capacity (reg->get_parent_region ());
2710 if (const svalue *recorded = get_dynamic_extents (reg))
2711 return recorded;
2713 return m_mgr->get_or_create_unknown_svalue (sizetype);
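/* E.g. (hypothetically): for "char buf[10];" the capacity of buf's
   decl_region is the constant 10, via decl_init_size; for a
   heap-allocated region the capacity comes from the recorded dynamic
   extents, if any, and is otherwise unknown.  */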
2716 /* Return the string size, including the 0-terminator, if SVAL is a
2717 constant_svalue holding a string. Otherwise, return an unknown_svalue. */
2719 const svalue *
2720 region_model::get_string_size (const svalue *sval) const
2722 tree cst = sval->maybe_get_constant ();
2723 if (!cst || TREE_CODE (cst) != STRING_CST)
2724 return m_mgr->get_or_create_unknown_svalue (size_type_node);
2726 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
2727 return m_mgr->get_or_create_constant_svalue (out);
2730 /* Return the string size, including the 0-terminator, if REG is a
2731 string_region. Otherwise, return an unknown_svalue. */
2733 const svalue *
2734 region_model::get_string_size (const region *reg) const
2736 const string_region *str_reg = dyn_cast <const string_region *> (reg);
2737 if (!str_reg)
2738 return m_mgr->get_or_create_unknown_svalue (size_type_node);
2740 tree cst = str_reg->get_string_cst ();
2741 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
2742 return m_mgr->get_or_create_constant_svalue (out);
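/* E.g. (hypothetically): for the string literal "foo",
   TREE_STRING_LENGTH is 4, counting the terminating NUL.  */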
2745 /* If CTXT is non-NULL, use it to warn about any problems accessing REG,
2746 using DIR to determine if this access is a read or write. */
2748 void
2749 region_model::check_region_access (const region *reg,
2750 enum access_direction dir,
2751 region_model_context *ctxt) const
2753 /* Fail gracefully if CTXT is NULL. */
2754 if (!ctxt)
2755 return;
2757 check_region_for_taint (reg, dir, ctxt);
2758 check_region_bounds (reg, dir, ctxt);
2760 switch (dir)
2762 default:
2763 gcc_unreachable ();
2764 case DIR_READ:
2765 /* Currently a no-op. */
2766 break;
2767 case DIR_WRITE:
2768 check_for_writable_region (reg, ctxt);
2769 break;
2773 /* If CTXT is non-NULL, use it to warn about any problems writing to REG. */
2775 void
2776 region_model::check_region_for_write (const region *dest_reg,
2777 region_model_context *ctxt) const
2779 check_region_access (dest_reg, DIR_WRITE, ctxt);
2782 /* If CTXT is non-NULL, use it to warn about any problems reading from REG. */
2784 void
2785 region_model::check_region_for_read (const region *src_reg,
2786 region_model_context *ctxt) const
2788 check_region_access (src_reg, DIR_READ, ctxt);
2791 /* Concrete subclass for casts of pointers that lead to trailing bytes. */
2793 class dubious_allocation_size
2794 : public pending_diagnostic_subclass<dubious_allocation_size>
2796 public:
2797 dubious_allocation_size (const region *lhs, const region *rhs)
2798 : m_lhs (lhs), m_rhs (rhs), m_expr (NULL_TREE),
2799 m_has_allocation_event (false)
2802 dubious_allocation_size (const region *lhs, const region *rhs,
2803 tree expr)
2804 : m_lhs (lhs), m_rhs (rhs), m_expr (expr),
2805 m_has_allocation_event (false)
2808 const char *get_kind () const final override
2810 return "dubious_allocation_size";
2813 bool operator== (const dubious_allocation_size &other) const
2815 return m_lhs == other.m_lhs && m_rhs == other.m_rhs
2816 && pending_diagnostic::same_tree_p (m_expr, other.m_expr);
2819 int get_controlling_option () const final override
2821 return OPT_Wanalyzer_allocation_size;
2824 bool emit (rich_location *rich_loc) final override
2826 diagnostic_metadata m;
2827 m.add_cwe (131);
2829 return warning_meta (rich_loc, m, get_controlling_option (),
2830 "allocated buffer size is not a multiple"
2831 " of the pointee's size");
2834 label_text describe_final_event (const evdesc::final_event &ev) final
2835 override
2837 tree pointee_type = TREE_TYPE (m_lhs->get_type ());
2838 if (m_has_allocation_event)
2839 return ev.formatted_print ("assigned to %qT here;"
2840 " %<sizeof (%T)%> is %qE",
2841 m_lhs->get_type (), pointee_type,
2842 size_in_bytes (pointee_type));
2843 /* Fallback: Typically, we should always see an allocation_event
2844 before. */
2845 if (m_expr)
2847 if (TREE_CODE (m_expr) == INTEGER_CST)
2848 return ev.formatted_print ("allocated %E bytes and assigned to"
2849 " %qT here; %<sizeof (%T)%> is %qE",
2850 m_expr, m_lhs->get_type (), pointee_type,
2851 size_in_bytes (pointee_type));
2852 else
2853 return ev.formatted_print ("allocated %qE bytes and assigned to"
2854 " %qT here; %<sizeof (%T)%> is %qE",
2855 m_expr, m_lhs->get_type (), pointee_type,
2856 size_in_bytes (pointee_type));
2859 return ev.formatted_print ("allocated and assigned to %qT here;"
2860 " %<sizeof (%T)%> is %qE",
2861 m_lhs->get_type (), pointee_type,
2862 size_in_bytes (pointee_type));
2865 void
2866 add_region_creation_events (const region *,
2867 tree capacity,
2868 const event_loc_info &loc_info,
2869 checker_path &emission_path) final override
2871 emission_path.add_event
2872 (make_unique<region_creation_event_allocation_size> (capacity, loc_info));
2874 m_has_allocation_event = true;
2877 void mark_interesting_stuff (interesting_t *interest) final override
2879 interest->add_region_creation (m_rhs);
2882 private:
2883 const region *m_lhs;
2884 const region *m_rhs;
2885 const tree m_expr;
2886 bool m_has_allocation_event;
2889 /* Return true if constant capacity CST is compatible with POINTEE_SIZE_TREE. */
2891 static bool
2892 capacity_compatible_with_type (tree cst, tree pointee_size_tree,
2893 bool is_struct)
2895 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
2896 gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);
2898 unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
2899 unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);
2901 if (is_struct)
2902 return alloc_size == 0 || alloc_size >= pointee_size;
2903 return alloc_size % pointee_size == 0;
2906 static bool
2907 capacity_compatible_with_type (tree cst, tree pointee_size_tree)
2909 return capacity_compatible_with_type (cst, pointee_size_tree, false);
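/* E.g. (hypothetically): with a pointee size of 4 (an int32_t), a
   constant capacity of 8 is compatible (8 % 4 == 0) whereas 10 is
   not; for a struct, any capacity of at least the struct's size (or
   zero) is accepted, e.g. to allow for trailing data.  */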
2912 /* Checks whether SVAL could be a multiple of SIZE_CST.
2914 It works by visiting all svalues inside SVAL until it reaches
2915 atomic nodes. From those, it goes back up again and adds each
2916 node that might be a multiple of SIZE_CST to the RESULT_SET. */
2918 class size_visitor : public visitor
2920 public:
2921 size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
2922 : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
2924 m_root_sval->accept (this);
2927 bool get_result ()
2929 return result_set.contains (m_root_sval);
2932 void visit_constant_svalue (const constant_svalue *sval) final override
2934 check_constant (sval->get_constant (), sval);
2937 void visit_unknown_svalue (const unknown_svalue *sval ATTRIBUTE_UNUSED)
2938 final override
2940 result_set.add (sval);
2943 void visit_poisoned_svalue (const poisoned_svalue *sval ATTRIBUTE_UNUSED)
2944 final override
2946 result_set.add (sval);
2949 void visit_unaryop_svalue (const unaryop_svalue *sval) final override
2951 const svalue *arg = sval->get_arg ();
2952 if (result_set.contains (arg))
2953 result_set.add (sval);
2956 void visit_binop_svalue (const binop_svalue *sval) final override
2958 const svalue *arg0 = sval->get_arg0 ();
2959 const svalue *arg1 = sval->get_arg1 ();
2961 if (sval->get_op () == MULT_EXPR)
2963 if (result_set.contains (arg0) || result_set.contains (arg1))
2964 result_set.add (sval);
2966 else
2968 if (result_set.contains (arg0) && result_set.contains (arg1))
2969 result_set.add (sval);
2973 void visit_repeated_svalue (const repeated_svalue *sval) final override
2975 sval->get_inner_svalue ()->accept (this);
2976 if (result_set.contains (sval->get_inner_svalue ()))
2977 result_set.add (sval);
2980 void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
2982 sval->get_arg ()->accept (this);
2983 if (result_set.contains (sval->get_arg ()))
2984 result_set.add (sval);
2987 void visit_widening_svalue (const widening_svalue *sval) final override
2989 const svalue *base = sval->get_base_svalue ();
2990 const svalue *iter = sval->get_iter_svalue ();
2992 if (result_set.contains (base) && result_set.contains (iter))
2993 result_set.add (sval);
2996 void visit_conjured_svalue (const conjured_svalue *sval ATTRIBUTE_UNUSED)
2997 final override
2999 equiv_class_id id (-1);
3000 if (m_cm->get_equiv_class_by_svalue (sval, &id))
3002 if (tree cst = id.get_obj (*m_cm).get_any_constant ())
3003 check_constant (cst, sval);
3004 else
3005 result_set.add (sval);
3009 void visit_asm_output_svalue (const asm_output_svalue *sval ATTRIBUTE_UNUSED)
3010 final override
3012 result_set.add (sval);
3015 void visit_const_fn_result_svalue (const const_fn_result_svalue
3016 *sval ATTRIBUTE_UNUSED) final override
3018 result_set.add (sval);
3021 private:
3022 void check_constant (tree cst, const svalue *sval)
3024 switch (TREE_CODE (cst))
3026 default:
3027 /* Assume all unhandled operands are compatible. */
3028 result_set.add (sval);
3029 break;
3030 case INTEGER_CST:
3031 if (capacity_compatible_with_type (cst, m_size_cst))
3032 result_set.add (sval);
3033 break;
3037 tree m_size_cst;
3038 const svalue *m_root_sval;
3039 constraint_manager *m_cm;
3040 svalue_set result_set; /* Used as a mapping of svalue*->bool. */
3043 /* Return true if a struct or union either uses the inheritance pattern,
3044 where the first field is a base struct, or the flexible array member
3045 pattern, where the last field is an array without a specified size. */
3047 static bool
3048 struct_or_union_with_inheritance_p (tree struc)
3050 tree iter = TYPE_FIELDS (struc);
3051 if (iter == NULL_TREE)
3052 return false;
3053 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
3054 return true;
3056 tree last_field;
3057 while (iter != NULL_TREE)
3059 last_field = iter;
3060 iter = DECL_CHAIN (iter);
3063 if (last_field != NULL_TREE
3064 && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
3065 return true;
3067 return false;
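/* Hypothetical examples of the two patterns matched above:

     struct derived { struct base b; int extra; };   (inheritance)
     struct str { size_t len; char data[]; };        (flexible array member)  */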
3070 /* Return true if the lhs and rhs of an assignment have different types. */
3072 static bool
3073 is_any_cast_p (const gimple *stmt)
3075 if (const gassign *assign = dyn_cast <const gassign *> (stmt))
3076 return gimple_assign_cast_p (assign)
3077 || !pending_diagnostic::same_tree_p (
3078 TREE_TYPE (gimple_assign_lhs (assign)),
3079 TREE_TYPE (gimple_assign_rhs1 (assign)));
3080 else if (const gcall *call = dyn_cast <const gcall *> (stmt))
3082 tree lhs = gimple_call_lhs (call);
3083 return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
3084 TREE_TYPE (gimple_call_lhs (call)),
3085 gimple_call_return_type (call));
3088 return false;
3091 /* On pointer assignments, check whether the buffer size of
3092 RHS_SVAL is compatible with the type of the LHS_REG.
3093 Use a non-null CTXT to report allocation size warnings. */
3095 void
3096 region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
3097 region_model_context *ctxt) const
3099 if (!ctxt || ctxt->get_stmt () == NULL)
3100 return;
3101 /* Only report warnings on assignments that actually change the type. */
3102 if (!is_any_cast_p (ctxt->get_stmt ()))
3103 return;
3105 const region_svalue *reg_sval = dyn_cast <const region_svalue *> (rhs_sval);
3106 if (!reg_sval)
3107 return;
3109 tree pointer_type = lhs_reg->get_type ();
3110 if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
3111 return;
3113 tree pointee_type = TREE_TYPE (pointer_type);
3114 /* Make sure that the type on the left-hand side actually has a size. */
3115 if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
3116 || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
3117 return;
3119 /* Bail out early on pointers to structs where we cannot
3120 deduce whether the buffer size is compatible. */
3121 bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
3122 if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
3123 return;
3125 tree pointee_size_tree = size_in_bytes (pointee_type);
3126 /* We give up if the type size is not known at compile-time or the
3127 type size is always compatible regardless of the buffer size. */
3128 if (TREE_CODE (pointee_size_tree) != INTEGER_CST
3129 || integer_zerop (pointee_size_tree)
3130 || integer_onep (pointee_size_tree))
3131 return;
3133 const region *rhs_reg = reg_sval->get_pointee ();
3134 const svalue *capacity = get_capacity (rhs_reg);
3135 switch (capacity->get_kind ())
3137 case svalue_kind::SK_CONSTANT:
3139 const constant_svalue *cst_cap_sval
3140 = as_a <const constant_svalue *> (capacity);
3141 tree cst_cap = cst_cap_sval->get_constant ();
3142 if (TREE_CODE (cst_cap) == INTEGER_CST
3143 && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
3144 is_struct))
3145 ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
3146 cst_cap));
3148 break;
3149 default:
3151 if (!is_struct)
3153 size_visitor v (pointee_size_tree, capacity, m_constraints);
3154 if (!v.get_result ())
3156 tree expr = get_representative_tree (capacity);
3157 ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg,
3158 rhs_reg,
3159 expr));
3162 break;
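/* Under the checks above, a hypothetical assignment such as:

     int32_t *p = (int32_t *) malloc (11);

   is flagged by -Wanalyzer-allocation-size, since the constant
   capacity 11 is not a multiple of sizeof (int32_t), whereas
   "malloc (4 * n)" is accepted because the size_visitor can see the
   multiplication by a compatible constant.  */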
3167 /* Set the value of the region given by LHS_REG to the value given
3168 by RHS_SVAL.
3169 Use CTXT to report any warnings associated with writing to LHS_REG. */
3171 void
3172 region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
3173 region_model_context *ctxt)
3175 gcc_assert (lhs_reg);
3176 gcc_assert (rhs_sval);
3178 /* Setting the value of an empty region is a no-op. */
3179 if (lhs_reg->empty_p ())
3180 return;
3182 check_region_size (lhs_reg, rhs_sval, ctxt);
3184 check_region_for_write (lhs_reg, ctxt);
3186 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
3187 ctxt ? ctxt->get_uncertainty () : NULL);
3190 /* Set the value of the region given by LHS to the value given by RHS. */
3192 void
3193 region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
3195 const region *lhs_reg = get_lvalue (lhs, ctxt);
3196 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
3197 gcc_assert (lhs_reg);
3198 gcc_assert (rhs_sval);
3199 set_value (lhs_reg, rhs_sval, ctxt);
3202 /* Remove all bindings overlapping REG within the store. */
3204 void
3205 region_model::clobber_region (const region *reg)
3207 m_store.clobber_region (m_mgr->get_store_manager(), reg);
3210 /* Remove any bindings for REG within the store. */
3212 void
3213 region_model::purge_region (const region *reg)
3215 m_store.purge_region (m_mgr->get_store_manager(), reg);
3218 /* Fill REG with SVAL. */
3220 void
3221 region_model::fill_region (const region *reg, const svalue *sval)
3223 m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
3226 /* Zero-fill REG. */
3228 void
3229 region_model::zero_fill_region (const region *reg)
3231 m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
3234 /* Mark REG as having unknown content. */
3236 void
3237 region_model::mark_region_as_unknown (const region *reg,
3238 uncertainty_t *uncertainty)
3240 m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
3241 uncertainty);
3244 /* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
3245 this model. */
3247 tristate
3248 region_model::eval_condition (const svalue *lhs,
3249 enum tree_code op,
3250 const svalue *rhs) const
3252 gcc_assert (lhs);
3253 gcc_assert (rhs);
3255 /* For now, make no attempt to capture constraints on floating-point
3256 values. */
3257 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
3258 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
3259 return tristate::unknown ();
3261 /* See what we know based on the values. */
3263 /* Unwrap any unmergeable values. */
3264 lhs = lhs->unwrap_any_unmergeable ();
3265 rhs = rhs->unwrap_any_unmergeable ();
3267 if (lhs == rhs)
3269 /* If we have the same svalue, then we have equality
3270 (apart from NaN-handling).
3271 TODO: should this definitely be the case for poisoned values? */
3272 /* Poisoned and unknown values are "unknowable". */
3273 if (lhs->get_kind () == SK_POISONED
3274 || lhs->get_kind () == SK_UNKNOWN)
3275 return tristate::TS_UNKNOWN;
3277 switch (op)
3279 case EQ_EXPR:
3280 case GE_EXPR:
3281 case LE_EXPR:
3282 return tristate::TS_TRUE;
3284 case NE_EXPR:
3285 case GT_EXPR:
3286 case LT_EXPR:
3287 return tristate::TS_FALSE;
3289 default:
3290 /* For other ops, use the logic below. */
3291 break;
3295 /* If we have a pair of region_svalues, compare them. */
3296 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
3297 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
3299 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
3300 if (res.is_known ())
3301 return res;
3302 /* Otherwise, only known through constraints. */
3305 if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
3307 /* If we have a pair of constants, compare them. */
3308 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
3309 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
3310 else
3312 /* When we have one constant, put it on the RHS. */
3313 std::swap (lhs, rhs);
3314 op = swap_tree_comparison (op);
3317 gcc_assert (lhs->get_kind () != SK_CONSTANT);
3319 /* Handle comparison against zero. */
3320 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
3321 if (zerop (cst_rhs->get_constant ()))
3323 if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
3325 /* A region_svalue is a non-NULL pointer, except in certain
3326 special cases (see the comment for region::non_null_p). */
3327 const region *pointee = ptr->get_pointee ();
3328 if (pointee->non_null_p ())
3330 switch (op)
3332 default:
3333 gcc_unreachable ();
3335 case EQ_EXPR:
3336 case GE_EXPR:
3337 case LE_EXPR:
3338 return tristate::TS_FALSE;
3340 case NE_EXPR:
3341 case GT_EXPR:
3342 case LT_EXPR:
3343 return tristate::TS_TRUE;
3347 else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
3349 /* Treat offsets from a non-NULL pointer as being non-NULL. This
3350 isn't strictly true, in that eventually ptr++ will wrap
3351 around and be NULL, but it won't occur in practice and thus
3352 can be used to suppress effectively false positives that we
3353 shouldn't warn for. */
3354 if (binop->get_op () == POINTER_PLUS_EXPR)
3356 tristate lhs_ts = eval_condition (binop->get_arg0 (), op, rhs);
3357 if (lhs_ts.is_known ())
3358 return lhs_ts;
3361 else if (const unaryop_svalue *unaryop
3362 = lhs->dyn_cast_unaryop_svalue ())
3364 if (unaryop->get_op () == NEGATE_EXPR)
3366 /* e.g. "-X <= 0" is equivalent to "X >= 0". */
3367 tristate lhs_ts = eval_condition (unaryop->get_arg (),
3368 swap_tree_comparison (op),
3369 rhs);
3370 if (lhs_ts.is_known ())
3371 return lhs_ts;
3376 /* Handle rejection of equality for comparisons of the initial values of
3377 "external" values (such as params) with the address of locals. */
3378 if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
3379 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
3381 tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
3382 if (res.is_known ())
3383 return res;
3385 if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
3386 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
3388 tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
3389 if (res.is_known ())
3390 return res;
3393 if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
3394 if (tree rhs_cst = rhs->maybe_get_constant ())
3396 tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
3397 if (res.is_known ())
3398 return res;
3401 /* Handle comparisons between two svalues with more than one operand. */
3402 if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
3404 switch (op)
3406 default:
3407 break;
3408 case EQ_EXPR:
3410 /* TODO: binops can be equal even if they are not structurally
3411 equal in case of commutative operators. */
3412 tristate res = structural_equality (lhs, rhs);
3413 if (res.is_true ())
3414 return res;
3416 break;
3417 case LE_EXPR:
3419 tristate res = structural_equality (lhs, rhs);
3420 if (res.is_true ())
3421 return res;
3423 break;
3424 case GE_EXPR:
3426 tristate res = structural_equality (lhs, rhs);
3427 if (res.is_true ())
3428 return res;
3429 res = symbolic_greater_than (binop, rhs);
3430 if (res.is_true ())
3431 return res;
3433 break;
3434 case GT_EXPR:
3436 tristate res = symbolic_greater_than (binop, rhs);
3437 if (res.is_true ())
3438 return res;
3440 break;
3444 /* Otherwise, try constraints.
3445 Cast to const to ensure we don't change the constraint_manager as we
3446 do this (e.g. by creating equivalence classes). */
3447 const constraint_manager *constraints = m_constraints;
3448 return constraints->eval_condition (lhs, op, rhs);
3451 /* Subroutine of region_model::eval_condition, for rejecting
3452 equality of INIT_VAL(PARM) with &LOCAL. */
3454 tristate
3455 region_model::compare_initial_and_pointer (const initial_svalue *init,
3456 const region_svalue *ptr) const
3458 const region *pointee = ptr->get_pointee ();
3460 /* If we have a pointer to something within a stack frame, it can't be the
3461 initial value of a param. */
3462 if (pointee->maybe_get_frame_region ())
3463 if (init->initial_value_of_param_p ())
3464 return tristate::TS_FALSE;
3466 return tristate::TS_UNKNOWN;
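/* E.g. (hypothetically) within:

     int f (int *p)
     {
       int local;
       if (p == &local)
         ...
     }

   INIT_VAL(p) cannot equal "&local": the caller cannot have taken the
   address of a frame that did not yet exist, so the comparison folds
   to false.  */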
3469 /* Return true if SVAL is definitely positive. */
3471 static bool
3472 is_positive_svalue (const svalue *sval)
3474 if (tree cst = sval->maybe_get_constant ())
3475 return !zerop (cst) && get_range_pos_neg (cst) == 1;
3476 tree type = sval->get_type ();
3477 if (!type)
3478 return false;
3479 /* Consider a binary operation size_t + int. The analyzer wraps the int in
3480 an unaryop_svalue, converting it to a size_t, but in the dynamic execution
3481 the result can be smaller than the first operand (when the int is negative).
3482 Thus, we also have to check whether the unaryop_svalue's argument is positive. */
3483 if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
3484 return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
3485 && is_positive_svalue (un_op->get_arg ());
3486 return TYPE_UNSIGNED (type);
3489 /* Return TS_TRUE if A is definitely larger than B.
3491 Limitation: does not account for integer overflows and does not try to
3492 return false, so it cannot be used negated. */
3494 tristate
3495 region_model::symbolic_greater_than (const binop_svalue *bin_a,
3496 const svalue *b) const
3498 if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
3500 /* Eliminate the right-hand side of both svalues. */
3501 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
3502 if (bin_a->get_op () == bin_b->get_op ()
3503 && eval_condition (bin_a->get_arg1 (),
3504 GT_EXPR,
3505 bin_b->get_arg1 ()).is_true ()
3506 && eval_condition (bin_a->get_arg0 (),
3507 GE_EXPR,
3508 bin_b->get_arg0 ()).is_true ())
3509 return tristate (tristate::TS_TRUE);
3511 /* Otherwise, try to remove a positive offset or factor from BIN_A. */
3512 if (is_positive_svalue (bin_a->get_arg1 ())
3513 && eval_condition (bin_a->get_arg0 (),
3514 GE_EXPR, b).is_true ())
3515 return tristate (tristate::TS_TRUE);
3517 return tristate::unknown ();
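/* E.g. (hypothetically): "x + 12 > x + 4" holds via the first rule
   (12 > 4 and x >= x), and "x + 4 > x" holds via the second, since 4
   is positive and x >= x; no attempt is made to prove the negation,
   since overflow is not modeled.  */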
3520 /* Return true if A and B are equal structurally.
3522 Structural equality means that A and B are equal if the svalues A and B have
3523 the same nodes at the same positions in the tree and the leaves are equal.
3524 Equality for conjured_svalues and initial_svalues is determined by comparing
3525 the pointers while constants are compared by value. That behavior is useful
3526 to check for binop_svalues that evaluate to the same concrete value but
3527 might use one operand with a different type but the same constant value.
3529 For example,
3530 binop_svalue (mult_expr,
3531 initial_svalue (‘size_t’, decl_region (..., 'some_var')),
3532 constant_svalue (‘size_t’, 4))
3534 binop_svalue (mult_expr,
3535 initial_svalue (‘size_t’, decl_region (..., 'some_var')),
3536 constant_svalue (‘sizetype’, 4))
3537 are structurally equal. A concrete C code example where this occurs can
3538 be found in test7 of out-of-bounds-5.c. */
3540 tristate
3541 region_model::structural_equality (const svalue *a, const svalue *b) const
3543 /* If A and B are referentially equal, they are also structurally equal. */
3544 if (a == b)
3545 return tristate (tristate::TS_TRUE);
3547 switch (a->get_kind ())
3549 default:
3550 return tristate::unknown ();
3551 /* SK_CONJURED and SK_INITIAL are already handled
3552 by the referential equality above. */
3553 case SK_CONSTANT:
3555 tree a_cst = a->maybe_get_constant ();
3556 tree b_cst = b->maybe_get_constant ();
3557 if (a_cst && b_cst)
3558 return tristate (tree_int_cst_equal (a_cst, b_cst));
3560 return tristate (tristate::TS_FALSE);
3561 case SK_UNARYOP:
3563 const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
3564 if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
3565 return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
3566 un_b->get_type ())
3567 && un_a->get_op () == un_b->get_op ()
3568 && structural_equality (un_a->get_arg (),
3569 un_b->get_arg ()));
3571 return tristate (tristate::TS_FALSE);
3572 case SK_BINOP:
3574 const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
3575 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
3576 return tristate (bin_a->get_op () == bin_b->get_op ()
3577 && structural_equality (bin_a->get_arg0 (),
3578 bin_b->get_arg0 ())
3579 && structural_equality (bin_a->get_arg1 (),
3580 bin_b->get_arg1 ()));
3582 return tristate (tristate::TS_FALSE);
3586 /* Handle various constraints of the form:
3587 LHS: ((bool)INNER_LHS INNER_OP INNER_RHS))
3588 OP : == or !=
3589 RHS: zero
3590 and (with a cast):
3591 LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
3592 OP : == or !=
3593 RHS: zero
3594 by adding constraints for INNER_LHS INNEROP INNER_RHS.
3596 Return true if this function can fully handle the constraint; if
3597 so, add the implied constraint(s) and write true to *OUT if they
3598 are consistent with existing constraints, or write false to *OUT
3599 if they contradict existing constraints.
3601 Return false for cases that this function doesn't know how to handle.
3603 For example, if we're checking a stored conditional, we'll have
3604 something like:
3605 LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
3606 OP : NE_EXPR
3607 RHS: zero
3608 which this function can turn into an add_constraint of:
3609 (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
3611 Similarly, optimized && and || conditionals lead to e.g.
3612 if (p && q)
3613 becoming gimple like this:
3614 _1 = p_6 == 0B;
3615 _2 = q_8 == 0B;
3616 _3 = _1 | _2;
3617 On the "_3 is false" branch we can have constraints of the form:
3618 ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
3619 | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
3620 == 0
3621 which implies that both _1 and _2 are false,
3622 which this function can turn into a pair of add_constraints of
3623 (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
3624 and:
3625 (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */
3627 bool
3628 region_model::add_constraints_from_binop (const svalue *outer_lhs,
3629 enum tree_code outer_op,
3630 const svalue *outer_rhs,
3631 bool *out,
3632 region_model_context *ctxt)
3634 while (const svalue *cast = outer_lhs->maybe_undo_cast ())
3635 outer_lhs = cast;
3636 const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
3637 if (!binop_sval)
3638 return false;
3639 if (!outer_rhs->all_zeroes_p ())
3640 return false;
3642 const svalue *inner_lhs = binop_sval->get_arg0 ();
3643 enum tree_code inner_op = binop_sval->get_op ();
3644 const svalue *inner_rhs = binop_sval->get_arg1 ();
3646 if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
3647 return false;
3649 /* We have either
3650 - "OUTER_LHS != false" (i.e. OUTER is true), or
3651 - "OUTER_LHS == false" (i.e. OUTER is false). */
3652 bool is_true = outer_op == NE_EXPR;
3654 switch (inner_op)
3656 default:
3657 return false;
3659 case EQ_EXPR:
3660 case NE_EXPR:
3662 /* ...and "(inner_lhs OP inner_rhs) == 0"
3663 then (inner_lhs OP inner_rhs) must have the same
3664 logical value as LHS. */
3665 if (!is_true)
3666 inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
3667 *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
3668 return true;
3670 break;
3672 case BIT_AND_EXPR:
3673 if (is_true)
3675 /* ...and "(inner_lhs & inner_rhs) != 0"
3676 then both inner_lhs and inner_rhs must be true. */
3677 const svalue *false_sval
3678 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
3679 bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
3680 bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
3681 *out = sat1 && sat2;
3682 return true;
3684 return false;
3686 case BIT_IOR_EXPR:
3687 if (!is_true)
3689 /* ...and "(inner_lhs | inner_rhs) == 0"
3690 i.e. "(inner_lhs | inner_rhs)" is false
3691 then both inner_lhs and inner_rhs must be false. */
3692 const svalue *false_sval
3693 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
3694 bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
3695 bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
3696 *out = sat1 && sat2;
3697 return true;
3699 return false;
3703 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
3704 If it is consistent with existing constraints, add it, and return true.
3705 Return false if it contradicts existing constraints.
3706 Use CTXT for reporting any diagnostics associated with the accesses. */
3708 bool
3709 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3710 region_model_context *ctxt)
3712 /* For now, make no attempt to capture constraints on floating-point
3713 values. */
3714 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3715 return true;
3717 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
3718 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
3720 return add_constraint (lhs_sval, op, rhs_sval, ctxt);
3723 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
3724 If it is consistent with existing constraints, add it, and return true.
3725 Return false if it contradicts existing constraints.
3726 Use CTXT for reporting any diagnostics associated with the accesses. */
3728 bool
3729 region_model::add_constraint (const svalue *lhs,
3730 enum tree_code op,
3731 const svalue *rhs,
3732 region_model_context *ctxt)
3734 tristate t_cond = eval_condition (lhs, op, rhs);
3736 /* If we already have the condition, do nothing. */
3737 if (t_cond.is_true ())
3738 return true;
3740 /* Reject a constraint that would contradict existing knowledge, as
3741 unsatisfiable. */
3742 if (t_cond.is_false ())
3743 return false;
3745 bool out;
3746 if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
3747 return out;
3749 /* Attempt to store the constraint. */
3750 if (!m_constraints->add_constraint (lhs, op, rhs))
3751 return false;
3753 /* Notify the context, if any. This exists so that the state machines
3754 in a program_state can be notified about the condition, and so can
3755 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
3756 when synthesizing constraints as above. */
3757 if (ctxt)
3758 ctxt->on_condition (lhs, op, rhs);
3760 /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
3761 the case where REGION is heap-allocated and thus could be NULL). */
3762 if (tree rhs_cst = rhs->maybe_get_constant ())
3763 if (op == EQ_EXPR && zerop (rhs_cst))
3764 if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
3765 unset_dynamic_extents (region_sval->get_pointee ());
3767 return true;
3770 /* As above, but when returning false, if OUT is non-NULL, write a
3771 new rejected_constraint to *OUT. */
3773 bool
3774 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3775 region_model_context *ctxt,
3776 rejected_constraint **out)
3778 bool sat = add_constraint (lhs, op, rhs, ctxt);
3779 if (!sat && out)
3780 *out = new rejected_op_constraint (*this, lhs, op, rhs);
3781 return sat;
3784 /* Determine what is known about the condition "LHS OP RHS" within
3785 this model.
3786 Use CTXT for reporting any diagnostics associated with the accesses. */
3788 tristate
3789 region_model::eval_condition (tree lhs,
3790 enum tree_code op,
3791 tree rhs,
3792 region_model_context *ctxt) const
3794 /* For now, make no attempt to model constraints on floating-point
3795 values. */
3796 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3797 return tristate::unknown ();
3799 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
3802 /* Implementation of region_model::get_representative_path_var.
3803 Attempt to return a path_var that represents SVAL, or return NULL_TREE.
3804 Use VISITED to prevent infinite mutual recursion with the overload for
3805 regions. */
3807 path_var
3808 region_model::get_representative_path_var_1 (const svalue *sval,
3809 svalue_set *visited) const
3811 gcc_assert (sval);
3813 /* Prevent infinite recursion. */
3814 if (visited->contains (sval))
3815 return path_var (NULL_TREE, 0);
3816 visited->add (sval);
3818 /* Handle casts by recursion into get_representative_path_var. */
3819 if (const svalue *cast_sval = sval->maybe_undo_cast ())
3821 path_var result = get_representative_path_var (cast_sval, visited);
3822 tree orig_type = sval->get_type ();
3823 /* If necessary, wrap the result in a cast. */
3824 if (result.m_tree && orig_type)
3825 result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
3826 return result;
3829 auto_vec<path_var> pvs;
3830 m_store.get_representative_path_vars (this, visited, sval, &pvs);
3832 if (tree cst = sval->maybe_get_constant ())
3833 pvs.safe_push (path_var (cst, 0));
3835 /* Handle string literals and various other pointers. */
3836 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
3838 const region *reg = ptr_sval->get_pointee ();
3839 if (path_var pv = get_representative_path_var (reg, visited))
3840 return path_var (build1 (ADDR_EXPR,
3841 sval->get_type (),
3842 pv.m_tree),
3843 pv.m_stack_depth);
3846 /* If we have a sub_svalue, look for ways to represent the parent. */
3847 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
3849 const svalue *parent_sval = sub_sval->get_parent ();
3850 const region *subreg = sub_sval->get_subregion ();
3851 if (path_var parent_pv
3852 = get_representative_path_var (parent_sval, visited))
3853 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
3854 return path_var (build3 (COMPONENT_REF,
3855 sval->get_type (),
3856 parent_pv.m_tree,
3857 field_reg->get_field (),
3858 NULL_TREE),
3859 parent_pv.m_stack_depth);
3862 /* Handle binops. */
3863 if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
3864 if (path_var lhs_pv
3865 = get_representative_path_var (binop_sval->get_arg0 (), visited))
3866 if (path_var rhs_pv
3867 = get_representative_path_var (binop_sval->get_arg1 (), visited))
3868 return path_var (build2 (binop_sval->get_op (),
3869 sval->get_type (),
3870 lhs_pv.m_tree, rhs_pv.m_tree),
3871 lhs_pv.m_stack_depth);
3873 if (pvs.length () < 1)
3874 return path_var (NULL_TREE, 0);
3876 pvs.qsort (readability_comparator);
3877 return pvs[0];
3880 /* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
3881 Use VISITED to prevent infinite mutual recursion with the overload for
3882 regions.
3884 This function defers to get_representative_path_var_1 to do the work;
3885 it adds verification that get_representative_path_var_1 returned a tree
3886 of the correct type. */
3888 path_var
3889 region_model::get_representative_path_var (const svalue *sval,
3890 svalue_set *visited) const
3892 if (sval == NULL)
3893 return path_var (NULL_TREE, 0);
3895 tree orig_type = sval->get_type ();
3897 path_var result = get_representative_path_var_1 (sval, visited);
3899 /* Verify that the result has the same type as SVAL, if any. */
3900 if (result.m_tree && orig_type)
3901 gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
3903 return result;
3906 /* Attempt to return a tree that represents SVAL, or return NULL_TREE.
3908 Strip off any top-level cast, to avoid messages like
3909 double-free of '(void *)ptr'
3910 from analyzer diagnostics. */
3912 tree
3913 region_model::get_representative_tree (const svalue *sval) const
3915 svalue_set visited;
3916 tree expr = get_representative_path_var (sval, &visited).m_tree;
3918 /* Strip off any top-level cast. */
3919 if (expr && TREE_CODE (expr) == NOP_EXPR)
3920 expr = TREE_OPERAND (expr, 0);
3922 return fixup_tree_for_diagnostic (expr);
3925 tree
3926 region_model::get_representative_tree (const region *reg) const
3928 svalue_set visited;
3929 tree expr = get_representative_path_var (reg, &visited).m_tree;
3931 /* Strip off any top-level cast. */
3932 if (expr && TREE_CODE (expr) == NOP_EXPR)
3933 expr = TREE_OPERAND (expr, 0);
3935 return fixup_tree_for_diagnostic (expr);
3938 /* Implementation of region_model::get_representative_path_var.
3940 Attempt to return a path_var that represents REG, or return
3941 the NULL path_var.
3942 For example, a region for a field of a local would be a path_var
3943 wrapping a COMPONENT_REF.
3944 Use VISITED to prevent infinite mutual recursion with the overload for
3945 svalues. */
3947 path_var
3948 region_model::get_representative_path_var_1 (const region *reg,
3949 svalue_set *visited) const
3951 switch (reg->get_kind ())
3953 default:
3954 gcc_unreachable ();
3956 case RK_FRAME:
3957 case RK_GLOBALS:
3958 case RK_CODE:
3959 case RK_HEAP:
3960 case RK_STACK:
3961 case RK_THREAD_LOCAL:
3962 case RK_ROOT:
3963 /* Regions that represent memory spaces are not expressible as trees. */
3964 return path_var (NULL_TREE, 0);
3966 case RK_FUNCTION:
3968 const function_region *function_reg
3969 = as_a <const function_region *> (reg);
3970 return path_var (function_reg->get_fndecl (), 0);
3972 case RK_LABEL:
3974 const label_region *label_reg = as_a <const label_region *> (reg);
3975 return path_var (label_reg->get_label (), 0);
3978 case RK_SYMBOLIC:
3980 const symbolic_region *symbolic_reg
3981 = as_a <const symbolic_region *> (reg);
3982 const svalue *pointer = symbolic_reg->get_pointer ();
3983 path_var pointer_pv = get_representative_path_var (pointer, visited);
3984 if (!pointer_pv)
3985 return path_var (NULL_TREE, 0);
3986 tree offset = build_int_cst (pointer->get_type (), 0);
3987 return path_var (build2 (MEM_REF,
3988 reg->get_type (),
3989 pointer_pv.m_tree,
3990 offset),
3991 pointer_pv.m_stack_depth);
3993 case RK_DECL:
3995 const decl_region *decl_reg = as_a <const decl_region *> (reg);
3996 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
3998 case RK_FIELD:
4000 const field_region *field_reg = as_a <const field_region *> (reg);
4001 path_var parent_pv
4002 = get_representative_path_var (reg->get_parent_region (), visited);
4003 if (!parent_pv)
4004 return path_var (NULL_TREE, 0);
4005 return path_var (build3 (COMPONENT_REF,
4006 reg->get_type (),
4007 parent_pv.m_tree,
4008 field_reg->get_field (),
4009 NULL_TREE),
4010 parent_pv.m_stack_depth);
4013 case RK_ELEMENT:
4015 const element_region *element_reg
4016 = as_a <const element_region *> (reg);
4017 path_var parent_pv
4018 = get_representative_path_var (reg->get_parent_region (), visited);
4019 if (!parent_pv)
4020 return path_var (NULL_TREE, 0);
4021 path_var index_pv
4022 = get_representative_path_var (element_reg->get_index (), visited);
4023 if (!index_pv)
4024 return path_var (NULL_TREE, 0);
4025 return path_var (build4 (ARRAY_REF,
4026 reg->get_type (),
4027 parent_pv.m_tree, index_pv.m_tree,
4028 NULL_TREE, NULL_TREE),
4029 parent_pv.m_stack_depth);
4032 case RK_OFFSET:
4034 const offset_region *offset_reg
4035 = as_a <const offset_region *> (reg);
4036 path_var parent_pv
4037 = get_representative_path_var (reg->get_parent_region (), visited);
4038 if (!parent_pv)
4039 return path_var (NULL_TREE, 0);
4040 path_var offset_pv
4041 = get_representative_path_var (offset_reg->get_byte_offset (),
4042 visited);
4043 if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
4044 return path_var (NULL_TREE, 0);
4045 tree addr_parent = build1 (ADDR_EXPR,
4046 build_pointer_type (reg->get_type ()),
4047 parent_pv.m_tree);
4048 return path_var (build2 (MEM_REF,
4049 reg->get_type (),
4050 addr_parent, offset_pv.m_tree),
4051 parent_pv.m_stack_depth);
4054 case RK_SIZED:
4055 return path_var (NULL_TREE, 0);
4057 case RK_CAST:
4059 path_var parent_pv
4060 = get_representative_path_var (reg->get_parent_region (), visited);
4061 if (!parent_pv)
4062 return path_var (NULL_TREE, 0);
4063 return path_var (build1 (NOP_EXPR,
4064 reg->get_type (),
4065 parent_pv.m_tree),
4066 parent_pv.m_stack_depth);
4069 case RK_HEAP_ALLOCATED:
4070 case RK_ALLOCA:
4071 /* No good way to express heap-allocated/alloca regions as trees. */
4072 return path_var (NULL_TREE, 0);
4074 case RK_STRING:
4076 const string_region *string_reg = as_a <const string_region *> (reg);
4077 return path_var (string_reg->get_string_cst (), 0);
4080 case RK_VAR_ARG:
4081 case RK_ERRNO:
4082 case RK_UNKNOWN:
4083 return path_var (NULL_TREE, 0);
4087 /* Attempt to return a path_var that represents REG, or return
4088 the NULL path_var.
4089 For example, a region for a field of a local would be a path_var
4090 wrapping a COMPONENT_REF.
4091 Use VISITED to prevent infinite mutual recursion with the overload for
4092 svalues.
4094 This function defers to get_representative_path_var_1 to do the work;
4095 it adds verification that get_representative_path_var_1 returned a tree
4096 of the correct type. */
4098 path_var
4099 region_model::get_representative_path_var (const region *reg,
4100 svalue_set *visited) const
4102 path_var result = get_representative_path_var_1 (reg, visited);
4104 /* Verify that the result has the same type as REG, if any. */
4105 if (result.m_tree && reg->get_type ())
4106 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
4108 return result;
4111 /* Update this model for any phis in SNODE, assuming we came from
4112 LAST_CFG_SUPEREDGE. */
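/* The copy of the old state matters for phis that read each other's
   results; a sketch (names are illustrative):
     x_1 = PHI <y_0>
     y_1 = PHI <x_0>
   Each phi must be evaluated against the state before the edge was
   taken, rather than observing the other phi's update.  */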
4114 void
4115 region_model::update_for_phis (const supernode *snode,
4116 const cfg_superedge *last_cfg_superedge,
4117 region_model_context *ctxt)
4119 gcc_assert (last_cfg_superedge);
4121 /* Copy this state and pass it to handle_phi so that all of the phi stmts
4122 are effectively handled simultaneously. */
4123 const region_model old_state (*this);
4125 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
4126 !gsi_end_p (gpi); gsi_next (&gpi))
4128 gphi *phi = gpi.phi ();
4130 tree src = last_cfg_superedge->get_phi_arg (phi);
4131 tree lhs = gimple_phi_result (phi);
4133 /* Update this model based on PHI and OLD_STATE. */
4134 handle_phi (phi, lhs, src, old_state, ctxt);
4138 /* Attempt to update this model for taking EDGE (where the last statement
4139 was LAST_STMT), returning true if the edge can be taken, false
4140 otherwise.
4141 When returning false, if OUT is non-NULL, write a new rejected_constraint
4142 to it.
4144 For CFG superedges where LAST_STMT is a conditional or a switch
4145 statement, attempt to add the relevant conditions for EDGE to this
4146 model, returning true if they are feasible, or false if they are
4147 impossible.
4149 For call superedges, push frame information and store arguments
4150 into parameters.
4152 For return superedges, pop frame information and store return
4153 values into any lhs.
4155 Rejection of call/return superedges happens elsewhere, in
4156 program_point::on_edge (i.e. based on program point, rather
4157 than program state). */
4159 bool
4160 region_model::maybe_update_for_edge (const superedge &edge,
4161 const gimple *last_stmt,
4162 region_model_context *ctxt,
4163 rejected_constraint **out)
4165 /* Handle frame updates for interprocedural edges. */
4166 switch (edge.m_kind)
4168 default:
4169 break;
4171 case SUPEREDGE_CALL:
4173 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
4174 update_for_call_superedge (*call_edge, ctxt);
4176 break;
4178 case SUPEREDGE_RETURN:
4180 const return_superedge *return_edge
4181 = as_a <const return_superedge *> (&edge);
4182 update_for_return_superedge (*return_edge, ctxt);
4184 break;
4186 case SUPEREDGE_INTRAPROCEDURAL_CALL:
4187 /* This is a no-op for call summaries; we should already
4188 have handled the effect of the call summary at the call stmt. */
4189 break;
4192 if (last_stmt == NULL)
4193 return true;
4195 /* Apply any constraints for conditionals/switch statements. */
4197 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
4199 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
4200 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
4203 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
4205 const switch_cfg_superedge *switch_sedge
4206 = as_a <const switch_cfg_superedge *> (&edge);
4207 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
4208 ctxt, out);
4211 /* Apply any constraints due to an exception being thrown. */
4212 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
4213 if (cfg_sedge->get_flags () & EDGE_EH)
4214 return apply_constraints_for_exception (last_stmt, ctxt, out);
4216 return true;
4219 /* Push a new frame_region on to the stack region.
4220 Populate the frame_region with child regions for the function call's
4221 parameters, using values from the arguments at the callsite in the
4222 caller's frame. */
4224 void
4225 region_model::update_for_gcall (const gcall *call_stmt,
4226 region_model_context *ctxt,
4227 function *callee)
4229 /* Build a vec of argument svalues, using the current top
4230 frame for resolving tree expressions. */
4231 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
4233 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
4235 tree arg = gimple_call_arg (call_stmt, i);
4236 arg_svals.quick_push (get_rvalue (arg, ctxt));
4239 if (!callee)
4241 /* Get the function * from the gcall. */
4242 tree fn_decl = get_fndecl_for_call (call_stmt, ctxt);
4243 callee = DECL_STRUCT_FUNCTION (fn_decl);
4246 push_frame (callee, &arg_svals, ctxt);
4249 /* Pop the top-most frame_region from the stack, and copy the return
4250 region's values (if any) into the region for the lvalue of the LHS of
4251 the call (if any). */
4253 void
4254 region_model::update_for_return_gcall (const gcall *call_stmt,
4255 region_model_context *ctxt)
4257 /* Get the lvalue for the result of the call, passing it to pop_frame,
4258 so that pop_frame can determine the region with respect to the
4259 *caller* frame. */
4260 tree lhs = gimple_call_lhs (call_stmt);
4261 pop_frame (lhs, NULL, ctxt);
4264 /* Extract calling information from the superedge and update the model for the
4265 call.  */
4267 void
4268 region_model::update_for_call_superedge (const call_superedge &call_edge,
4269 region_model_context *ctxt)
4271 const gcall *call_stmt = call_edge.get_call_stmt ();
4272 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
4275 /* Extract calling information from the return superedge and update the model
4276 for the returning call.  */
4278 void
4279 region_model::update_for_return_superedge (const return_superedge &return_edge,
4280 region_model_context *ctxt)
4282 const gcall *call_stmt = return_edge.get_call_stmt ();
4283 update_for_return_gcall (call_stmt, ctxt);
4286 /* Attempt to use R to replay SUMMARY into this object.
4287 Return true if it is possible. */
4289 bool
4290 region_model::replay_call_summary (call_summary_replay &r,
4291 const region_model &summary)
4293 gcc_assert (summary.get_stack_depth () == 1);
4295 m_store.replay_call_summary (r, summary.m_store);
4297 if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
4298 return false;
4300 for (auto kv : summary.m_dynamic_extents)
4302 const region *summary_reg = kv.first;
4303 const region *caller_reg = r.convert_region_from_summary (summary_reg);
4304 if (!caller_reg)
4305 continue;
4306 const svalue *summary_sval = kv.second;
4307 const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
4308 if (!caller_sval)
4309 continue;
4310 m_dynamic_extents.put (caller_reg, caller_sval);
4313 return true;
4316 /* Given a true or false edge guarded by conditional statement COND_STMT,
4317 determine appropriate constraints for the edge to be taken.
4319 If they are feasible, add the constraints and return true.
4321 Return false if the constraints contradict existing knowledge
4322 (and so the edge should not be taken).
4323 When returning false, if OUT is non-NULL, write a new rejected_constraint
4324 to it. */
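/* For example (an illustrative sketch): for the false edge of
     if (x < 10)
   the comparison is inverted to "x >= 10" before being added; if the
   model already knows e.g. "x == 3", the new constraint contradicts it
   and the edge is rejected as infeasible.  */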
4326 bool
4327 region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
4328 const gcond *cond_stmt,
4329 region_model_context *ctxt,
4330 rejected_constraint **out)
4332 ::edge cfg_edge = sedge.get_cfg_edge ();
4333 gcc_assert (cfg_edge != NULL);
4334 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
4336 enum tree_code op = gimple_cond_code (cond_stmt);
4337 tree lhs = gimple_cond_lhs (cond_stmt);
4338 tree rhs = gimple_cond_rhs (cond_stmt);
4339 if (cfg_edge->flags & EDGE_FALSE_VALUE)
4340 op = invert_tree_comparison (op, false /* honor_nans */);
4341 return add_constraint (lhs, op, rhs, ctxt, out);
4344 /* Return true iff SWITCH_STMT has a non-default label that contains
4345 INT_CST. */
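/* E.g. for a hypothetical
     switch (i) { default: ...; case 1: ...; case 5 ... 10: ...; }
   a query for 7 binary-searches to the "case 5 ... 10" label
   (CASE_LOW 5, CASE_HIGH 10) and returns true, whereas a query for 3
   falls between the labels and returns false.  */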
4347 static bool
4348 has_nondefault_case_for_value_p (const gswitch *switch_stmt, tree int_cst)
4350 /* We expect the initial label to be the default; skip it. */
4351 gcc_assert (CASE_LOW (gimple_switch_label (switch_stmt, 0)) == NULL);
4352 unsigned min_idx = 1;
4353 unsigned max_idx = gimple_switch_num_labels (switch_stmt) - 1;
4355 /* Binary search: try to find the label containing INT_CST.
4356 This requires the cases to be sorted by CASE_LOW (done by the
4357 gimplifier). */
4358 while (max_idx >= min_idx)
4360 unsigned case_idx = (min_idx + max_idx) / 2;
4361 tree label = gimple_switch_label (switch_stmt, case_idx);
4362 tree low = CASE_LOW (label);
4363 gcc_assert (low);
4364 tree high = CASE_HIGH (label);
4365 if (!high)
4366 high = low;
4367 if (tree_int_cst_compare (int_cst, low) < 0)
4369 /* INT_CST is below the range of this label. */
4370 gcc_assert (case_idx > 0);
4371 max_idx = case_idx - 1;
4373 else if (tree_int_cst_compare (int_cst, high) > 0)
4375 /* INT_CST is above the range of this label. */
4376 min_idx = case_idx + 1;
4378 else
4379 /* This case contains INT_CST. */
4380 return true;
4382 /* Not found. */
4383 return false;
4386 /* Return true iff SWITCH_STMT (which must be on an enum value)
4387 has nondefault cases handling all values in the enum. */
4389 static bool
4390 has_nondefault_cases_for_all_enum_values_p (const gswitch *switch_stmt)
4392 gcc_assert (switch_stmt);
4393 tree type = TREE_TYPE (gimple_switch_index (switch_stmt));
4394 gcc_assert (TREE_CODE (type) == ENUMERAL_TYPE);
4396 for (tree enum_val_iter = TYPE_VALUES (type);
4397 enum_val_iter;
4398 enum_val_iter = TREE_CHAIN (enum_val_iter))
4400 tree enum_val = TREE_VALUE (enum_val_iter);
4401 gcc_assert (TREE_CODE (enum_val) == CONST_DECL);
4402 gcc_assert (TREE_CODE (DECL_INITIAL (enum_val)) == INTEGER_CST);
4403 if (!has_nondefault_case_for_value_p (switch_stmt,
4404 DECL_INITIAL (enum_val)))
4405 return false;
4407 return true;
4410 /* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
4411 for the edge to be taken.
4413 If they are feasible, add the constraints and return true.
4415 Return false if the constraints contradict existing knowledge
4416 (and so the edge should not be taken).
4417 When returning false, if OUT is non-NULL, write a new rejected_constraint
4418 to it. */
4420 bool
4421 region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
4422 const gswitch *switch_stmt,
4423 region_model_context *ctxt,
4424 rejected_constraint **out)
4426 tree index = gimple_switch_index (switch_stmt);
4427 const svalue *index_sval = get_rvalue (index, ctxt);
4429 /* If we're switching based on an enum type, assume that the user is only
4430 working with values from the enum. Hence if this is an
4431 implicitly-created "default", assume it doesn't get followed.
4432 This fixes numerous "uninitialized" false positives where we otherwise
4433 consider jumping past the initialization cases. */
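/* A hypothetical example of the pattern this heuristic handles:
     enum st { ST_A, ST_B };
     enum st s = get_st ();
     int v;
     switch (s)  // no explicit "default:"
       {
       case ST_A: v = 0; break;
       case ST_B: v = 1; break;
       }
     use (v);
   Without it, the implicitly-created default edge would be treated as
   feasible, and "v" would be flagged as possibly uninitialized.  */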
4435 if (/* Don't check during feasibility-checking (when ctxt is NULL). */
4436 ctxt
4437 /* Must be an enum value. */
4438 && index_sval->get_type ()
4439 && TREE_CODE (TREE_TYPE (index)) == ENUMERAL_TYPE
4440 && TREE_CODE (index_sval->get_type ()) == ENUMERAL_TYPE
4441 /* A constant index can be checked against the cases directly, so
4441 only apply this heuristic to non-constant values. */
4442 && index_sval->get_kind () != SK_CONSTANT
4443 && edge.implicitly_created_default_p ()
4444 && has_nondefault_cases_for_all_enum_values_p (switch_stmt)
4445 /* Don't do this if there's a chance that the index is
4446 attacker-controlled. */
4447 && !ctxt->possibly_tainted_p (index_sval))
4449 if (out)
4450 *out = new rejected_default_case (*this);
4451 return false;
4454 bounded_ranges_manager *ranges_mgr = get_range_manager ();
4455 const bounded_ranges *all_cases_ranges
4456 = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
4457 bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
4458 if (!sat && out)
4459 *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
4460 if (sat && ctxt && !all_cases_ranges->empty_p ())
4461 ctxt->on_bounded_ranges (*index_sval, *all_cases_ranges);
4462 return sat;
4465 /* Apply any constraints due to an exception being thrown at LAST_STMT.
4467 If they are feasible, add the constraints and return true.
4469 Return false if the constraints contradict existing knowledge
4470 (and so the edge should not be taken).
4471 When returning false, if OUT is non-NULL, write a new rejected_constraint
4472 to it. */
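/* For instance, in a sketch such as:
     p = new T;  // EH edge taken when the allocation throws
   the allocated pointer would be lost when following the EH edge;
   constraining the lhs to NULL avoids misreporting it as a leak.  */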
4474 bool
4475 region_model::apply_constraints_for_exception (const gimple *last_stmt,
4476 region_model_context *ctxt,
4477 rejected_constraint **out)
4479 gcc_assert (last_stmt);
4480 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
4481 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
4482 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
4483 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
4485 /* We have an exception thrown from operator new.
4486 Add a constraint that the result was NULL, to avoid a false
4487 leak report due to the result being lost when following
4488 the EH edge. */
4489 if (tree lhs = gimple_call_lhs (call))
4490 return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
4491 return true;
4493 return true;
4496 /* For use with push_frame when handling a top-level call within the analysis.
4497 PARAM has a defined but unknown initial value.
4498 Anything it points to has escaped, since the calling context "knows"
4499 the pointer, and thus calls to unknown functions could read/write into
4500 the region.
4501 If NONNULL is true, then assume that PARAM must be non-NULL. */
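/* E.g. when analyzing a hypothetical entrypoint
     void foo (int *p __attribute__((nonnull)));
   "p" gets a defined-but-unknown initial value, its pointee is marked
   as having escaped, and the constraint "p != NULL" is added.  */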
4503 void
4504 region_model::on_top_level_param (tree param,
4505 bool nonnull,
4506 region_model_context *ctxt)
4508 if (POINTER_TYPE_P (TREE_TYPE (param)))
4510 const region *param_reg = get_lvalue (param, ctxt);
4511 const svalue *init_ptr_sval
4512 = m_mgr->get_or_create_initial_value (param_reg);
4513 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
4514 m_store.mark_as_escaped (pointee_reg);
4515 if (nonnull)
4517 const svalue *null_ptr_sval
4518 = m_mgr->get_or_create_null_ptr (TREE_TYPE (param));
4519 add_constraint (init_ptr_sval, NE_EXPR, null_ptr_sval, ctxt);
4524 /* Update this region_model to reflect pushing a frame onto the stack
4525 for a call to FUN.
4527 If ARG_SVALS is non-NULL, use it to populate the parameters
4528 in the new frame.
4529 Otherwise, the params have their initial_svalues.
4531 Return the frame_region for the new frame. */
4533 const region *
4534 region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
4535 region_model_context *ctxt)
4537 m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
4538 if (arg_svals)
4540 /* Arguments supplied from a caller frame. */
4541 tree fndecl = fun->decl;
4542 unsigned idx = 0;
4543 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
4544 iter_parm = DECL_CHAIN (iter_parm), ++idx)
4546 /* If there's a mismatching declaration, the call stmt might
4547 not have enough args. Handle this case by leaving the
4548 rest of the params as uninitialized. */
4549 if (idx >= arg_svals->length ())
4550 break;
4551 tree parm_lval = iter_parm;
4552 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
4553 parm_lval = parm_default_ssa;
4554 const region *parm_reg = get_lvalue (parm_lval, ctxt);
4555 const svalue *arg_sval = (*arg_svals)[idx];
4556 set_value (parm_reg, arg_sval, ctxt);
4559 /* Handle any variadic args. */
4560 unsigned va_arg_idx = 0;
4561 for (; idx < arg_svals->length (); idx++, va_arg_idx++)
4563 const svalue *arg_sval = (*arg_svals)[idx];
4564 const region *var_arg_reg
4565 = m_mgr->get_var_arg_region (m_current_frame,
4566 va_arg_idx);
4567 set_value (var_arg_reg, arg_sval, ctxt);
4570 else
4572 /* Otherwise we have a top-level call within the analysis. The params
4573 have defined but unknown initial values.
4574 Anything they point to has escaped. */
4575 tree fndecl = fun->decl;
4577 /* Handle "__attribute__((nonnull))". */
4578 tree fntype = TREE_TYPE (fndecl);
4579 bitmap nonnull_args = get_nonnull_args (fntype);
4581 unsigned parm_idx = 0;
4582 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
4583 iter_parm = DECL_CHAIN (iter_parm))
4585 bool non_null = (nonnull_args
4586 ? (bitmap_empty_p (nonnull_args)
4587 || bitmap_bit_p (nonnull_args, parm_idx))
4588 : false);
4589 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
4590 on_top_level_param (parm_default_ssa, non_null, ctxt);
4591 else
4592 on_top_level_param (iter_parm, non_null, ctxt);
4593 parm_idx++;
4596 BITMAP_FREE (nonnull_args);
4599 return m_current_frame;
4602 /* Get the function of the top-most frame in this region_model's stack.
4603 There must be such a frame. */
4605 function *
4606 region_model::get_current_function () const
4608 const frame_region *frame = get_current_frame ();
4609 gcc_assert (frame);
4610 return frame->get_function ();
4613 /* Pop the topmost frame_region from this region_model's stack:
4615 If RESULT_LVALUE is non-null, copy any return value from the frame
4616 into the corresponding region (evaluated with respect to the *caller*
4617 frame, rather than the called frame).
4618 If OUT_RESULT is non-null, copy any return value from the frame
4619 into *OUT_RESULT.
4621 Purge the frame region and all its descendent regions.
4622 Convert any pointers that point into such regions into
4623 POISON_KIND_POPPED_STACK svalues. */
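/* For example, in a sketch like:
     int *f (void) { int x = 42; return &x; }
     int *p = f ();
   popping f's frame purges the cluster for "x", and the value stored
   in "p" becomes a POISON_KIND_POPPED_STACK svalue, so that later
   dereferences of it can be diagnosed.  */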
4625 void
4626 region_model::pop_frame (tree result_lvalue,
4627 const svalue **out_result,
4628 region_model_context *ctxt)
4630 gcc_assert (m_current_frame);
4632 const frame_region *frame_reg = m_current_frame;
4634 /* Notify state machines. */
4635 if (ctxt)
4636 ctxt->on_pop_frame (frame_reg);
4638 /* Evaluate the result, within the callee frame. */
4639 tree fndecl = m_current_frame->get_function ()->decl;
4640 tree result = DECL_RESULT (fndecl);
4641 const svalue *retval = NULL;
4642 if (result && TREE_TYPE (result) != void_type_node)
4644 retval = get_rvalue (result, ctxt);
4645 if (out_result)
4646 *out_result = retval;
4649 /* Pop the frame. */
4650 m_current_frame = m_current_frame->get_calling_frame ();
4652 if (result_lvalue && retval)
4654 /* Compute result_dst_reg using RESULT_LVALUE *after* popping
4655 the frame, but before poisoning pointers into the old frame. */
4656 const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
4657 set_value (result_dst_reg, retval, ctxt);
4660 unbind_region_and_descendents (frame_reg, POISON_KIND_POPPED_STACK);
4663 /* Get the number of frames in this region_model's stack. */
4665 int
4666 region_model::get_stack_depth () const
4668 const frame_region *frame = get_current_frame ();
4669 if (frame)
4670 return frame->get_stack_depth ();
4671 else
4672 return 0;
4675 /* Get the frame_region with the given index within the stack.
4676 The frame_region must exist. */
4678 const frame_region *
4679 region_model::get_frame_at_index (int index) const
4681 const frame_region *frame = get_current_frame ();
4682 gcc_assert (frame);
4683 gcc_assert (index >= 0);
4684 gcc_assert (index <= frame->get_index ());
4685 while (index != frame->get_index ())
4687 frame = frame->get_calling_frame ();
4688 gcc_assert (frame);
4690 return frame;
4693 /* Unbind svalues for any regions in REG and below.
4694 Find any pointers to such regions; convert them to
4695 poisoned values of kind PKIND.
4696 Also purge any dynamic extents. */
4698 void
4699 region_model::unbind_region_and_descendents (const region *reg,
4700 enum poison_kind pkind)
4702 /* Gather a set of base regions to be unbound. */
4703 hash_set<const region *> base_regs;
4704 for (store::cluster_map_t::iterator iter = m_store.begin ();
4705 iter != m_store.end (); ++iter)
4707 const region *iter_base_reg = (*iter).first;
4708 if (iter_base_reg->descendent_of_p (reg))
4709 base_regs.add (iter_base_reg);
4711 for (hash_set<const region *>::iterator iter = base_regs.begin ();
4712 iter != base_regs.end (); ++iter)
4713 m_store.purge_cluster (*iter);
4715 /* Find any pointers to REG or its descendents; convert to poisoned. */
4716 poison_any_pointers_to_descendents (reg, pkind);
4718 /* Purge dynamic extents of any base regions in REG and below
4719 (e.g. VLAs and alloca stack regions). */
4720 for (auto iter : m_dynamic_extents)
4722 const region *iter_reg = iter.first;
4723 if (iter_reg->descendent_of_p (reg))
4724 unset_dynamic_extents (iter_reg);
4728 /* Implementation of BindingVisitor.
4729 Update the bound svalues for regions below REG to use poisoned
4730 values instead. */
4732 struct bad_pointer_finder
4734 bad_pointer_finder (const region *reg, enum poison_kind pkind,
4735 region_model_manager *mgr)
4736 : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
4739 void on_binding (const binding_key *, const svalue *&sval)
4741 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
4743 const region *ptr_dst = ptr_sval->get_pointee ();
4744 /* Poison ptrs to descendents of REG, but not to REG itself,
4745 otherwise double-free detection doesn't work (since sm-state
4746 for "free" is stored on the original ptr svalue). */
4747 if (ptr_dst->descendent_of_p (m_reg)
4748 && ptr_dst != m_reg)
4750 sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
4751 sval->get_type ());
4752 ++m_count;
4757 const region *m_reg;
4758 enum poison_kind m_pkind;
4759 region_model_manager *const m_mgr;
4760 int m_count;
4763 /* Find any pointers to REG or its descendents; convert them to
4764 poisoned values of kind PKIND.
4765 Return the number of pointers that were poisoned. */
4767 int
4768 region_model::poison_any_pointers_to_descendents (const region *reg,
4769 enum poison_kind pkind)
4771 bad_pointer_finder bv (reg, pkind, m_mgr);
4772 m_store.for_each_binding (bv);
4773 return bv.m_count;
4776 /* Attempt to merge THIS with OTHER_MODEL, writing the result
4777 to OUT_MODEL. Use POINT to distinguish values created as a
4778 result of merging. */
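/* E.g. (an illustrative sketch) a model in which "i == 0" can merge
   with one in which "i == 1" by dropping the constraint and widening
   "i", whereas two models with different m_current_frame values can
   never be merged.  */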
4780 bool
4781 region_model::can_merge_with_p (const region_model &other_model,
4782 const program_point &point,
4783 region_model *out_model,
4784 const extrinsic_state *ext_state,
4785 const program_state *state_a,
4786 const program_state *state_b) const
4788 gcc_assert (out_model);
4789 gcc_assert (m_mgr == other_model.m_mgr);
4790 gcc_assert (m_mgr == out_model->m_mgr);
4792 if (m_current_frame != other_model.m_current_frame)
4793 return false;
4794 out_model->m_current_frame = m_current_frame;
4796 model_merger m (this, &other_model, point, out_model,
4797 ext_state, state_a, state_b);
4799 if (!store::can_merge_p (&m_store, &other_model.m_store,
4800 &out_model->m_store, m_mgr->get_store_manager (),
4801 &m))
4802 return false;
4804 if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
4805 &out_model->m_dynamic_extents))
4806 return false;
4808 /* Merge constraints. */
4809 constraint_manager::merge (*m_constraints,
4810 *other_model.m_constraints,
4811 out_model->m_constraints);
4813 return true;
4816 /* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
4817 otherwise. */
4819 tree
4820 region_model::get_fndecl_for_call (const gcall *call,
4821 region_model_context *ctxt)
4823 tree fn_ptr = gimple_call_fn (call);
4824 if (fn_ptr == NULL_TREE)
4825 return NULL_TREE;
4826 const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
4827 if (const region_svalue *fn_ptr_ptr
4828 = fn_ptr_sval->dyn_cast_region_svalue ())
4830 const region *reg = fn_ptr_ptr->get_pointee ();
4831 if (const function_region *fn_reg = reg->dyn_cast_function_region ())
4833 tree fn_decl = fn_reg->get_fndecl ();
4834 cgraph_node *node = cgraph_node::get (fn_decl);
4835 if (!node)
4836 return NULL_TREE;
4837 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
4838 if (ultimate_node)
4839 return ultimate_node->decl;
4843 return NULL_TREE;
4846 /* Would be much simpler to use a lambda here, if it were supported. */
4848 struct append_regions_cb_data
4850 const region_model *model;
4851 auto_vec<const decl_region *> *out;
4854 /* Populate *OUT with all decl_regions in the current
4855 frame that have clusters within the store. */
4857 void
4858 region_model::
4859 get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
4861 append_regions_cb_data data;
4862 data.model = this;
4863 data.out = out;
4864 m_store.for_each_cluster (append_regions_cb, &data);
4867 /* Implementation detail of get_regions_for_current_frame. */
4869 void
4870 region_model::append_regions_cb (const region *base_reg,
4871 append_regions_cb_data *cb_data)
4873 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
4874 return;
4875 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
4876 cb_data->out->safe_push (decl_reg);
4880 /* Abstract class for diagnostics related to the use of
4881 floating-point arithmetic where precision is needed. */
4883 class imprecise_floating_point_arithmetic : public pending_diagnostic
4885 public:
4886 int get_controlling_option () const final override
4888 return OPT_Wanalyzer_imprecise_fp_arithmetic;
4892 /* Concrete diagnostic to complain about uses of floating-point arithmetic
4893 in the size argument of malloc etc. */
4895 class float_as_size_arg : public imprecise_floating_point_arithmetic
4897 public:
4898 float_as_size_arg (tree arg) : m_arg (arg)
4901 const char *get_kind () const final override
4903 return "float_as_size_arg_diagnostic";
4906 bool subclass_equal_p (const pending_diagnostic &other) const final override
4908 return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
4911 bool emit (rich_location *rich_loc) final override
4913 diagnostic_metadata m;
4914 bool warned = warning_meta (rich_loc, m, get_controlling_option (),
4915 "use of floating-point arithmetic here might"
4916 " yield unexpected results");
4917 if (warned)
4918 inform (rich_loc->get_loc (), "only use operands of an integer type"
4919 " inside the size argument");
4920 return warned;
4923 label_text describe_final_event (const evdesc::final_event &ev) final
4924 override
4926 if (m_arg)
4927 return ev.formatted_print ("operand %qE is of type %qT",
4928 m_arg, TREE_TYPE (m_arg));
4929 return ev.formatted_print ("at least one operand of the size argument is"
4930 " of a floating-point type");
4933 private:
4934 tree m_arg;
4937 /* Visitor to find uses of floating-point variables/constants in an svalue. */
4939 class contains_floating_point_visitor : public visitor
4941 public:
4942 contains_floating_point_visitor (const svalue *root_sval) : m_result (NULL)
4944 root_sval->accept (this);
4947 const svalue *get_svalue_to_report ()
4949 return m_result;
4952 void visit_constant_svalue (const constant_svalue *sval) final override
4954 /* At the point the analyzer runs, constant integer operands in a
4955 floating-point expression have already been implicitly converted to
4956 floating-point values. Thus we prefer to report non-constants, so
4957 that the diagnostic always reports a floating-point operand. */
4958 tree type = sval->get_type ();
4959 if (type && FLOAT_TYPE_P (type) && !m_result)
4960 m_result = sval;
4963 void visit_conjured_svalue (const conjured_svalue *sval) final override
4965 tree type = sval->get_type ();
4966 if (type && FLOAT_TYPE_P (type))
4967 m_result = sval;
4970 void visit_initial_svalue (const initial_svalue *sval) final override
4972 tree type = sval->get_type ();
4973 if (type && FLOAT_TYPE_P (type))
4974 m_result = sval;
4977 private:
4978 /* Non-null if at least one floating-point operand was found. */
4979 const svalue *m_result;
4982 /* May complain about uses of floating-point operands in SIZE_IN_BYTES. */
4984 void
4985 region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
4986 region_model_context *ctxt) const
4988 gcc_assert (ctxt);
4990 contains_floating_point_visitor v (size_in_bytes);
4991 if (const svalue *float_sval = v.get_svalue_to_report ())
4993 tree diag_arg = get_representative_tree (float_sval);
4994 ctxt->warn (make_unique<float_as_size_arg> (diag_arg));
4998 /* Return a region describing a heap-allocated block of memory.
4999 Use CTXT to complain about tainted sizes.
5001 Reuse an existing heap_allocated_region if it's not being referenced by
5002 this region_model; otherwise create a new one. */
5004 const region *
5005 region_model::get_or_create_region_for_heap_alloc (const svalue *size_in_bytes,
5006 region_model_context *ctxt)
5008 /* Determine which regions are referenced in this region_model, so that
5009 we can reuse an existing heap_allocated_region if it's not in use on
5010 this path. */
5011 auto_bitmap base_regs_in_use;
5012 get_referenced_base_regions (base_regs_in_use);
5013 const region *reg
5014 = m_mgr->get_or_create_region_for_heap_alloc (base_regs_in_use);
5015 if (size_in_bytes)
5016 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
5017 set_dynamic_extents (reg, size_in_bytes, ctxt);
5018 return reg;
5021 /* Populate OUT_IDS with the set of IDs of those base regions which are
5022 reachable in this region_model. */
5024 void
5025 region_model::get_referenced_base_regions (auto_bitmap &out_ids) const
5027 reachable_regions reachable_regs (const_cast<region_model *> (this));
5028 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
5029 &reachable_regs);
5030 /* Get regions for locals that have explicitly bound values. */
5031 for (store::cluster_map_t::iterator iter = m_store.begin ();
5032 iter != m_store.end (); ++iter)
5034 const region *base_reg = (*iter).first;
5035 if (const region *parent = base_reg->get_parent_region ())
5036 if (parent->get_kind () == RK_FRAME)
5037 reachable_regs.add (base_reg, false);
5040 bitmap_clear (out_ids);
5041 for (auto iter_reg : reachable_regs)
5042 bitmap_set_bit (out_ids, iter_reg->get_id ());
5045 /* Return a new region describing a block of memory allocated within the
5046 current frame.
5047 Use CTXT to complain about tainted sizes. */
5049 const region *
5050 region_model::create_region_for_alloca (const svalue *size_in_bytes,
5051 region_model_context *ctxt)
5053 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
5054 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
5055 set_dynamic_extents (reg, size_in_bytes, ctxt);
5056 return reg;
5059 /* Record that the size of REG is SIZE_IN_BYTES.
5060 Use CTXT to complain about tainted sizes. */
5062 void
5063 region_model::set_dynamic_extents (const region *reg,
5064 const svalue *size_in_bytes,
5065 region_model_context *ctxt)
5067 assert_compat_types (size_in_bytes->get_type (), size_type_node);
5068 if (ctxt)
5070 check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
5071 ctxt);
5072 check_dynamic_size_for_floats (size_in_bytes, ctxt);
5074 m_dynamic_extents.put (reg, size_in_bytes);
5077 /* Get the recorded dynamic size of REG in bytes, or NULL if no
5078 dynamic size was recorded. */
5080 const svalue *
5081 region_model::get_dynamic_extents (const region *reg) const
5083 if (const svalue * const *slot = m_dynamic_extents.get (reg))
5084 return *slot;
5085 return NULL;
5088 /* Unset any recorded dynamic size of REG. */
5090 void
5091 region_model::unset_dynamic_extents (const region *reg)
5093 m_dynamic_extents.remove (reg);
5096 /* Information of the layout of a RECORD_TYPE, capturing it as a vector
5097 of items, where each item is either a field or padding. */
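/* E.g. a hypothetical
     struct s { char c; int i; };
   on a target with 32-bit, 32-bit-aligned int would be captured as:
     "c": bits 0-7
     padding after "c": bits 8-31
     "i": bits 32-63.  */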
5099 class record_layout
5101 public:
5102 /* An item within a record; either a field, or padding after a field. */
5103 struct item
5105 public:
5106 item (const bit_range &br,
5107 tree field,
5108 bool is_padding)
5109 : m_bit_range (br),
5110 m_field (field),
5111 m_is_padding (is_padding)
5115 bit_offset_t get_start_bit_offset () const
5117 return m_bit_range.get_start_bit_offset ();
5119 bit_offset_t get_next_bit_offset () const
5121 return m_bit_range.get_next_bit_offset ();
5124 bool contains_p (bit_offset_t offset) const
5126 return m_bit_range.contains_p (offset);
5129 void dump_to_pp (pretty_printer *pp) const
5131 if (m_is_padding)
5132 pp_printf (pp, "padding after %qD", m_field);
5133 else
5134 pp_printf (pp, "%qD", m_field);
5135 pp_string (pp, ", ");
5136 m_bit_range.dump_to_pp (pp);
5139 bit_range m_bit_range;
5140 tree m_field;
5141 bool m_is_padding;
5144 record_layout (tree record_type)
5146 gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);
5148 for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
5149 iter = DECL_CHAIN (iter))
5151 if (TREE_CODE (iter) == FIELD_DECL)
5153 int iter_field_offset = int_bit_position (iter);
5154 bit_size_t size_in_bits;
5155 if (!int_size_in_bits (TREE_TYPE (iter), &size_in_bits))
5156 size_in_bits = 0;
5158 maybe_pad_to (iter_field_offset);
5160 /* Add field. */
5161 m_items.safe_push (item (bit_range (iter_field_offset,
5162 size_in_bits),
5163 iter, false));
5167 /* Add any trailing padding. */
5168 bit_size_t size_in_bits;
5169 if (int_size_in_bits (record_type, &size_in_bits))
5170 maybe_pad_to (size_in_bits);
5173 void dump_to_pp (pretty_printer *pp) const
5175 unsigned i;
5176 item *it;
5177 FOR_EACH_VEC_ELT (m_items, i, it)
5179 it->dump_to_pp (pp);
5180 pp_newline (pp);
5184 DEBUG_FUNCTION void dump () const
5186 pretty_printer pp;
5187 pp_format_decoder (&pp) = default_tree_printer;
5188 pp.buffer->stream = stderr;
5189 dump_to_pp (&pp);
5190 pp_flush (&pp);
5193 const record_layout::item *get_item_at (bit_offset_t offset) const
5195 unsigned i;
5196 item *it;
5197 FOR_EACH_VEC_ELT (m_items, i, it)
5198 if (it->contains_p (offset))
5199 return it;
5200 return NULL;
5203 private:
5204 /* Subroutine of ctor. Add padding item to NEXT_OFFSET if necessary. */
5206 void maybe_pad_to (bit_offset_t next_offset)
5208 if (m_items.length () > 0)
5210 const item &last_item = m_items[m_items.length () - 1];
5211 bit_offset_t offset_after_last_item
5212 = last_item.get_next_bit_offset ();
5213 if (next_offset > offset_after_last_item)
5215 bit_size_t padding_size
5216 = next_offset - offset_after_last_item;
5217 m_items.safe_push (item (bit_range (offset_after_last_item,
5218 padding_size),
5219 last_item.m_field, true));
5224 auto_vec<item> m_items;
5227 /* A subclass of pending_diagnostic for complaining about uninitialized data
5228 being copied across a trust boundary to an untrusted output
5229 (e.g. copy_to_user infoleaks in the Linux kernel). */
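/* A minimal sketch of the kind of code this complains about (assuming
   a copy_to_user-style boundary-crossing API):
     struct s { int i; char c; };  // trailing padding after "c"
     struct s v;
     v.i = 0;
     v.c = 0;
     copy_to_user (uptr, &v, sizeof (v));  // padding bytes leak
 */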
5231 class exposure_through_uninit_copy
5232 : public pending_diagnostic_subclass<exposure_through_uninit_copy>
5234 public:
5235 exposure_through_uninit_copy (const region *src_region,
5236 const region *dest_region,
5237 const svalue *copied_sval)
5238 : m_src_region (src_region),
5239 m_dest_region (dest_region),
5240 m_copied_sval (copied_sval)
5242 gcc_assert (m_copied_sval->get_kind () == SK_POISONED
5243 || m_copied_sval->get_kind () == SK_COMPOUND);
5246 const char *get_kind () const final override
5248 return "exposure_through_uninit_copy";
5251 bool operator== (const exposure_through_uninit_copy &other) const
5253 return (m_src_region == other.m_src_region
5254 && m_dest_region == other.m_dest_region
5255 && m_copied_sval == other.m_copied_sval);
5258 int get_controlling_option () const final override
5260 return OPT_Wanalyzer_exposure_through_uninit_copy;
5263 bool emit (rich_location *rich_loc) final override
5265 diagnostic_metadata m;
5266 /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor. */
5267 m.add_cwe (200);
5268 enum memory_space mem_space = get_src_memory_space ();
5269 bool warned;
5270 switch (mem_space)
5272 default:
5273 warned = warning_meta
5274 (rich_loc, m, get_controlling_option (),
5275 "potential exposure of sensitive information"
5276 " by copying uninitialized data across trust boundary");
5277 break;
5278 case MEMSPACE_STACK:
5279 warned = warning_meta
5280 (rich_loc, m, get_controlling_option (),
5281 "potential exposure of sensitive information"
5282 " by copying uninitialized data from stack across trust boundary");
5283 break;
5284 case MEMSPACE_HEAP:
5285 warned = warning_meta
5286 (rich_loc, m, get_controlling_option (),
5287 "potential exposure of sensitive information"
5288 " by copying uninitialized data from heap across trust boundary");
5289 break;
5291 if (warned)
5293 location_t loc = rich_loc->get_loc ();
5294 inform_number_of_uninit_bits (loc);
5295 complain_about_uninit_ranges (loc);
5297 if (mem_space == MEMSPACE_STACK)
5298 maybe_emit_fixit_hint ();
5300 return warned;
5303 label_text describe_final_event (const evdesc::final_event &) final override
5305 enum memory_space mem_space = get_src_memory_space ();
5306 switch (mem_space)
5308 default:
5309 return label_text::borrow ("uninitialized data copied here");
5311 case MEMSPACE_STACK:
5312 return label_text::borrow ("uninitialized data copied from stack here");
5314 case MEMSPACE_HEAP:
5315 return label_text::borrow ("uninitialized data copied from heap here");
5319 void mark_interesting_stuff (interesting_t *interest) final override
5321 if (m_src_region)
5322 interest->add_region_creation (m_src_region);
5325 private:
5326 enum memory_space get_src_memory_space () const
5328 return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
5331 bit_size_t calc_num_uninit_bits () const
5333 switch (m_copied_sval->get_kind ())
5335 default:
5336 gcc_unreachable ();
5337 break;
5338 case SK_POISONED:
5340 const poisoned_svalue *poisoned_sval
5341 = as_a <const poisoned_svalue *> (m_copied_sval);
5342 gcc_assert (poisoned_sval->get_poison_kind () == POISON_KIND_UNINIT);
5344 /* Give up if we don't have type information. */
5345 if (m_copied_sval->get_type () == NULL_TREE)
5346 return 0;
5348 bit_size_t size_in_bits;
5349 if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
5350 return size_in_bits;
5352 /* Give up if we can't get the size of the type. */
5353 return 0;
5355 break;
5356 case SK_COMPOUND:
5358 const compound_svalue *compound_sval
5359 = as_a <const compound_svalue *> (m_copied_sval);
5360 bit_size_t result = 0;
5361 /* Find keys for uninit svals. */
5362 for (auto iter : *compound_sval)
5364 const svalue *sval = iter.second;
5365 if (const poisoned_svalue *psval
5366 = sval->dyn_cast_poisoned_svalue ())
5367 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
5369 const binding_key *key = iter.first;
5370 const concrete_binding *ckey
5371 = key->dyn_cast_concrete_binding ();
5372 gcc_assert (ckey);
5373 result += ckey->get_size_in_bits ();
5376 return result;
5381 void inform_number_of_uninit_bits (location_t loc) const
5383 bit_size_t num_uninit_bits = calc_num_uninit_bits ();
5384 if (num_uninit_bits <= 0)
5385 return;
5386 if (num_uninit_bits % BITS_PER_UNIT == 0)
5388 /* Express in bytes. */
5389 byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT;
5390 if (num_uninit_bytes == 1)
5391 inform (loc, "1 byte is uninitialized");
5392 else
5393 inform (loc,
5394 "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
5396 else
5398 /* Express in bits. */
5399 if (num_uninit_bits == 1)
5400 inform (loc, "1 bit is uninitialized");
5401 else
5402 inform (loc,
5403 "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
5407 void complain_about_uninit_ranges (location_t loc) const
5409 if (const compound_svalue *compound_sval
5410 = m_copied_sval->dyn_cast_compound_svalue ())
5412 /* Find keys for uninit svals. */
5413 auto_vec<const concrete_binding *> uninit_keys;
5414 for (auto iter : *compound_sval)
5416 const svalue *sval = iter.second;
5417 if (const poisoned_svalue *psval
5418 = sval->dyn_cast_poisoned_svalue ())
5419 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
5421 const binding_key *key = iter.first;
5422 const concrete_binding *ckey
5423 = key->dyn_cast_concrete_binding ();
5424 gcc_assert (ckey);
5425 uninit_keys.safe_push (ckey);
5428 /* Complain about them in sorted order. */
5429 uninit_keys.qsort (concrete_binding::cmp_ptr_ptr);
5431 std::unique_ptr<record_layout> layout;
5433 tree type = m_copied_sval->get_type ();
5434 if (type && TREE_CODE (type) == RECORD_TYPE)
5436 // (std::make_unique is C++14)
5437 layout = std::unique_ptr<record_layout> (new record_layout (type));
5439 if (0)
5440 layout->dump ();
5443 unsigned i;
5444 const concrete_binding *ckey;
5445 FOR_EACH_VEC_ELT (uninit_keys, i, ckey)
5447 bit_offset_t start_bit = ckey->get_start_bit_offset ();
5448 bit_offset_t next_bit = ckey->get_next_bit_offset ();
5449 complain_about_uninit_range (loc, start_bit, next_bit,
5450 layout.get ());
5455 void complain_about_uninit_range (location_t loc,
5456 bit_offset_t start_bit,
5457 bit_offset_t next_bit,
5458 const record_layout *layout) const
5460 if (layout)
5462 while (start_bit < next_bit)
5464 if (const record_layout::item *item
5465 = layout->get_item_at (start_bit))
5467 gcc_assert (start_bit >= item->get_start_bit_offset ());
5468 gcc_assert (start_bit < item->get_next_bit_offset ());
5469 if (item->get_start_bit_offset () == start_bit
5470 && item->get_next_bit_offset () <= next_bit)
5471 complain_about_fully_uninit_item (*item);
5472 else
5473 complain_about_partially_uninit_item (*item);
5474 start_bit = item->get_next_bit_offset ();
5475 continue;
5477 else
5478 break;
5482 if (start_bit >= next_bit)
5483 return;
5485 if (start_bit % 8 == 0 && next_bit % 8 == 0)
5487 /* Express in bytes. */
5488 byte_offset_t start_byte = start_bit / 8;
5489 byte_offset_t last_byte = (next_bit / 8) - 1;
5490 if (last_byte == start_byte)
5491 inform (loc,
5492 "byte %wu is uninitialized",
5493 start_byte.to_uhwi ());
5494 else
5495 inform (loc,
5496 "bytes %wu - %wu are uninitialized",
5497 start_byte.to_uhwi (),
5498 last_byte.to_uhwi ());
5500 else
5502 /* Express in bits. */
5503 bit_offset_t last_bit = next_bit - 1;
5504 if (last_bit == start_bit)
5505 inform (loc,
5506 "bit %wu is uninitialized",
5507 start_bit.to_uhwi ());
5508 else
5509 inform (loc,
5510 "bits %wu - %wu are uninitialized",
5511 start_bit.to_uhwi (),
5512 last_bit.to_uhwi ());
5516 static void
5517 complain_about_fully_uninit_item (const record_layout::item &item)
5519 tree field = item.m_field;
5520 bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
5521 if (item.m_is_padding)
5523 if (num_bits % 8 == 0)
5525 /* Express in bytes. */
5526 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
5527 if (num_bytes == 1)
5528 inform (DECL_SOURCE_LOCATION (field),
5529 "padding after field %qD is uninitialized (1 byte)",
5530 field);
5531 else
5532 inform (DECL_SOURCE_LOCATION (field),
5533 "padding after field %qD is uninitialized (%wu bytes)",
5534 field, num_bytes.to_uhwi ());
5536 else
5538 /* Express in bits. */
5539 if (num_bits == 1)
5540 inform (DECL_SOURCE_LOCATION (field),
5541 "padding after field %qD is uninitialized (1 bit)",
5542 field);
5543 else
5544 inform (DECL_SOURCE_LOCATION (field),
5545 "padding after field %qD is uninitialized (%wu bits)",
5546 field, num_bits.to_uhwi ());
5549 else
5551 if (num_bits % 8 == 0)
5553 /* Express in bytes. */
5554 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
5555 if (num_bytes == 1)
5556 inform (DECL_SOURCE_LOCATION (field),
5557 "field %qD is uninitialized (1 byte)", field);
5558 else
5559 inform (DECL_SOURCE_LOCATION (field),
5560 "field %qD is uninitialized (%wu bytes)",
5561 field, num_bytes.to_uhwi ());
5563 else
5565 /* Express in bits. */
5566 if (num_bits == 1)
5567 inform (DECL_SOURCE_LOCATION (field),
5568 "field %qD is uninitialized (1 bit)", field);
5569 else
5570 inform (DECL_SOURCE_LOCATION (field),
5571 "field %qD is uninitialized (%wu bits)",
5572 field, num_bits.to_uhwi ());
5577 static void
5578 complain_about_partially_uninit_item (const record_layout::item &item)
5580 tree field = item.m_field;
5581 if (item.m_is_padding)
5582 inform (DECL_SOURCE_LOCATION (field),
5583 "padding after field %qD is partially uninitialized",
5584 field);
5585 else
5586 inform (DECL_SOURCE_LOCATION (field),
5587 "field %qD is partially uninitialized",
5588 field);
5589 /* TODO: ideally we'd describe what parts are uninitialized. */
5592 void maybe_emit_fixit_hint () const
5594 if (tree decl = m_src_region->maybe_get_decl ())
5596 gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl));
5597 hint_richloc.add_fixit_insert_after (" = {0}");
5598 inform (&hint_richloc,
5599 "suggest forcing zero-initialization by"
5600 " providing a %<{0}%> initializer");
5604 private:
5605 const region *m_src_region;
5606 const region *m_dest_region;
5607 const svalue *m_copied_sval;
5610 /* Return true if any part of SVAL is uninitialized. */
5612 static bool
5613 contains_uninit_p (const svalue *sval)
5615 struct uninit_finder : public visitor
5617 public:
5618 uninit_finder () : m_found_uninit (false) {}
5619 void visit_poisoned_svalue (const poisoned_svalue *sval)
5621 if (sval->get_poison_kind () == POISON_KIND_UNINIT)
5622 m_found_uninit = true;
5624 bool m_found_uninit;
5627 uninit_finder v;
5628 sval->accept (&v);
5630 return v.m_found_uninit;
5633 /* Function for use by plugins when simulating writing data through a
5634 pointer to an "untrusted" region DST_REG (and thus crossing a security
5635 boundary), such as copying data to user space in an OS kernel.
5637 Check that COPIED_SVAL is fully initialized. If not, complain about
5638 an infoleak to CTXT.
5640 SRC_REG can be NULL; if non-NULL it is used as a hint in the diagnostic
5641 as to where COPIED_SVAL came from. */
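/* A sketch of a plugin-side caller (purely illustrative):
     const region *dst_reg = model->deref_rvalue (dst_sval, dst_tree, ctxt);
     const region *src_reg = model->deref_rvalue (src_sval, src_tree, ctxt);
     const svalue *copied_sval = model->get_store_value (src_reg, ctxt);
     model->maybe_complain_about_infoleak (dst_reg, copied_sval,
                                           src_reg, ctxt);  */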
5643 void
5644 region_model::maybe_complain_about_infoleak (const region *dst_reg,
5645 const svalue *copied_sval,
5646 const region *src_reg,
5647 region_model_context *ctxt)
5649 /* Check for exposure. */
5650 if (contains_uninit_p (copied_sval))
5651 ctxt->warn (make_unique<exposure_through_uninit_copy> (src_reg,
5652 dst_reg,
5653 copied_sval));
5656 /* Set errno to a positive symbolic int, as if some error has occurred. */
5658 void
5659 region_model::set_errno (const call_details &cd)
5661 const region *errno_reg = m_mgr->get_errno_region ();
5662 conjured_purge p (this, cd.get_ctxt ());
5663 const svalue *new_errno_sval
5664 = m_mgr->get_or_create_conjured_svalue (integer_type_node,
5665 cd.get_call_stmt (),
5666 errno_reg, p);
5667 const svalue *zero
5668 = m_mgr->get_or_create_int_cst (integer_type_node, 0);
5669 add_constraint (new_errno_sval, GT_EXPR, zero, cd.get_ctxt ());
5670 set_value (errno_reg, new_errno_sval, cd.get_ctxt ());
5673 /* class noop_region_model_context : public region_model_context. */
5675 void
5676 noop_region_model_context::add_note (std::unique_ptr<pending_note>)
5680 void
5681 noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
5685 void
5686 noop_region_model_context::terminate_path ()
5690 /* struct model_merger. */
5692 /* Dump a multiline representation of this merger to PP. */
5694 void
5695 model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
5697 pp_string (pp, "model A:");
5698 pp_newline (pp);
5699 m_model_a->dump_to_pp (pp, simple, true);
5700 pp_newline (pp);
5702 pp_string (pp, "model B:");
5703 pp_newline (pp);
5704 m_model_b->dump_to_pp (pp, simple, true);
5705 pp_newline (pp);
5707 pp_string (pp, "merged model:");
5708 pp_newline (pp);
5709 m_merged_model->dump_to_pp (pp, simple, true);
5710 pp_newline (pp);
5713 /* Dump a multiline representation of this merger to FILE. */
5715 void
5716 model_merger::dump (FILE *fp, bool simple) const
5718 pretty_printer pp;
5719 pp_format_decoder (&pp) = default_tree_printer;
5720 pp_show_color (&pp) = pp_show_color (global_dc->printer);
5721 pp.buffer->stream = fp;
5722 dump_to_pp (&pp, simple);
5723 pp_flush (&pp);
5726 /* Dump a multiline representation of this merger to stderr. */
5728 DEBUG_FUNCTION void
5729 model_merger::dump (bool simple) const
5731 dump (stderr, simple);
5734 /* Return true if it's OK to merge SVAL with other svalues. */
5736 bool
5737 model_merger::mergeable_svalue_p (const svalue *sval) const
5739 if (m_ext_state)
5741 /* Reject merging svalues that have non-purgeable sm-state,
5742 to avoid falsely reporting memory leaks by merging them
5743 with something else. For example, given a local var "p",
5744 reject the merger of a:
5745 store_a mapping "p" to a malloc-ed ptr
5746 with:
5747 store_b mapping "p" to a NULL ptr. */
5748 if (m_state_a)
5749 if (!m_state_a->can_purge_p (*m_ext_state, sval))
5750 return false;
5751 if (m_state_b)
5752 if (!m_state_b->can_purge_p (*m_ext_state, sval))
5753 return false;
5755 return true;
5758 } // namespace ana
5760 /* Dump RMODEL fully to stderr (i.e. without summarization). */
5762 DEBUG_FUNCTION void
5763 debug (const region_model &rmodel)
5765 rmodel.dump (false);
5768 /* class rejected_op_constraint : public rejected_constraint. */
5770 void
5771 rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
5773 region_model m (m_model);
5774 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
5775 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
5776 lhs_sval->dump_to_pp (pp, true);
5777 pp_printf (pp, " %s ", op_symbol_code (m_op));
5778 rhs_sval->dump_to_pp (pp, true);
5781 /* class rejected_default_case : public rejected_constraint. */
5783 void
5784 rejected_default_case::dump_to_pp (pretty_printer *pp) const
5786 pp_string (pp, "implicit default for enum");
5789 /* class rejected_ranges_constraint : public rejected_constraint. */
5791 void
5792 rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
5794 region_model m (m_model);
5795 const svalue *sval = m.get_rvalue (m_expr, NULL);
5796 sval->dump_to_pp (pp, true);
5797 pp_string (pp, " in ");
5798 m_ranges->dump_to_pp (pp, true);
5801 /* class engine. */
5803 /* engine's ctor. */
5805 engine::engine (const supergraph *sg, logger *logger)
5806 : m_sg (sg), m_mgr (logger)
5810 /* Dump the managed objects by class to LOGGER, and the per-class totals. */
5812 void
5813 engine::log_stats (logger *logger) const
5815 m_mgr.log_stats (logger, true);
5818 namespace ana {
5820 #if CHECKING_P
5822 namespace selftest {
5824 /* Build a constant tree of the given type from STR. */
5826 static tree
5827 build_real_cst_from_string (tree type, const char *str)
5829 REAL_VALUE_TYPE real;
5830 real_from_string (&real, str);
5831 return build_real (type, real);
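/* Example usage (illustrative; as exercised by
   append_interesting_constants below):
     tree nan = build_real_cst_from_string (float_type_node, "QNaN");
   yields a float NaN constant; "Inf", "-0.0", etc. work likewise.  */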
5834 /* Append various "interesting" constants to OUT (e.g. NaN). */
5836 static void
5837 append_interesting_constants (auto_vec<tree> *out)
5839 out->safe_push (build_int_cst (integer_type_node, 0));
5840 out->safe_push (build_int_cst (integer_type_node, 42));
5841 out->safe_push (build_int_cst (unsigned_type_node, 0));
5842 out->safe_push (build_int_cst (unsigned_type_node, 42));
5843 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
5844 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
5845 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
5846 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
5847 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
5848 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
5849 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
5850 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
5853 /* Verify that tree_cmp is a well-behaved comparator for qsort, even
5854 if the underlying constants aren't comparable. */
5856 static void
5857 test_tree_cmp_on_constants ()
5859 auto_vec<tree> csts;
5860 append_interesting_constants (&csts);
5862 /* Try sorting every triple. */
5863 const unsigned num = csts.length ();
5864 for (unsigned i = 0; i < num; i++)
5865 for (unsigned j = 0; j < num; j++)
5866 for (unsigned k = 0; k < num; k++)
5868 auto_vec<tree> v (3);
5869 v.quick_push (csts[i]);
5870 v.quick_push (csts[j]);
5871 v.quick_push (csts[k]);
5872 v.qsort (tree_cmp);
5876 /* Implementation detail of the ASSERT_CONDITION_* macros. */
5878 void
5879 assert_condition (const location &loc,
5880 region_model &model,
5881 const svalue *lhs, tree_code op, const svalue *rhs,
5882 tristate expected)
5884 tristate actual = model.eval_condition (lhs, op, rhs);
5885 ASSERT_EQ_AT (loc, actual, expected);
5888 /* Implementation detail of the ASSERT_CONDITION_* macros. */
5890 void
5891 assert_condition (const location &loc,
5892 region_model &model,
5893 tree lhs, tree_code op, tree rhs,
5894 tristate expected)
5896 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
5897 ASSERT_EQ_AT (loc, actual, expected);
5900 /* Implementation detail of ASSERT_DUMP_TREE_EQ. */
5902 static void
5903 assert_dump_tree_eq (const location &loc, tree t, const char *expected)
5905 auto_fix_quotes sentinel;
5906 pretty_printer pp;
5907 pp_format_decoder (&pp) = default_tree_printer;
5908 dump_tree (&pp, t);
5909 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
5912 /* Assert that dump_tree (T) is EXPECTED. */
5914 #define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
5915 SELFTEST_BEGIN_STMT \
5916 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
5917 SELFTEST_END_STMT
5919 /* Implementation detail of ASSERT_DUMP_EQ. */
5921 static void
5922 assert_dump_eq (const location &loc,
5923 const region_model &model,
5924 bool summarize,
5925 const char *expected)
5927 auto_fix_quotes sentinel;
5928 pretty_printer pp;
5929 pp_format_decoder (&pp) = default_tree_printer;
5931 model.dump_to_pp (&pp, summarize, true);
5932 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
5935 /* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
5937 #define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
5938 SELFTEST_BEGIN_STMT \
5939 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
5940 SELFTEST_END_STMT
5942 /* Smoketest for region_model::dump_to_pp. */
5944 static void
5945 test_dump ()
5947 region_model_manager mgr;
5948 region_model model (&mgr);
5950 ASSERT_DUMP_EQ (model, false,
5951 "stack depth: 0\n"
5952 "m_called_unknown_fn: FALSE\n"
5953 "constraint_manager:\n"
5954 " equiv classes:\n"
5955 " constraints:\n");
5956 ASSERT_DUMP_EQ (model, true,
5957 "stack depth: 0\n"
5958 "m_called_unknown_fn: FALSE\n"
5959 "constraint_manager:\n"
5960 " equiv classes:\n"
5961 " constraints:\n");
5964 /* Helper function for selftests. Create a struct or union type named NAME,
5965 with the fields given by the FIELD_DECLS in FIELDS.
5966 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
5967 create a UNION_TYPE. */
5969 static tree
5970 make_test_compound_type (const char *name, bool is_struct,
5971 const auto_vec<tree> *fields)
5973 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
5974 TYPE_NAME (t) = get_identifier (name);
5975 TYPE_SIZE (t) = 0;
5977 tree fieldlist = NULL;
5978 int i;
5979 tree field;
5980 FOR_EACH_VEC_ELT (*fields, i, field)
5982 gcc_assert (TREE_CODE (field) == FIELD_DECL);
5983 DECL_CONTEXT (field) = t;
5984 fieldlist = chainon (field, fieldlist);
5986 fieldlist = nreverse (fieldlist);
5987 TYPE_FIELDS (t) = fieldlist;
5989 layout_type (t);
5990 return t;
5993 /* Selftest fixture for creating the type "struct coord {int x; int y; };". */
5995 struct coord_test
5997 coord_test ()
5999 auto_vec<tree> fields;
6000 m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
6001 get_identifier ("x"), integer_type_node);
6002 fields.safe_push (m_x_field);
6003 m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
6004 get_identifier ("y"), integer_type_node);
6005 fields.safe_push (m_y_field);
6006 m_coord_type = make_test_compound_type ("coord", true, &fields);
6009 tree m_x_field;
6010 tree m_y_field;
6011 tree m_coord_type;
6014 /* Verify usage of a struct. */
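/* Roughly, the code being modelled:
     struct coord c;
     c.x = 17;
     c.y = -3;
   followed by checks on the offsets of "c.x" and "c.y" within "c".  */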
6016 static void
6017 test_struct ()
6019 coord_test ct;
6021 tree c = build_global_decl ("c", ct.m_coord_type);
6022 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6023 c, ct.m_x_field, NULL_TREE);
6024 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6025 c, ct.m_y_field, NULL_TREE);
6027 tree int_17 = build_int_cst (integer_type_node, 17);
6028 tree int_m3 = build_int_cst (integer_type_node, -3);
6030 region_model_manager mgr;
6031 region_model model (&mgr);
6032 model.set_value (c_x, int_17, NULL);
6033 model.set_value (c_y, int_m3, NULL);
6035 /* Verify get_offset for "c.x". */
6037 const region *c_x_reg = model.get_lvalue (c_x, NULL);
6038 region_offset offset = c_x_reg->get_offset (&mgr);
6039 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
6040 ASSERT_EQ (offset.get_bit_offset (), 0);
6043 /* Verify get_offset for "c.y". */
6045 const region *c_y_reg = model.get_lvalue (c_y, NULL);
6046 region_offset offset = c_y_reg->get_offset (&mgr);
6047 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
6048 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
6052 /* Verify usage of an array element. */
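/* Roughly, the code being modelled: a global array of char, "a",
   with:
     a[0] = 'A';  */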
6054 static void
6055 test_array_1 ()
6057 tree tlen = size_int (10);
6058 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
6060 tree a = build_global_decl ("a", arr_type);
6062 region_model_manager mgr;
6063 region_model model (&mgr);
6064 tree int_0 = build_int_cst (integer_type_node, 0);
6065 tree a_0 = build4 (ARRAY_REF, char_type_node,
6066 a, int_0, NULL_TREE, NULL_TREE);
6067 tree char_A = build_int_cst (char_type_node, 'A');
6068 model.set_value (a_0, char_A, NULL);
6071 /* Verify that region_model::get_representative_tree works as expected. */
6073 static void
6074 test_get_representative_tree ()
6076 region_model_manager mgr;
6078 /* STRING_CST. */
6080 tree string_cst = build_string (4, "foo");
6081 region_model m (&mgr);
6082 const svalue *str_sval = m.get_rvalue (string_cst, NULL);
6083 tree rep = m.get_representative_tree (str_sval);
6084 ASSERT_EQ (rep, string_cst);
6087 /* String literal. */
6089 tree string_cst_ptr = build_string_literal (4, "foo");
6090 region_model m (&mgr);
6091 const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
6092 tree rep = m.get_representative_tree (str_sval);
6093 ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
6096 /* Value of an element within an array. */
6098 tree tlen = size_int (10);
6099 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
6100 tree a = build_global_decl ("a", arr_type);
6101 placeholder_svalue test_sval (char_type_node, "test value");
6103 /* Value of a[3]. */
6105 test_region_model_context ctxt;
6106 region_model model (&mgr);
6107 tree int_3 = build_int_cst (integer_type_node, 3);
6108 tree a_3 = build4 (ARRAY_REF, char_type_node,
6109 a, int_3, NULL_TREE, NULL_TREE);
6110 const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
6111 model.set_value (a_3_reg, &test_sval, &ctxt);
6112 tree rep = model.get_representative_tree (&test_sval);
6113 ASSERT_DUMP_TREE_EQ (rep, "a[3]");
6116 /* Value of a[0]. */
6118 test_region_model_context ctxt;
6119 region_model model (&mgr);
6120 tree idx = build_int_cst (integer_type_node, 0);
6121 tree a_0 = build4 (ARRAY_REF, char_type_node,
6122 a, idx, NULL_TREE, NULL_TREE);
6123 const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
6124 model.set_value (a_0_reg, &test_sval, &ctxt);
6125 tree rep = model.get_representative_tree (&test_sval);
6126 ASSERT_DUMP_TREE_EQ (rep, "a[0]");
6130 /* Value of a field within a struct. */
6132 coord_test ct;
6134 tree c = build_global_decl ("c", ct.m_coord_type);
6135 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6136 c, ct.m_x_field, NULL_TREE);
6137 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6138 c, ct.m_y_field, NULL_TREE);
6140 test_region_model_context ctxt;
6142 /* Value of initial field. */
6144 region_model m (&mgr);
6145 const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
6146 placeholder_svalue test_sval_x (integer_type_node, "test x val");
6147 m.set_value (c_x_reg, &test_sval_x, &ctxt);
6148 tree rep = m.get_representative_tree (&test_sval_x);
6149 ASSERT_DUMP_TREE_EQ (rep, "c.x");
6152 /* Value of non-initial field. */
6154 region_model m (&mgr);
6155 const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
6156 placeholder_svalue test_sval_y (integer_type_node, "test y val");
6157 m.set_value (c_y_reg, &test_sval_y, &ctxt);
6158 tree rep = m.get_representative_tree (&test_sval_y);
6159 ASSERT_DUMP_TREE_EQ (rep, "c.y");
6164 /* Verify that calling region_model::get_rvalue repeatedly on the same
6165 tree constant retrieves the same svalue *. */
6167 static void
6168 test_unique_constants ()
6170 tree int_0 = build_int_cst (integer_type_node, 0);
6171 tree int_42 = build_int_cst (integer_type_node, 42);
6173 test_region_model_context ctxt;
6174 region_model_manager mgr;
6175 region_model model (&mgr);
6176 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
6177 ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
6178 model.get_rvalue (int_42, &ctxt));
6179 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
6180 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
6182 /* A "(const int)42" will be a different tree from "(int)42"... */
6183 tree const_int_type_node
6184 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
6185 tree const_int_42 = build_int_cst (const_int_type_node, 42);
6186 ASSERT_NE (int_42, const_int_42);
6187 /* It should have a different const_svalue. */
6188 const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
6189 const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
6190 ASSERT_NE (int_42_sval, const_int_42_sval);
6191 /* But they should compare as equal. */
6192 ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
6193 ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
6196 /* Verify that each type gets its own singleton unknown_svalue within a
6197 region_model_manager, and that NULL_TREE gets its own singleton. */
6199 static void
6200 test_unique_unknowns ()
6202 region_model_manager mgr;
6203 const svalue *unknown_int
6204 = mgr.get_or_create_unknown_svalue (integer_type_node);
6205 /* Repeated calls with the same type should get the same "unknown"
6206 svalue. */
6207 const svalue *unknown_int_2
6208 = mgr.get_or_create_unknown_svalue (integer_type_node);
6209 ASSERT_EQ (unknown_int, unknown_int_2);
6211 /* Different types (or the NULL type) should have different
6212 unknown_svalues. */
6213 const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
6214 ASSERT_NE (unknown_NULL_type, unknown_int);
6216 /* Repeated calls with NULL for the type should get the same "unknown"
6217 svalue. */
6218 const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
6219 ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
6222 /* Verify that initial_svalues are handled as expected. */
6224 static void
6225 test_initial_svalue_folding ()
6227 region_model_manager mgr;
6228 tree x = build_global_decl ("x", integer_type_node);
6229 tree y = build_global_decl ("y", integer_type_node);
6231 test_region_model_context ctxt;
6232 region_model model (&mgr);
6233 const svalue *x_init = model.get_rvalue (x, &ctxt);
6234 const svalue *y_init = model.get_rvalue (y, &ctxt);
6235 ASSERT_NE (x_init, y_init);
6236 const region *x_reg = model.get_lvalue (x, &ctxt);
6237 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
6241 /* Verify that unary ops are folded as expected. */
6243 static void
6244 test_unaryop_svalue_folding ()
6246 region_model_manager mgr;
6247 tree x = build_global_decl ("x", integer_type_node);
6248 tree y = build_global_decl ("y", integer_type_node);
6250 test_region_model_context ctxt;
6251 region_model model (&mgr);
6252 const svalue *x_init = model.get_rvalue (x, &ctxt);
6253 const svalue *y_init = model.get_rvalue (y, &ctxt);
6254 const region *x_reg = model.get_lvalue (x, &ctxt);
6255 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
6257 /* "(int)x" -> "x". */
6258 ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));
6260 /* "(void *)x" -> something other than "x". */
6261 ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));
6263 /* "!(x == y)" -> "x != y". */
6264 ASSERT_EQ (mgr.get_or_create_unaryop
6265 (boolean_type_node, TRUTH_NOT_EXPR,
6266 mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
6267 x_init, y_init)),
6268 mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
6269 x_init, y_init));
6270 /* "!(x > y)" -> "x <= y". */
6271 ASSERT_EQ (mgr.get_or_create_unaryop
6272 (boolean_type_node, TRUTH_NOT_EXPR,
6273 mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
6274 x_init, y_init)),
6275 mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
6276 x_init, y_init));
6279 /* Verify that binops on constant svalues are folded. */
6281 static void
6282 test_binop_svalue_folding ()
6284 #define NUM_CSTS 10
6285 tree cst_int[NUM_CSTS];
6286 region_model_manager mgr;
6287 const svalue *cst_sval[NUM_CSTS];
6288 for (int i = 0; i < NUM_CSTS; i++)
6290 cst_int[i] = build_int_cst (integer_type_node, i);
6291 cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
6292 ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
6293 ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
6296 for (int i = 0; i < NUM_CSTS; i++)
6297 for (int j = 0; j < NUM_CSTS; j++)
6299 if (i != j)
6300 ASSERT_NE (cst_sval[i], cst_sval[j]);
6301 if (i + j < NUM_CSTS)
6303 const svalue *sum
6304 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6305 cst_sval[i], cst_sval[j]);
6306 ASSERT_EQ (sum, cst_sval[i + j]);
6308 if (i - j >= 0)
6310 const svalue *difference
6311 = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
6312 cst_sval[i], cst_sval[j]);
6313 ASSERT_EQ (difference, cst_sval[i - j]);
6315 if (i * j < NUM_CSTS)
6317 const svalue *product
6318 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6319 cst_sval[i], cst_sval[j]);
6320 ASSERT_EQ (product, cst_sval[i * j]);
6322 const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
6323 cst_sval[i], cst_sval[j]);
6324 ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval[0]);
6325 const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
6326 cst_sval[i], cst_sval[j]);
6327 ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval[0]);
6328 // etc
6331 tree x = build_global_decl ("x", integer_type_node);
6333 test_region_model_context ctxt;
6334 region_model model (&mgr);
6335 const svalue *x_init = model.get_rvalue (x, &ctxt);
6337 /* PLUS_EXPR folding. */
6338 const svalue *x_init_plus_zero
6339 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6340 x_init, cst_sval[0]);
6341 ASSERT_EQ (x_init_plus_zero, x_init);
6342 const svalue *zero_plus_x_init
6343 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6344 cst_sval[0], x_init);
6345 ASSERT_EQ (zero_plus_x_init, x_init);
6347 /* MULT_EXPR folding. */
6348 const svalue *x_init_times_zero
6349 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6350 x_init, cst_sval[0]);
6351 ASSERT_EQ (x_init_times_zero, cst_sval[0]);
6352 const svalue *zero_times_x_init
6353 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6354 cst_sval[0], x_init);
6355 ASSERT_EQ (zero_times_x_init, cst_sval[0]);
6357 const svalue *x_init_times_one
6358 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6359 x_init, cst_sval[1]);
6360 ASSERT_EQ (x_init_times_one, x_init);
6361 const svalue *one_times_x_init
6362 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6363 cst_sval[1], x_init);
6364 ASSERT_EQ (one_times_x_init, x_init);
6366 // etc
6367 // TODO: do we want to use the match-and-simplify DSL for this?
6369 /* Verify that binops put any constants on the RHS. */
6370 const svalue *four_times_x_init
6371 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6372 cst_sval[4], x_init);
6373 const svalue *x_init_times_four
6374 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6375 x_init, cst_sval[4]);
6376 ASSERT_EQ (four_times_x_init, x_init_times_four);
6377 const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
6378 ASSERT_EQ (binop->get_op (), MULT_EXPR);
6379 ASSERT_EQ (binop->get_arg0 (), x_init);
6380 ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
6382 /* Verify that ((x + 1) + 1) == (x + 2). */
6383 const svalue *x_init_plus_one
6384 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6385 x_init, cst_sval[1]);
6386 const svalue *x_init_plus_two
6387 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6388 x_init, cst_sval[2]);
6389 const svalue *x_init_plus_one_plus_one
6390 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6391 x_init_plus_one, cst_sval[1]);
6392 ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
6394 /* Verify various binops on booleans. */
6396 const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
6397 const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
6398 const svalue *sval_unknown
6399 = mgr.get_or_create_unknown_svalue (boolean_type_node);
6400 const placeholder_svalue sval_placeholder (boolean_type_node, "v");
6401 for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
6403 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6404 sval_true, sval_unknown),
6405 sval_true);
6406 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6407 sval_false, sval_unknown),
6408 sval_unknown);
6409 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6410 sval_false, &sval_placeholder),
6411 &sval_placeholder);
6413 for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
6415 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6416 sval_false, sval_unknown),
6417 sval_false);
6418 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6419 sval_true, sval_unknown),
6420 sval_unknown);
6421 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6422 sval_true, &sval_placeholder),
6423 &sval_placeholder);
6428 /* Verify that sub_svalues are folded as expected. */
6430 static void
6431 test_sub_svalue_folding ()
6433 coord_test ct;
6434 tree c = build_global_decl ("c", ct.m_coord_type);
6435 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6436 c, ct.m_x_field, NULL_TREE);
6438 region_model_manager mgr;
6439 region_model model (&mgr);
6440 test_region_model_context ctxt;
6441 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
6443 /* Verify that sub_svalue of "unknown" simply
6444 yields an unknown. */
6446 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
6447 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
6448 unknown, c_x_reg);
6449 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
6450 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
6453 /* Get BIT within VAL as a symbolic value within MGR. */
6455 static const svalue *
6456 get_bit (region_model_manager *mgr,
6457 bit_offset_t bit,
6458 unsigned HOST_WIDE_INT val)
6460 const svalue *inner_svalue
6461 = mgr->get_or_create_int_cst (unsigned_type_node, val);
6462 return mgr->get_or_create_bits_within (boolean_type_node,
6463 bit_range (bit, 1),
6464 inner_svalue);
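/* For example, get_bit (&mgr, 15, 0x8000) ought to fold to the
   boolean constant 1, whereas get_bit (&mgr, 0, 0x8000) ought to
   fold to 0, as verified below.  */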
6467 /* Verify that bits_within_svalues are folded as expected. */
6469 static void
6470 test_bits_within_svalue_folding ()
6472 region_model_manager mgr;
6474 const svalue *zero = mgr.get_or_create_int_cst (boolean_type_node, 0);
6475 const svalue *one = mgr.get_or_create_int_cst (boolean_type_node, 1);
6478 const unsigned val = 0x0000;
6479 for (unsigned bit = 0; bit < 16; bit++)
6480 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6484 const unsigned val = 0x0001;
6485 ASSERT_EQ (get_bit (&mgr, 0, val), one);
6486 for (unsigned bit = 1; bit < 16; bit++)
6487 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6491 const unsigned val = 0x8000;
6492 for (unsigned bit = 0; bit < 15; bit++)
6493 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6494 ASSERT_EQ (get_bit (&mgr, 15, val), one);
6498 const unsigned val = 0xFFFF;
6499 for (unsigned bit = 0; bit < 16; bit++)
6500 ASSERT_EQ (get_bit (&mgr, bit, val), one);
6504 /* Test that region::descendent_of_p works as expected. */
6506 static void
6507 test_descendent_of_p ()
6509 region_model_manager mgr;
6510 const region *stack = mgr.get_stack_region ();
6511 const region *heap = mgr.get_heap_region ();
6512 const region *code = mgr.get_code_region ();
6513 const region *globals = mgr.get_globals_region ();
6515 /* descendent_of_p should return true when used on the region itself. */
6516 ASSERT_TRUE (stack->descendent_of_p (stack));
6517 ASSERT_FALSE (stack->descendent_of_p (heap));
6518 ASSERT_FALSE (stack->descendent_of_p (code));
6519 ASSERT_FALSE (stack->descendent_of_p (globals));
6521 tree x = build_global_decl ("x", integer_type_node);
6522 const region *x_reg = mgr.get_region_for_global (x);
6523 ASSERT_TRUE (x_reg->descendent_of_p (globals));
6525 /* A cast_region should be a descendent of the original region. */
6526 const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
6527 ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
6530 /* Verify that bit_range_region works as expected. */
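/* Roughly: "byte0" and "byte1" below model the first and second
   bytes of a global int "x", as if accessed via ((char *)&x)[0]
   and ((char *)&x)[1].  */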
6532 static void
6533 test_bit_range_regions ()
6535 tree x = build_global_decl ("x", integer_type_node);
6536 region_model_manager mgr;
6537 const region *x_reg = mgr.get_region_for_global (x);
6538 const region *byte0
6539 = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
6540 const region *byte1
6541 = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
6542 ASSERT_TRUE (byte0->descendent_of_p (x_reg));
6543 ASSERT_TRUE (byte1->descendent_of_p (x_reg));
6544 ASSERT_NE (byte0, byte1);
6547 /* Verify that simple assignments work as expected. */
6549 static void
6550 test_assignment ()
6552 tree int_0 = build_int_cst (integer_type_node, 0);
6553 tree x = build_global_decl ("x", integer_type_node);
6554 tree y = build_global_decl ("y", integer_type_node);
6556 /* "x == 0", then use of y, then "y = 0;". */
6557 region_model_manager mgr;
6558 region_model model (&mgr);
6559 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
6560 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
6561 model.set_value (model.get_lvalue (y, NULL),
6562 model.get_rvalue (int_0, NULL),
6563 NULL);
6564 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
6565 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
6568 /* Verify that compound assignments work as expected. */
6570 static void
6571 test_compound_assignment ()
6573 coord_test ct;
6575 tree c = build_global_decl ("c", ct.m_coord_type);
6576 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6577 c, ct.m_x_field, NULL_TREE);
6578 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6579 c, ct.m_y_field, NULL_TREE);
6580 tree d = build_global_decl ("d", ct.m_coord_type);
6581 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6582 d, ct.m_x_field, NULL_TREE);
6583 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6584 d, ct.m_y_field, NULL_TREE);
6586 tree int_17 = build_int_cst (integer_type_node, 17);
6587 tree int_m3 = build_int_cst (integer_type_node, -3);
6589 region_model_manager mgr;
6590 region_model model (&mgr);
6591 model.set_value (c_x, int_17, NULL);
6592 model.set_value (c_y, int_m3, NULL);
6594 /* Copy c to d. */
6595 const svalue *sval = model.get_rvalue (c, NULL);
6596 model.set_value (model.get_lvalue (d, NULL), sval, NULL);
6598 /* Check that the fields have the same svalues. */
6599 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
6600 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
6603 /* Verify the details of pushing and popping stack frames. */
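/* A sketch of the scenario being simulated, using the names below:
     void *p, *q;   (globals)
     int parent_fn (int a, int b)  ...calls child_fn...
     int child_fn (int x, int y)   ...sets p = &x; q = &p;...
   After child_fn's frame is popped, "p" (which pointed into the
   popped frame) should be poisoned, whereas "q" should still point
   at the global "p".  */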
6605 static void
6606 test_stack_frames ()
6608 tree int_42 = build_int_cst (integer_type_node, 42);
6609 tree int_10 = build_int_cst (integer_type_node, 10);
6610 tree int_5 = build_int_cst (integer_type_node, 5);
6611 tree int_0 = build_int_cst (integer_type_node, 0);
6613 auto_vec <tree> param_types;
6614 tree parent_fndecl = make_fndecl (integer_type_node,
6615 "parent_fn",
6616 param_types);
6617 allocate_struct_function (parent_fndecl, true);
6619 tree child_fndecl = make_fndecl (integer_type_node,
6620 "child_fn",
6621 param_types);
6622 allocate_struct_function (child_fndecl, true);
6624 /* "a" and "b" in the parent frame. */
6625 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6626 get_identifier ("a"),
6627 integer_type_node);
6628 DECL_CONTEXT (a) = parent_fndecl;
6629 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6630 get_identifier ("b"),
6631 integer_type_node);
6632 DECL_CONTEXT (b) = parent_fndecl;
6633 /* "x" and "y" in a child frame. */
6634 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6635 get_identifier ("x"),
6636 integer_type_node);
6637 DECL_CONTEXT (x) = child_fndecl;
6638 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6639 get_identifier ("y"),
6640 integer_type_node);
6641 DECL_CONTEXT (y) = child_fndecl;
6643 /* "p" global. */
6644 tree p = build_global_decl ("p", ptr_type_node);
6646 /* "q" global. */
6647 tree q = build_global_decl ("q", ptr_type_node);
6649 region_model_manager mgr;
6650 test_region_model_context ctxt;
6651 region_model model (&mgr);
6653 /* Push stack frame for "parent_fn". */
6654 const region *parent_frame_reg
6655 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
6656 NULL, &ctxt);
6657 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
6658 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
6659 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
6660 model.set_value (a_in_parent_reg,
6661 model.get_rvalue (int_42, &ctxt),
6662 &ctxt);
6663 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
6665 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
6666 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
6667 tristate (tristate::TS_TRUE));
6669 /* Push stack frame for "child_fn". */
6670 const region *child_frame_reg
6671 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
6672 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
6673 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
6674 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
6675 model.set_value (x_in_child_reg,
6676 model.get_rvalue (int_0, &ctxt),
6677 &ctxt);
6678 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
6680 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
6681 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
6682 tristate (tristate::TS_TRUE));
6684 /* Point a global pointer at a local in the child frame: p = &x. */
6685 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
6686 model.set_value (p_in_globals_reg,
6687 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
6688 &ctxt);
6689 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
6691 /* Point another global pointer at p: q = &p. */
6692 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
6693 model.set_value (q_in_globals_reg,
6694 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
6695 &ctxt);
6697 /* Test region::descendent_of_p. */
6698 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
6699 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
6700 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
6702 /* Pop the "child_fn" frame from the stack. */
6703 model.pop_frame (NULL, NULL, &ctxt);
6704 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
6705 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
6707 /* Verify that p (which was pointing at the local "x" in the popped
6708 frame) has been poisoned. */
6709 const svalue *new_p_sval = model.get_rvalue (p, NULL);
6710 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
6711 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
6712 POISON_KIND_POPPED_STACK);
6714 /* Verify that q still points to p, in spite of the region
6715 renumbering. */
6716 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
6717 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
6718 ASSERT_EQ (new_q_sval->maybe_get_region (),
6719 model.get_lvalue (p, &ctxt));
6721 /* Verify that top of stack has been updated. */
6722 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
6724 /* Verify locals in parent frame. */
6725 /* Verify "a" still has its value. */
6726 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
6727 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
6728 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
6729 int_42);
6730 /* Verify "b" still has its constraint. */
6731 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
6732 tristate (tristate::TS_TRUE));
6735 /* Verify that get_representative_path_var works as expected, that
6736 we can map from regions to parms and back within a recursive call
6737 stack. */
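/* Roughly: five frames of
     int factorial (int n)
   are pushed, as if recursing, and each frame's "n" must map back to
   the path_var for "n" at the corresponding stack depth, and vice
   versa.  */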
6739 static void
6740 test_get_representative_path_var ()
6742 auto_vec <tree> param_types;
6743 tree fndecl = make_fndecl (integer_type_node,
6744 "factorial",
6745 param_types);
6746 allocate_struct_function (fndecl, true);
6748 /* Parm "n". */
6749 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6750 get_identifier ("n"),
6751 integer_type_node);
6752 DECL_CONTEXT (n) = fndecl;
6754 region_model_manager mgr;
6755 test_region_model_context ctxt;
6756 region_model model (&mgr);
6758 /* Push 5 stack frames for "factorial", each with a param. */
6759 auto_vec<const region *> parm_regs;
6760 auto_vec<const svalue *> parm_svals;
6761 for (int depth = 0; depth < 5; depth++)
6763 const region *frame_n_reg
6764 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
6765 const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
6766 parm_regs.safe_push (parm_n_reg);
6768 ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
6769 const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
6770 parm_svals.safe_push (sval_n);
6773 /* Verify that we can recognize that the regions are the parms,
6774 at every depth. */
6775 for (int depth = 0; depth < 5; depth++)
6778 svalue_set visited;
6779 ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
6780 &visited),
6781 path_var (n, depth + 1));
6783 /* ...and that we can lookup lvalues for locals for all frames,
6784 not just the top. */
6785 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
6786 parm_regs[depth]);
6787 /* ...and that we can locate the svalues. */
6789 svalue_set visited;
6790 ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
6791 &visited),
6792 path_var (n, depth + 1));
6797 /* Ensure that region_model::operator== works as expected. */
6799 static void
6800 test_equality_1 ()
6802 tree int_42 = build_int_cst (integer_type_node, 42);
6803 tree int_17 = build_int_cst (integer_type_node, 17);
6805 /* Verify that "empty" region_model instances are equal to each other. */
6806 region_model_manager mgr;
6807 region_model model0 (&mgr);
6808 region_model model1 (&mgr);
6809 ASSERT_EQ (model0, model1);
6811 /* Verify that setting state in model1 makes the models non-equal. */
6812 tree x = build_global_decl ("x", integer_type_node);
6813 model0.set_value (x, int_42, NULL);
6814 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6815 ASSERT_NE (model0, model1);
6817 /* Verify the copy-ctor. */
6818 region_model model2 (model0);
6819 ASSERT_EQ (model0, model2);
6820 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6821 ASSERT_NE (model1, model2);
6823 /* Verify that models obtained from copy-ctor are independently editable
6824 w/o affecting the original model. */
6825 model2.set_value (x, int_17, NULL);
6826 ASSERT_NE (model0, model2);
6827 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
6828 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6831 /* Verify that region models for
6832 x = 42; y = 113;
6834 y = 113; x = 42;
6835 are equal. */
6837 static void
6838 test_canonicalization_2 ()
6840 tree int_42 = build_int_cst (integer_type_node, 42);
6841 tree int_113 = build_int_cst (integer_type_node, 113);
6842 tree x = build_global_decl ("x", integer_type_node);
6843 tree y = build_global_decl ("y", integer_type_node);
6845 region_model_manager mgr;
6846 region_model model0 (&mgr);
6847 model0.set_value (model0.get_lvalue (x, NULL),
6848 model0.get_rvalue (int_42, NULL),
6849 NULL);
6850 model0.set_value (model0.get_lvalue (y, NULL),
6851 model0.get_rvalue (int_113, NULL),
6852 NULL);
6854 region_model model1 (&mgr);
6855 model1.set_value (model1.get_lvalue (y, NULL),
6856 model1.get_rvalue (int_113, NULL),
6857 NULL);
6858 model1.set_value (model1.get_lvalue (x, NULL),
6859 model1.get_rvalue (int_42, NULL),
6860 NULL);
6862 ASSERT_EQ (model0, model1);
6865 /* Verify that constraints for
6866 x > 3 && y > 42
6868 y > 42 && x > 3
6869 are equal after canonicalization. */
6871 static void
6872 test_canonicalization_3 ()
6874 tree int_3 = build_int_cst (integer_type_node, 3);
6875 tree int_42 = build_int_cst (integer_type_node, 42);
6876 tree x = build_global_decl ("x", integer_type_node);
6877 tree y = build_global_decl ("y", integer_type_node);
6879 region_model_manager mgr;
6880 region_model model0 (&mgr);
6881 model0.add_constraint (x, GT_EXPR, int_3, NULL);
6882 model0.add_constraint (y, GT_EXPR, int_42, NULL);
6884 region_model model1 (&mgr);
6885 model1.add_constraint (y, GT_EXPR, int_42, NULL);
6886 model1.add_constraint (x, GT_EXPR, int_3, NULL);
6888 model0.canonicalize ();
6889 model1.canonicalize ();
6890 ASSERT_EQ (model0, model1);
6893 /* Verify that we can canonicalize a model containing NaN and other real
6894 constants. */
6896 static void
6897 test_canonicalization_4 ()
6899 auto_vec<tree> csts;
6900 append_interesting_constants (&csts);
6902 region_model_manager mgr;
6903 region_model model (&mgr);
6905 for (tree cst : csts)
6906 model.get_rvalue (cst, NULL);
6908 model.canonicalize ();
6911 /* Assert that two region_model instances
6912 with values VAL_A and VAL_B for EXPR are
6913 mergeable. Write the merged model to *OUT_MERGED_MODEL,
6914 and the merged svalue ptr to *OUT_MERGED_SVALUE.
6915 If VAL_A or VAL_B is NULL_TREE, don't populate EXPR
6916 for that region_model. */
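/* Example usage (as in test_state_merging below):
     region_model merged (&mgr);
     const svalue *merged_x_sval;
     assert_region_models_merge (x, int_42, int_113, &merged,
                                 &merged_x_sval);
   after which MERGED_X_SVAL is the merged model's value for "x".  */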
6918 static void
6919 assert_region_models_merge (tree expr, tree val_a, tree val_b,
6920 region_model *out_merged_model,
6921 const svalue **out_merged_svalue)
6923 region_model_manager *mgr = out_merged_model->get_manager ();
6924 program_point point (program_point::origin (*mgr));
6925 test_region_model_context ctxt;
6926 region_model model0 (mgr);
6927 region_model model1 (mgr);
6928 if (val_a)
6929 model0.set_value (model0.get_lvalue (expr, &ctxt),
6930 model0.get_rvalue (val_a, &ctxt),
6931 &ctxt);
6932 if (val_b)
6933 model1.set_value (model1.get_lvalue (expr, &ctxt),
6934 model1.get_rvalue (val_b, &ctxt),
6935 &ctxt);
6937 /* They should be mergeable. */
6938 ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
6939 *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
6942 /* Verify that we can merge region_model instances. */
6944 static void
6945 test_state_merging ()
6947 tree int_42 = build_int_cst (integer_type_node, 42);
6948 tree int_113 = build_int_cst (integer_type_node, 113);
6949 tree x = build_global_decl ("x", integer_type_node);
6950 tree y = build_global_decl ("y", integer_type_node);
6951 tree z = build_global_decl ("z", integer_type_node);
6952 tree p = build_global_decl ("p", ptr_type_node);
6954 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
6955 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
6957 auto_vec <tree> param_types;
6958 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
6959 allocate_struct_function (test_fndecl, true);
6961 /* Param "a". */
6962 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6963 get_identifier ("a"),
6964 integer_type_node);
6965 DECL_CONTEXT (a) = test_fndecl;
6966 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
6968 /* Param "q", a pointer. */
6969 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6970 get_identifier ("q"),
6971 ptr_type_node);
6972 DECL_CONTEXT (q) = test_fndecl;
6974 region_model_manager mgr;
6975 program_point point (program_point::origin (mgr));
6978 region_model model0 (&mgr);
6979 region_model model1 (&mgr);
6980 region_model merged (&mgr);
6981 /* Verify empty models can be merged. */
6982 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
6983 ASSERT_EQ (model0, merged);
6986 /* Verify that we can merge two contradictory constraints on the
6987 value for a global. */
6988 /* TODO: verify that the merged model doesn't have a value for
6989 the global */
6991 region_model model0 (&mgr);
6992 region_model model1 (&mgr);
6993 region_model merged (&mgr);
6994 test_region_model_context ctxt;
6995 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
6996 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
6997 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
6998 ASSERT_NE (model0, merged);
6999 ASSERT_NE (model1, merged);
7002 /* Verify handling of a PARM_DECL. */
7004 test_region_model_context ctxt;
7005 region_model model0 (&mgr);
7006 region_model model1 (&mgr);
7007 ASSERT_EQ (model0.get_stack_depth (), 0);
7008 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
7009 ASSERT_EQ (model0.get_stack_depth (), 1);
7010 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
7012 placeholder_svalue test_sval (integer_type_node, "test sval");
7013 model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
7014 model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
7015 ASSERT_EQ (model0, model1);
7017 /* They should be mergeable, and the result should be the same. */
7018 region_model merged (&mgr);
7019 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7020 ASSERT_EQ (model0, merged);
7021 /* In particular, "a" should have the placeholder value. */
7022 ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
7025 /* Verify handling of a global. */
7027 test_region_model_context ctxt;
7028 region_model model0 (&mgr);
7029 region_model model1 (&mgr);
7031 placeholder_svalue test_sval (integer_type_node, "test sval");
7032 model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
7033 model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
7034 ASSERT_EQ (model0, model1);
7036 /* They should be mergeable, and the result should be the same. */
7037 region_model merged (&mgr);
7038 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7039 ASSERT_EQ (model0, merged);
7040 /* In particular, "x" should have the placeholder value. */
7041 ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
7044 /* Use global-handling to verify various combinations of values. */
7046 /* Two equal constant values. */
7048 region_model merged (&mgr);
7049 const svalue *merged_x_sval;
7050 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
7052 /* In particular, there should be a constant value for "x". */
7053 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
7054 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
7055 int_42);
7058 /* Two non-equal constant values. */
7060 region_model merged (&mgr);
7061 const svalue *merged_x_sval;
7062 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
7064 /* In particular, there should be a "widening" value for "x". */
7065 ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
7068 /* Initial and constant. */
7070 region_model merged (&mgr);
7071 const svalue *merged_x_sval;
7072 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
7074 /* In particular, there should be an unknown value for "x". */
7075 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7078 /* Constant and initial. */
7080 region_model merged (&mgr);
7081 const svalue *merged_x_sval;
7082 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
7084 /* In particular, there should be an unknown value for "x". */
7085 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7088 /* Unknown and constant. */
7089 // TODO
7091 /* Pointers: NULL and NULL. */
7092 // TODO
7094 /* Pointers: NULL and non-NULL. */
7095 // TODO
7097 /* Pointers: non-NULL and non-NULL: ptr to a local. */
7099 region_model model0 (&mgr);
7100 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7101 model0.set_value (model0.get_lvalue (p, NULL),
7102 model0.get_rvalue (addr_of_a, NULL), NULL);
7104 region_model model1 (model0);
7105 ASSERT_EQ (model0, model1);
7107 /* They should be mergeable, and the result should be the same. */
7108 region_model merged (&mgr);
7109 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7110 ASSERT_EQ (model0, merged);
7113 /* Pointers: non-NULL and non-NULL: ptr to a global. */
7115 region_model merged (&mgr);
7116 /* p == &y in both input models. */
7117 const svalue *merged_p_sval;
7118 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
7119 &merged_p_sval);
7121 /* We should get p == &y in the merged model. */
7122 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
7123 const region_svalue *merged_p_ptr
7124 = merged_p_sval->dyn_cast_region_svalue ();
7125 const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
7126 ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
7129 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
7131 region_model merged (&mgr);
7132 /* x == &y vs x == &z in the input models; these are actually casts
7133 of the ptrs to "int". */
7134 const svalue *merged_x_sval;
7135 // TODO:
7136 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
7137 &merged_x_sval);
7139 /* We should get x == unknown in the merged model. */
7140 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7143 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
7145 test_region_model_context ctxt;
7146 region_model model0 (&mgr);
7147 tree size = build_int_cst (size_type_node, 1024);
7148 const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
7149 const region *new_reg
7150 = model0.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
7151 const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
7152 model0.set_value (model0.get_lvalue (p, &ctxt),
7153 ptr_sval, &ctxt);
7155 region_model model1 (model0);
7157 ASSERT_EQ (model0, model1);
7159 region_model merged (&mgr);
7160 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7162 /* The merged model ought to be identical. */
7163 ASSERT_EQ (model0, merged);
7166 /* Two regions sharing the same placeholder svalue should continue sharing
7167 it after self-merger. */
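/* i.e. if "x" and "y" are both bound to one placeholder svalue,
   merging the model with itself must preserve the implied
   "x == y".  */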
7169 test_region_model_context ctxt;
7170 region_model model0 (&mgr);
7171 placeholder_svalue placeholder_sval (integer_type_node, "test");
7172 model0.set_value (model0.get_lvalue (x, &ctxt),
7173 &placeholder_sval, &ctxt);
7174 model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
7175 region_model model1 (model0);
7177 /* They should be mergeable, and the result should be the same. */
7178 region_model merged (&mgr);
7179 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7180 ASSERT_EQ (model0, merged);
7182 /* In particular, we should have x == y. */
7183 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
7184 tristate (tristate::TS_TRUE));
7188 region_model model0 (&mgr);
7189 region_model model1 (&mgr);
7190 test_region_model_context ctxt;
7191 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7192 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
7193 region_model merged (&mgr);
7194 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7198 region_model model0 (&mgr);
7199 region_model model1 (&mgr);
7200 test_region_model_context ctxt;
7201 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7202 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
7203 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
7204 region_model merged (&mgr);
7205 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7208 // TODO: what can't we merge? need at least one such test
7210 /* TODO: various things
7211 - heap regions
7212 - value merging:
7213 - every combination, but in particular
7214 - pairs of regions
7217 /* Views. */
7219 test_region_model_context ctxt;
7220 region_model model0 (&mgr);
7222 const region *x_reg = model0.get_lvalue (x, &ctxt);
7223 const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
7224 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
7226 region_model model1 (model0);
7227 ASSERT_EQ (model1, model0);
7229 /* They should be mergeable, and the result should be the same. */
7230 region_model merged (&mgr);
7231 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7234 /* Verify that we can merge a model in which a local in an older stack
7235 frame points to a local in a more recent stack frame. */
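/* Roughly: as if "test_fn" had recursed once, with the outer frame's
   "q" pointing at the inner frame's "a" (q = &a).  Merging such a
   model with itself should succeed, and be idempotent after
   canonicalization.  */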
7237 region_model model0 (&mgr);
7238 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7239 const region *q_in_first_frame = model0.get_lvalue (q, NULL);
7241 /* Push a second frame. */
7242 const region *reg_2nd_frame
7243 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7245 /* Have a pointer in the older frame point to a local in the
7246 more recent frame. */
7247 const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
7248 model0.set_value (q_in_first_frame, sval_ptr, NULL);
7250 /* Verify that it's pointing at the newer frame. */
7251 const region *reg_pointee = sval_ptr->maybe_get_region ();
7252 ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
7254 model0.canonicalize ();
7256 region_model model1 (model0);
7257 ASSERT_EQ (model0, model1);
7259 /* They should be mergeable, and the result should be the same
7260 (after canonicalization, at least). */
7261 region_model merged (&mgr);
7262 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7263 merged.canonicalize ();
7264 ASSERT_EQ (model0, merged);
7267 /* Verify that we can merge a model in which a local points to a global. */
7269 region_model model0 (&mgr);
7270 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7271 model0.set_value (model0.get_lvalue (q, NULL),
7272 model0.get_rvalue (addr_of_y, NULL), NULL);
7274 region_model model1 (model0);
7275 ASSERT_EQ (model0, model1);
7277 /* They should be mergeable, and the result should be the same
7278 (after canonicalization, at least). */
7279 region_model merged (&mgr);
7280 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7281 ASSERT_EQ (model0, merged);
7285 /* Verify that constraints are correctly merged when merging region_model
7286 instances. */
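/* Specifically:
     model0: (x == y) && (0 <= x) && (x < n)
     model1: (z != 5) && (0 <= x) && (x < n)
   The merged state should keep exactly the shared constraints,
   0 <= x < n, and drop both "x == y" and "z != 5".  */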
7288 static void
7289 test_constraint_merging ()
7291 tree int_0 = build_int_cst (integer_type_node, 0);
7292 tree int_5 = build_int_cst (integer_type_node, 5);
7293 tree x = build_global_decl ("x", integer_type_node);
7294 tree y = build_global_decl ("y", integer_type_node);
7295 tree z = build_global_decl ("z", integer_type_node);
7296 tree n = build_global_decl ("n", integer_type_node);
7298 region_model_manager mgr;
7299 test_region_model_context ctxt;
7301 /* model0: 0 <= (x == y) < n. */
7302 region_model model0 (&mgr);
7303 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
7304 model0.add_constraint (x, GE_EXPR, int_0, NULL);
7305 model0.add_constraint (x, LT_EXPR, n, NULL);
7307 /* model1: z != 5 && (0 <= x < n). */
7308 region_model model1 (&mgr);
7309 model1.add_constraint (z, NE_EXPR, int_5, NULL);
7310 model1.add_constraint (x, GE_EXPR, int_0, NULL);
7311 model1.add_constraint (x, LT_EXPR, n, NULL);
7313 /* They should be mergeable; the merged constraints should
7314 be: (0 <= x < n). */
7315 program_point point (program_point::origin (mgr));
7316 region_model merged (&mgr);
7317 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7319 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
7320 tristate (tristate::TS_TRUE));
7321 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
7322 tristate (tristate::TS_TRUE));
7324 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
7325 tristate (tristate::TS_UNKNOWN));
7326 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
7327 tristate (tristate::TS_UNKNOWN));
7330 /* Verify that widening_svalue::eval_condition_without_cm works as
7331 expected. */
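/* The value tested below is WIDENING (0, 1): "started at 0, then
   saw 1", i.e. an ascending iteration.  Hence e.g. "w < -1" should
   be false and "w >= 0" true, whereas comparisons against values the
   iteration might reach (1, 256, ...) should be unknown.  */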
7333 static void
7334 test_widening_constraints ()
7336 region_model_manager mgr;
7337 function_point point (program_point::origin (mgr).get_function_point ());
7338 tree int_0 = build_int_cst (integer_type_node, 0);
7339 tree int_m1 = build_int_cst (integer_type_node, -1);
7340 tree int_1 = build_int_cst (integer_type_node, 1);
7341 tree int_256 = build_int_cst (integer_type_node, 256);
7342 test_region_model_context ctxt;
7343 const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
7344 const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
7345 const svalue *w_zero_then_one_sval
7346 = mgr.get_or_create_widening_svalue (integer_type_node, point,
7347 int_0_sval, int_1_sval);
7348 const widening_svalue *w_zero_then_one
7349 = w_zero_then_one_sval->dyn_cast_widening_svalue ();
7350 ASSERT_EQ (w_zero_then_one->get_direction (),
7351 widening_svalue::DIR_ASCENDING);
7352 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
7353 tristate::TS_FALSE);
7354 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
7355 tristate::TS_FALSE);
7356 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
7357 tristate::TS_UNKNOWN);
7358 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
7359 tristate::TS_UNKNOWN);
7361 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
7362 tristate::TS_FALSE);
7363 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
7364 tristate::TS_UNKNOWN);
7365 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
7366 tristate::TS_UNKNOWN);
7367 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
7368 tristate::TS_UNKNOWN);
7370 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
7371 tristate::TS_TRUE);
7372 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
7373 tristate::TS_UNKNOWN);
7374 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
7375 tristate::TS_UNKNOWN);
7376 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
7377 tristate::TS_UNKNOWN);
7379 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
7380 tristate::TS_TRUE);
7381 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
7382 tristate::TS_TRUE);
7383 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
7384 tristate::TS_UNKNOWN);
7385 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
7386 tristate::TS_UNKNOWN);
7388 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
7389 tristate::TS_FALSE);
7390 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
7391 tristate::TS_UNKNOWN);
7392 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
7393 tristate::TS_UNKNOWN);
7394 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
7395 tristate::TS_UNKNOWN);
7397 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
7398 tristate::TS_TRUE);
7399 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
7400 tristate::TS_UNKNOWN);
7401 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
7402 tristate::TS_UNKNOWN);
7403 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
7404 tristate::TS_UNKNOWN);
7407 /* Verify merging constraints for states simulating successive iterations
7408 of a loop.
7409 Simulate:
7410 for (i = 0; i < 256; i++)
7411 [...body...]
7412 i.e. this gimple:
7413 i_15 = 0;
7414 goto <bb 4>;
7416 <bb 4> :
7417 i_11 = PHI <i_15(2), i_23(3)>
7418 if (i_11 <= 255)
7419 goto <bb 3>;
7420 else
7421 goto [AFTER LOOP]
7423 <bb 3> :
7424 [LOOP BODY]
7425 i_23 = i_11 + 1;
7427 and thus these ops (and resultant states):
7428 i_11 = PHI()
7429 {i_11: 0}
7430 add_constraint (i_11 <= 255) [for the true edge]
7431 {i_11: 0} [constraint was a no-op]
7432 i_23 = i_11 + 1;
7433 {i_23: 1}
7434 i_11 = PHI()
7435 {i_11: WIDENED (at phi, 0, 1)}
7436 add_constraint (i_11 <= 255) [for the true edge]
7437 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
7438 i_23 = i_11 + 1;
7439 {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
7440 i_11 = PHI(); merge with state at phi above
7441 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
7442 [changing meaning of "WIDENED" here]
7443 if (i_11 <= 255)
7444 T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
7445 F: {i_11: 256}

static void
test_iteration_1 ()
{
  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree int_257 = build_int_cst (integer_type_node, 257);
  tree i = build_global_decl ("i", integer_type_node);

  test_region_model_context ctxt;

  /* model0: i: 0.  */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1.  */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Should merge "i" to a widened value.  */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256.  */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Try merging with the initial state.  */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges.  */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case.  */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged.  */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;"  */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state.  */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again.  */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);

  ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
}

/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
   all cast pointers to that region are also known to be non-NULL.  */
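
/* A rough sketch of the user-level code being modeled (not itself part
   of the test; the test builds the equivalent model directly):
     void *p = malloc (sz);
     char *q = (char *)p;
     if (p != NULL)
       ...here q must also be non-NULL...  */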

static void
test_malloc_constraints ()
{
  region_model_manager mgr;
  region_model model (&mgr);
  tree p = build_global_decl ("p", ptr_type_node);
  tree char_star = build_pointer_type (char_type_node);
  tree q = build_global_decl ("q", char_star);
  tree null_ptr = build_int_cst (ptr_type_node, 0);

  const svalue *size_in_bytes
    = mgr.get_or_create_unknown_svalue (size_type_node);
  const region *reg
    = model.get_or_create_region_for_heap_alloc (size_in_bytes, NULL);
  const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
  model.set_value (model.get_lvalue (p, NULL), sval, NULL);
  model.set_value (q, p, NULL);

  ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);

  model.add_constraint (p, NE_EXPR, null_ptr, NULL);

  ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
}

/* Smoketest of getting and setting the value of a variable.  */

static void
test_var ()
{
  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);

  const region *i_reg = model.get_lvalue (i, NULL);
  ASSERT_EQ (i_reg->get_kind (), RK_DECL);

  /* Reading "i" should give a symbolic "initial value".  */
  const svalue *sval_init = model.get_rvalue (i, NULL);
  ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
  ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
  /* ...and doing it again should give the same "initial value".  */
  ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);

  /* "i = 17;".  */
  model.set_value (i, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_17, NULL));

  /* "i = -3;".  */
  model.set_value (i, int_m3, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_m3, NULL));

  /* Verify get_offset for "i".  */
  {
    region_offset offset = i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), i_reg);
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }
}
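
/* Test getting and setting the values of elements within an array,
   and get_offset on array elements, with both constant and symbolic
   indices.  */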

static void
test_array_2 ()
{
  /* "int arr[10];"  */
  tree tlen = size_int (10);
  tree arr_type
    = build_array_type (integer_type_node, build_index_type (tlen));
  tree arr = build_global_decl ("arr", arr_type);

  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);

  tree arr_0 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_0, NULL_TREE, NULL_TREE);
  tree arr_1 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_1, NULL_TREE, NULL_TREE);
  tree arr_i = build4 (ARRAY_REF, integer_type_node,
		       arr, i, NULL_TREE, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* "arr[0] = 17;".  */
  model.set_value (arr_0, int_17, NULL);
  /* "arr[1] = -3;".  */
  model.set_value (arr_1, int_m3, NULL);

  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));

  /* Overwrite a pre-existing binding: "arr[1] = 42;".  */
  model.set_value (arr_1, int_42, NULL);
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));

  /* Verify get_offset for "arr[0]".  */
  {
    const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
    region_offset offset = arr_0_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "arr[1]".  */
  {
    const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
    region_offset offset = arr_1_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }

  /* Verify get_offset for "arr[i]".  */
  {
    const region *arr_i_reg = model.get_lvalue (arr_i, NULL);
    region_offset offset = arr_i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_symbolic_byte_offset ()->get_kind (), SK_BINOP);
  }

  /* "arr[i] = i;" - this should remove the earlier bindings.  */
  model.set_value (arr_i, i, NULL);
  ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
  ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);

  /* "arr[0] = 17;" - this should remove the arr[i] binding.  */
  model.set_value (arr_0, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
}

/* Smoketest of dereferencing a pointer via MEM_REF.  */
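/* (In gimple, a MEM_REF represents a dereference of a pointer plus a
   constant byte offset; an offset of zero, as built below, is a plain
   "*p".)  */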

static void
test_mem_ref ()
{
  /*
    x = 17;
    p = &x;
  */

  tree x = build_global_decl ("x", integer_type_node);
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);

  region_model_manager mgr;
  region_model model (&mgr);

  /* "x = 17;".  */
  model.set_value (x, int_17, NULL);

  /* "p = &x;".  */
  model.set_value (p, addr_of_x, NULL);

  const svalue *sval = model.get_rvalue (star_p, NULL);
  ASSERT_EQ (sval->maybe_get_constant (), int_17);
}

/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
   Analogous to this code:
     void test_6 (int a[10])
     {
       __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       a[3] = 42;
       __analyzer_eval (a[3] == 42); [should be TRUE]
     }
   from data-model-1.c, which looks like this at the gimple level:
       # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       int *_1 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _2 = *_1;             # MEM_REF
       _Bool _3 = _2 == 42;
       int _4 = (int) _3;
       __analyzer_eval (_4);

       # a[3] = 42;
       int *_5 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       *_5 = 42;                 # MEM_REF

       # __analyzer_eval (a[3] == 42); [should be TRUE]
       int *_6 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _7 = *_6;             # MEM_REF
       _Bool _8 = _7 == 42;
       int _9 = (int) _8;
       __analyzer_eval (_9);  */
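
/* (The "+ 12" above is the byte offset of a[3], i.e. 3 * sizeof (int) on a
   target where int is 4 bytes; POINTER_PLUS_EXPR offsets are in bytes,
   hence the offset_12 constant built below.)  */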

static void
test_POINTER_PLUS_EXPR_then_MEM_REF ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree a = build_global_decl ("a", int_star);
  tree offset_12 = build_int_cst (size_type_node, 12);
  tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree mem_ref = build2 (MEM_REF, integer_type_node,
			 pointer_plus_expr, offset_0);
  region_model_manager mgr;
  region_model m (&mgr);

  tree int_42 = build_int_cst (integer_type_node, 42);
  m.set_value (mem_ref, int_42, NULL);
  ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
}

/* Verify that malloc works.  */
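/* The test below models "p = malloc (n * 4);" with a symbolic size and
   checks that get_capacity on the heap-allocated region reports that same
   symbolic size.  */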

static void
test_malloc ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* "p = malloc (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg
    = model.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);
}

/* Verify that alloca works.  */
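/* Unlike the heap allocation in test_malloc, the alloca region is created
   as a child of the function's frame region, so popping the frame should
   leave any pointers into it poisoned.  */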

static void
test_alloca ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "test_fn",
			     param_types);
  allocate_struct_function (fndecl, true);

  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame.  */
  const region *frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
			NULL, &ctxt);
  /* "p = alloca (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
  ASSERT_EQ (reg->get_parent_region (), frame_reg);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);

  /* Verify that the pointers to the alloca region are replaced by
     poisoned values when the frame is popped.  */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
}

/* Verify that svalue::involves_p works.  */
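/* For example, INIT_VAL(*INIT_VAL(p)) "involves" INIT_VAL(p), since the
   value of "p" determines which region "*p" is, but not vice versa.  */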

static void
test_involves_p ()
{
  region_model_manager mgr;
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree q = build_global_decl ("q", int_star);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *p_init = model.get_rvalue (p, &ctxt);
  const svalue *q_init = model.get_rvalue (q, &ctxt);

  ASSERT_TRUE (p_init->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (q_init));

  const region *star_p_reg = mgr.get_symbolic_region (p_init);
  const region *star_q_reg = mgr.get_symbolic_region (q_init);

  const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
  const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);

  ASSERT_TRUE (init_star_p->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (init_star_p));
  ASSERT_FALSE (init_star_p->involves_p (q_init));
  ASSERT_TRUE (init_star_q->involves_p (q_init));
  ASSERT_FALSE (init_star_q->involves_p (p_init));
}

/* Run all of the selftests within this file.  */

void
analyzer_region_model_cc_tests ()
{
  test_tree_cmp_on_constants ();
  test_dump ();
  test_struct ();
  test_array_1 ();
  test_get_representative_tree ();
  test_unique_constants ();
  test_unique_unknowns ();
  test_initial_svalue_folding ();
  test_unaryop_svalue_folding ();
  test_binop_svalue_folding ();
  test_sub_svalue_folding ();
  test_bits_within_svalue_folding ();
  test_descendent_of_p ();
  test_bit_range_regions ();
  test_assignment ();
  test_compound_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();
  test_equality_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  test_state_merging ();
  test_constraint_merging ();
  test_widening_constraints ();
  test_iteration_1 ();
  test_malloc_constraints ();
  test_var ();
  test_array_2 ();
  test_mem_ref ();
  test_POINTER_PLUS_EXPR_then_MEM_REF ();
  test_malloc ();
  test_alloca ();
  test_involves_p ();
}

} // namespace selftest

#endif /* CHECKING_P */

} // namespace ana

#endif /* #if ENABLE_ANALYZER */