analyzer: Fix PR analyzer/101980
[official-gcc.git] / gcc / analyzer / region-model.cc
blob9870007e57e94e947c43a58b167e9472ccb558b3
1 /* Classes for modeling the state of memory.
2 Copyright (C) 2019-2021 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "function.h"
26 #include "basic-block.h"
27 #include "gimple.h"
28 #include "gimple-iterator.h"
29 #include "diagnostic-core.h"
30 #include "graphviz.h"
31 #include "options.h"
32 #include "cgraph.h"
33 #include "tree-dfa.h"
34 #include "stringpool.h"
35 #include "convert.h"
36 #include "target.h"
37 #include "fold-const.h"
38 #include "tree-pretty-print.h"
39 #include "diagnostic-color.h"
40 #include "diagnostic-metadata.h"
41 #include "tristate.h"
42 #include "bitmap.h"
43 #include "selftest.h"
44 #include "function.h"
45 #include "json.h"
46 #include "analyzer/analyzer.h"
47 #include "analyzer/analyzer-logging.h"
48 #include "ordered-hash-map.h"
49 #include "options.h"
50 #include "cgraph.h"
51 #include "cfg.h"
52 #include "digraph.h"
53 #include "analyzer/supergraph.h"
54 #include "sbitmap.h"
55 #include "analyzer/call-string.h"
56 #include "analyzer/program-point.h"
57 #include "analyzer/store.h"
58 #include "analyzer/region-model.h"
59 #include "analyzer/constraint-manager.h"
60 #include "diagnostic-event-id.h"
61 #include "analyzer/sm.h"
62 #include "diagnostic-event-id.h"
63 #include "analyzer/sm.h"
64 #include "analyzer/pending-diagnostic.h"
65 #include "analyzer/region-model-reachability.h"
66 #include "analyzer/analyzer-selftests.h"
67 #include "stor-layout.h"
68 #include "attribs.h"
69 #include "tree-object-size.h"
71 #if ENABLE_ANALYZER
73 namespace ana {
75 /* Dump T to PP in language-independent form, for debugging/logging/dumping
76 purposes. */
78 void
79 dump_tree (pretty_printer *pp, tree t)
81 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
84 /* Dump T to PP in language-independent form in quotes, for
85 debugging/logging/dumping purposes. */
87 void
88 dump_quoted_tree (pretty_printer *pp, tree t)
90 pp_begin_quote (pp, pp_show_color (pp));
91 dump_tree (pp, t);
92 pp_end_quote (pp, pp_show_color (pp));
95 /* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
96 calls within other pp_printf calls.
98 default_tree_printer handles 'T' and some other codes by calling
99 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
100 dump_generic_node calls pp_printf in various places, leading to
101 garbled output.
103 Ideally pp_printf could be made to be reentrant, but in the meantime
104 this function provides a workaround. */
106 void
107 print_quoted_type (pretty_printer *pp, tree t)
109 pp_begin_quote (pp, pp_show_color (pp));
110 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
111 pp_end_quote (pp, pp_show_color (pp));
114 /* class region_to_value_map. */
116 /* Assignment operator for region_to_value_map. */
118 region_to_value_map &
119 region_to_value_map::operator= (const region_to_value_map &other)
121 m_hash_map.empty ();
122 for (auto iter : other.m_hash_map)
124 const region *reg = iter.first;
125 const svalue *sval = iter.second;
126 m_hash_map.put (reg, sval);
128 return *this;
131 /* Equality operator for region_to_value_map. */
133 bool
134 region_to_value_map::operator== (const region_to_value_map &other) const
136 if (m_hash_map.elements () != other.m_hash_map.elements ())
137 return false;
139 for (auto iter : *this)
141 const region *reg = iter.first;
142 const svalue *sval = iter.second;
143 const svalue * const *other_slot = other.get (reg);
144 if (other_slot == NULL)
145 return false;
146 if (sval != *other_slot)
147 return false;
150 return true;
153 /* Dump this object to PP. */
155 void
156 region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
157 bool multiline) const
159 auto_vec<const region *> regs;
160 for (iterator iter = begin (); iter != end (); ++iter)
161 regs.safe_push ((*iter).first);
162 regs.qsort (region::cmp_ptr_ptr);
163 if (multiline)
164 pp_newline (pp);
165 else
166 pp_string (pp, " {");
167 unsigned i;
168 const region *reg;
169 FOR_EACH_VEC_ELT (regs, i, reg)
171 if (multiline)
172 pp_string (pp, " ");
173 else if (i > 0)
174 pp_string (pp, ", ");
175 reg->dump_to_pp (pp, simple);
176 pp_string (pp, ": ");
177 const svalue *sval = *get (reg);
178 sval->dump_to_pp (pp, true);
179 if (multiline)
180 pp_newline (pp);
182 if (!multiline)
183 pp_string (pp, "}");
186 /* Dump this object to stderr. */
188 DEBUG_FUNCTION void
189 region_to_value_map::dump (bool simple) const
191 pretty_printer pp;
192 pp_format_decoder (&pp) = default_tree_printer;
193 pp_show_color (&pp) = pp_show_color (global_dc->printer);
194 pp.buffer->stream = stderr;
195 dump_to_pp (&pp, simple, true);
196 pp_newline (&pp);
197 pp_flush (&pp);
201 /* Attempt to merge THIS with OTHER, writing the result
202 to OUT.
204 For now, write (region, value) mappings that are in common between THIS
205 and OTHER to OUT, effectively taking the intersection, rather than
206 rejecting differences. */
208 bool
209 region_to_value_map::can_merge_with_p (const region_to_value_map &other,
210 region_to_value_map *out) const
212 for (auto iter : *this)
214 const region *iter_reg = iter.first;
215 const svalue *iter_sval = iter.second;
216 const svalue * const * other_slot = other.get (iter_reg);
217 if (other_slot)
218 if (iter_sval == *other_slot)
219 out->put (iter_reg, iter_sval);
221 return true;
224 /* Purge any state involving SVAL. */
226 void
227 region_to_value_map::purge_state_involving (const svalue *sval)
229 auto_vec<const region *> to_purge;
230 for (auto iter : *this)
232 const region *iter_reg = iter.first;
233 const svalue *iter_sval = iter.second;
234 if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
235 to_purge.safe_push (iter_reg);
237 for (auto iter : to_purge)
238 m_hash_map.remove (iter);
241 /* class region_model. */
243 /* Ctor for region_model: construct an "empty" model. */
245 region_model::region_model (region_model_manager *mgr)
246 : m_mgr (mgr), m_store (), m_current_frame (NULL),
247 m_dynamic_extents ()
249 m_constraints = new constraint_manager (mgr);
252 /* region_model's copy ctor. */
254 region_model::region_model (const region_model &other)
255 : m_mgr (other.m_mgr), m_store (other.m_store),
256 m_constraints (new constraint_manager (*other.m_constraints)),
257 m_current_frame (other.m_current_frame),
258 m_dynamic_extents (other.m_dynamic_extents)
262 /* region_model's dtor. */
264 region_model::~region_model ()
266 delete m_constraints;
269 /* region_model's assignment operator. */
271 region_model &
272 region_model::operator= (const region_model &other)
274 /* m_mgr is const. */
275 gcc_assert (m_mgr == other.m_mgr);
277 m_store = other.m_store;
279 delete m_constraints;
280 m_constraints = new constraint_manager (*other.m_constraints);
282 m_current_frame = other.m_current_frame;
284 m_dynamic_extents = other.m_dynamic_extents;
286 return *this;
289 /* Equality operator for region_model.
291 Amongst other things this directly compares the stores and the constraint
292 managers, so for this to be meaningful both this and OTHER should
293 have been canonicalized. */
295 bool
296 region_model::operator== (const region_model &other) const
298 /* We can only compare instances that use the same manager. */
299 gcc_assert (m_mgr == other.m_mgr);
301 if (m_store != other.m_store)
302 return false;
304 if (*m_constraints != *other.m_constraints)
305 return false;
307 if (m_current_frame != other.m_current_frame)
308 return false;
310 if (m_dynamic_extents != other.m_dynamic_extents)
311 return false;
313 gcc_checking_assert (hash () == other.hash ());
315 return true;
318 /* Generate a hash value for this region_model. */
320 hashval_t
321 region_model::hash () const
323 hashval_t result = m_store.hash ();
324 result ^= m_constraints->hash ();
325 return result;
328 /* Dump a representation of this model to PP, showing the
329 stack, the store, and any constraints.
330 Use SIMPLE to control how svalues and regions are printed. */
332 void
333 region_model::dump_to_pp (pretty_printer *pp, bool simple,
334 bool multiline) const
336 /* Dump stack. */
337 pp_printf (pp, "stack depth: %i", get_stack_depth ());
338 if (multiline)
339 pp_newline (pp);
340 else
341 pp_string (pp, " {");
342 for (const frame_region *iter_frame = m_current_frame; iter_frame;
343 iter_frame = iter_frame->get_calling_frame ())
345 if (multiline)
346 pp_string (pp, " ");
347 else if (iter_frame != m_current_frame)
348 pp_string (pp, ", ");
349 pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
350 iter_frame->dump_to_pp (pp, simple);
351 if (multiline)
352 pp_newline (pp);
354 if (!multiline)
355 pp_string (pp, "}");
357 /* Dump store. */
358 if (!multiline)
359 pp_string (pp, ", {");
360 m_store.dump_to_pp (pp, simple, multiline,
361 m_mgr->get_store_manager ());
362 if (!multiline)
363 pp_string (pp, "}");
365 /* Dump constraints. */
366 pp_string (pp, "constraint_manager:");
367 if (multiline)
368 pp_newline (pp);
369 else
370 pp_string (pp, " {");
371 m_constraints->dump_to_pp (pp, multiline);
372 if (!multiline)
373 pp_string (pp, "}");
375 /* Dump sizes of dynamic regions, if any are known. */
376 if (!m_dynamic_extents.is_empty ())
378 pp_string (pp, "dynamic_extents:");
379 m_dynamic_extents.dump_to_pp (pp, simple, multiline);
383 /* Dump a representation of this model to FILE. */
385 void
386 region_model::dump (FILE *fp, bool simple, bool multiline) const
388 pretty_printer pp;
389 pp_format_decoder (&pp) = default_tree_printer;
390 pp_show_color (&pp) = pp_show_color (global_dc->printer);
391 pp.buffer->stream = fp;
392 dump_to_pp (&pp, simple, multiline);
393 pp_newline (&pp);
394 pp_flush (&pp);
397 /* Dump a multiline representation of this model to stderr. */
399 DEBUG_FUNCTION void
400 region_model::dump (bool simple) const
402 dump (stderr, simple, true);
405 /* Dump a multiline representation of this model to stderr. */
407 DEBUG_FUNCTION void
408 region_model::debug () const
410 dump (true);
413 /* Assert that this object is valid. */
415 void
416 region_model::validate () const
418 m_store.validate ();
421 /* Canonicalize the store and constraints, to maximize the chance of
422 equality between region_model instances. */
424 void
425 region_model::canonicalize ()
427 m_store.canonicalize (m_mgr->get_store_manager ());
428 m_constraints->canonicalize ();
431 /* Return true if this region_model is in canonical form. */
433 bool
434 region_model::canonicalized_p () const
436 region_model copy (*this);
437 copy.canonicalize ();
438 return *this == copy;
441 /* See the comment for store::loop_replay_fixup. */
443 void
444 region_model::loop_replay_fixup (const region_model *dst_state)
446 m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
449 /* A subclass of pending_diagnostic for complaining about uses of
450 poisoned values. */
452 class poisoned_value_diagnostic
453 : public pending_diagnostic_subclass<poisoned_value_diagnostic>
455 public:
456 poisoned_value_diagnostic (tree expr, enum poison_kind pkind)
457 : m_expr (expr), m_pkind (pkind)
460 const char *get_kind () const FINAL OVERRIDE { return "poisoned_value_diagnostic"; }
462 bool use_of_uninit_p () const FINAL OVERRIDE
464 return m_pkind == POISON_KIND_UNINIT;
467 bool operator== (const poisoned_value_diagnostic &other) const
469 return m_expr == other.m_expr;
472 bool emit (rich_location *rich_loc) FINAL OVERRIDE
474 switch (m_pkind)
476 default:
477 gcc_unreachable ();
478 case POISON_KIND_UNINIT:
480 diagnostic_metadata m;
481 m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable". */
482 return warning_meta (rich_loc, m,
483 OPT_Wanalyzer_use_of_uninitialized_value,
484 "use of uninitialized value %qE",
485 m_expr);
487 break;
488 case POISON_KIND_FREED:
490 diagnostic_metadata m;
491 m.add_cwe (416); /* "CWE-416: Use After Free". */
492 return warning_meta (rich_loc, m,
493 OPT_Wanalyzer_use_after_free,
494 "use after %<free%> of %qE",
495 m_expr);
497 break;
498 case POISON_KIND_POPPED_STACK:
500 /* TODO: which CWE? */
501 return warning_at
502 (rich_loc,
503 OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame,
504 "dereferencing pointer %qE to within stale stack frame",
505 m_expr);
507 break;
511 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
513 switch (m_pkind)
515 default:
516 gcc_unreachable ();
517 case POISON_KIND_UNINIT:
518 return ev.formatted_print ("use of uninitialized value %qE here",
519 m_expr);
520 case POISON_KIND_FREED:
521 return ev.formatted_print ("use after %<free%> of %qE here",
522 m_expr);
523 case POISON_KIND_POPPED_STACK:
524 return ev.formatted_print
525 ("dereferencing pointer %qE to within stale stack frame",
526 m_expr);
530 private:
531 tree m_expr;
532 enum poison_kind m_pkind;
535 /* A subclass of pending_diagnostic for complaining about shifts
536 by negative counts. */
538 class shift_count_negative_diagnostic
539 : public pending_diagnostic_subclass<shift_count_negative_diagnostic>
541 public:
542 shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
543 : m_assign (assign), m_count_cst (count_cst)
546 const char *get_kind () const FINAL OVERRIDE
548 return "shift_count_negative_diagnostic";
551 bool operator== (const shift_count_negative_diagnostic &other) const
553 return (m_assign == other.m_assign
554 && same_tree_p (m_count_cst, other.m_count_cst));
557 bool emit (rich_location *rich_loc) FINAL OVERRIDE
559 return warning_at (rich_loc, OPT_Wanalyzer_shift_count_negative,
560 "shift by negative count (%qE)", m_count_cst);
563 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
565 return ev.formatted_print ("shift by negative amount here (%qE)", m_count_cst);
568 private:
569 const gassign *m_assign;
570 tree m_count_cst;
573 /* A subclass of pending_diagnostic for complaining about shifts
574 by counts >= the width of the operand type. */
576 class shift_count_overflow_diagnostic
577 : public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
579 public:
580 shift_count_overflow_diagnostic (const gassign *assign,
581 int operand_precision,
582 tree count_cst)
583 : m_assign (assign), m_operand_precision (operand_precision),
584 m_count_cst (count_cst)
587 const char *get_kind () const FINAL OVERRIDE
589 return "shift_count_overflow_diagnostic";
592 bool operator== (const shift_count_overflow_diagnostic &other) const
594 return (m_assign == other.m_assign
595 && m_operand_precision == other.m_operand_precision
596 && same_tree_p (m_count_cst, other.m_count_cst));
599 bool emit (rich_location *rich_loc) FINAL OVERRIDE
601 return warning_at (rich_loc, OPT_Wanalyzer_shift_count_overflow,
602 "shift by count (%qE) >= precision of type (%qi)",
603 m_count_cst, m_operand_precision);
606 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
608 return ev.formatted_print ("shift by count %qE here", m_count_cst);
611 private:
612 const gassign *m_assign;
613 int m_operand_precision;
614 tree m_count_cst;
617 /* If ASSIGN is a stmt that can be modelled via
618 set_value (lhs_reg, SVALUE, CTXT)
619 for some SVALUE, get the SVALUE.
620 Otherwise return NULL. */
622 const svalue *
623 region_model::get_gassign_result (const gassign *assign,
624 region_model_context *ctxt)
626 tree lhs = gimple_assign_lhs (assign);
627 tree rhs1 = gimple_assign_rhs1 (assign);
628 enum tree_code op = gimple_assign_rhs_code (assign);
629 switch (op)
631 default:
632 return NULL;
634 case POINTER_PLUS_EXPR:
636 /* e.g. "_1 = a_10(D) + 12;" */
637 tree ptr = rhs1;
638 tree offset = gimple_assign_rhs2 (assign);
640 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
641 const svalue *offset_sval = get_rvalue (offset, ctxt);
642 /* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
643 is an integer of type sizetype". */
644 offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);
646 const svalue *sval_binop
647 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
648 ptr_sval, offset_sval);
649 return sval_binop;
651 break;
653 case POINTER_DIFF_EXPR:
655 /* e.g. "_1 = p_2(D) - q_3(D);". */
656 tree rhs2 = gimple_assign_rhs2 (assign);
657 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
658 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
660 // TODO: perhaps fold to zero if they're known to be equal?
662 const svalue *sval_binop
663 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
664 rhs1_sval, rhs2_sval);
665 return sval_binop;
667 break;
669 /* Assignments of the form
670 set_value (lvalue (LHS), rvalue (EXPR))
671 for various EXPR.
672 We already have the lvalue for the LHS above, as "lhs_reg". */
673 case ADDR_EXPR: /* LHS = &RHS; */
674 case BIT_FIELD_REF:
675 case COMPONENT_REF: /* LHS = op0.op1; */
676 case MEM_REF:
677 case REAL_CST:
678 case COMPLEX_CST:
679 case VECTOR_CST:
680 case INTEGER_CST:
681 case ARRAY_REF:
682 case SSA_NAME: /* LHS = VAR; */
683 case VAR_DECL: /* LHS = VAR; */
684 case PARM_DECL:/* LHS = VAR; */
685 case REALPART_EXPR:
686 case IMAGPART_EXPR:
687 return get_rvalue (rhs1, ctxt);
689 case ABS_EXPR:
690 case ABSU_EXPR:
691 case CONJ_EXPR:
692 case BIT_NOT_EXPR:
693 case FIX_TRUNC_EXPR:
694 case FLOAT_EXPR:
695 case NEGATE_EXPR:
696 case NOP_EXPR:
697 case VIEW_CONVERT_EXPR:
699 /* Unary ops. */
700 const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
701 const svalue *sval_unaryop
702 = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
703 return sval_unaryop;
706 case EQ_EXPR:
707 case GE_EXPR:
708 case LE_EXPR:
709 case NE_EXPR:
710 case GT_EXPR:
711 case LT_EXPR:
712 case UNORDERED_EXPR:
713 case ORDERED_EXPR:
715 tree rhs2 = gimple_assign_rhs2 (assign);
717 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
718 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
720 if (TREE_TYPE (lhs) == boolean_type_node)
722 /* Consider constraints between svalues. */
723 tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
724 if (t.is_known ())
725 return m_mgr->get_or_create_constant_svalue
726 (t.is_true () ? boolean_true_node : boolean_false_node);
729 /* Otherwise, generate a symbolic binary op. */
730 const svalue *sval_binop
731 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
732 rhs1_sval, rhs2_sval);
733 return sval_binop;
735 break;
737 case PLUS_EXPR:
738 case MINUS_EXPR:
739 case MULT_EXPR:
740 case MULT_HIGHPART_EXPR:
741 case TRUNC_DIV_EXPR:
742 case CEIL_DIV_EXPR:
743 case FLOOR_DIV_EXPR:
744 case ROUND_DIV_EXPR:
745 case TRUNC_MOD_EXPR:
746 case CEIL_MOD_EXPR:
747 case FLOOR_MOD_EXPR:
748 case ROUND_MOD_EXPR:
749 case RDIV_EXPR:
750 case EXACT_DIV_EXPR:
751 case LSHIFT_EXPR:
752 case RSHIFT_EXPR:
753 case LROTATE_EXPR:
754 case RROTATE_EXPR:
755 case BIT_IOR_EXPR:
756 case BIT_XOR_EXPR:
757 case BIT_AND_EXPR:
758 case MIN_EXPR:
759 case MAX_EXPR:
760 case COMPLEX_EXPR:
762 /* Binary ops. */
763 tree rhs2 = gimple_assign_rhs2 (assign);
765 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
766 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
768 if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
770 /* "INT34-C. Do not shift an expression by a negative number of bits
771 or by greater than or equal to the number of bits that exist in
772 the operand." */
773 if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
774 if (TREE_CODE (rhs2_cst) == INTEGER_CST)
776 if (tree_int_cst_sgn (rhs2_cst) < 0)
777 ctxt->warn (new shift_count_negative_diagnostic
778 (assign, rhs2_cst));
779 else if (compare_tree_int (rhs2_cst,
780 TYPE_PRECISION (TREE_TYPE (rhs1)))
781 >= 0)
782 ctxt->warn (new shift_count_overflow_diagnostic
783 (assign, TYPE_PRECISION (TREE_TYPE (rhs1)),
784 rhs2_cst));
788 const svalue *sval_binop
789 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
790 rhs1_sval, rhs2_sval);
791 return sval_binop;
794 /* Vector expressions. In theory we could implement these elementwise,
795 but for now, simply return unknown values. */
796 case VEC_DUPLICATE_EXPR:
797 case VEC_SERIES_EXPR:
798 case VEC_COND_EXPR:
799 case VEC_PERM_EXPR:
800 case VEC_WIDEN_MULT_HI_EXPR:
801 case VEC_WIDEN_MULT_LO_EXPR:
802 case VEC_WIDEN_MULT_EVEN_EXPR:
803 case VEC_WIDEN_MULT_ODD_EXPR:
804 case VEC_UNPACK_HI_EXPR:
805 case VEC_UNPACK_LO_EXPR:
806 case VEC_UNPACK_FLOAT_HI_EXPR:
807 case VEC_UNPACK_FLOAT_LO_EXPR:
808 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
809 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
810 case VEC_PACK_TRUNC_EXPR:
811 case VEC_PACK_SAT_EXPR:
812 case VEC_PACK_FIX_TRUNC_EXPR:
813 case VEC_PACK_FLOAT_EXPR:
814 case VEC_WIDEN_LSHIFT_HI_EXPR:
815 case VEC_WIDEN_LSHIFT_LO_EXPR:
816 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
820 /* Check for SVAL being poisoned, adding a warning to CTXT.
821 Return SVAL, or, if a warning is added, another value, to avoid
822 repeatedly complaining about the same poisoned value in followup code. */
824 const svalue *
825 region_model::check_for_poison (const svalue *sval,
826 tree expr,
827 region_model_context *ctxt) const
829 if (!ctxt)
830 return sval;
832 if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
834 /* If we have an SSA name for a temporary, we don't want to print
835 '<unknown>'.
836 Poisoned values are shared by type, and so we can't reconstruct
837 the tree other than via the def stmts, using
838 fixup_tree_for_diagnostic. */
839 tree diag_arg = fixup_tree_for_diagnostic (expr);
840 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
841 if (ctxt->warn (new poisoned_value_diagnostic (diag_arg, pkind)))
843 /* We only want to report use of a poisoned value at the first
844 place it gets used; return an unknown value to avoid generating
845 a chain of followup warnings. */
846 sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
849 return sval;
852 return sval;
855 /* Update this model for the ASSIGN stmt, using CTXT to report any
856 diagnostics. */
858 void
859 region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
861 tree lhs = gimple_assign_lhs (assign);
862 tree rhs1 = gimple_assign_rhs1 (assign);
864 const region *lhs_reg = get_lvalue (lhs, ctxt);
866 /* Most assignments are handled by:
867 set_value (lhs_reg, SVALUE, CTXT)
868 for some SVALUE. */
869 if (const svalue *sval = get_gassign_result (assign, ctxt))
871 tree expr = get_diagnostic_tree_for_gassign (assign);
872 check_for_poison (sval, expr, ctxt);
873 set_value (lhs_reg, sval, ctxt);
874 return;
877 enum tree_code op = gimple_assign_rhs_code (assign);
878 switch (op)
880 default:
882 if (0)
883 sorry_at (assign->location, "unhandled assignment op: %qs",
884 get_tree_code_name (op));
885 const svalue *unknown_sval
886 = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
887 set_value (lhs_reg, unknown_sval, ctxt);
889 break;
891 case CONSTRUCTOR:
893 if (TREE_CLOBBER_P (rhs1))
895 /* e.g. "x ={v} {CLOBBER};" */
896 clobber_region (lhs_reg);
898 else
900 /* Any CONSTRUCTOR that survives to this point is either
901 just a zero-init of everything, or a vector. */
902 if (!CONSTRUCTOR_NO_CLEARING (rhs1))
903 zero_fill_region (lhs_reg);
904 unsigned ix;
905 tree index;
906 tree val;
907 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
909 gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
910 if (!index)
911 index = build_int_cst (integer_type_node, ix);
912 gcc_assert (TREE_CODE (index) == INTEGER_CST);
913 const svalue *index_sval
914 = m_mgr->get_or_create_constant_svalue (index);
915 gcc_assert (index_sval);
916 const region *sub_reg
917 = m_mgr->get_element_region (lhs_reg,
918 TREE_TYPE (val),
919 index_sval);
920 const svalue *val_sval = get_rvalue (val, ctxt);
921 set_value (sub_reg, val_sval, ctxt);
925 break;
927 case STRING_CST:
929 /* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};". */
930 const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
931 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
932 ctxt ? ctxt->get_uncertainty () : NULL);
934 break;
938 /* A pending_diagnostic subclass for implementing "__analyzer_dump_path". */
940 class dump_path_diagnostic
941 : public pending_diagnostic_subclass<dump_path_diagnostic>
943 public:
944 bool emit (rich_location *richloc) FINAL OVERRIDE
946 inform (richloc, "path");
947 return true;
950 const char *get_kind () const FINAL OVERRIDE { return "dump_path_diagnostic"; }
952 bool operator== (const dump_path_diagnostic &) const
954 return true;
958 /* Handle the pre-sm-state part of STMT, modifying this object in-place.
959 Write true to *OUT_TERMINATE_PATH if the path should be terminated.
960 Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
961 side effects. */
963 void
964 region_model::on_stmt_pre (const gimple *stmt,
965 bool *out_terminate_path,
966 bool *out_unknown_side_effects,
967 region_model_context *ctxt)
969 switch (gimple_code (stmt))
971 default:
972 /* No-op for now. */
973 break;
975 case GIMPLE_ASSIGN:
977 const gassign *assign = as_a <const gassign *> (stmt);
978 on_assignment (assign, ctxt);
980 break;
982 case GIMPLE_ASM:
984 const gasm *asm_stmt = as_a <const gasm *> (stmt);
985 on_asm_stmt (asm_stmt, ctxt);
987 break;
989 case GIMPLE_CALL:
991 /* Track whether we have a gcall to a function that's not recognized by
992 anything, for which we don't have a function body, or for which we
993 don't know the fndecl. */
994 const gcall *call = as_a <const gcall *> (stmt);
996 /* Debugging/test support. */
997 if (is_special_named_call_p (call, "__analyzer_describe", 2))
998 impl_call_analyzer_describe (call, ctxt);
999 else if (is_special_named_call_p (call, "__analyzer_dump_capacity", 1))
1000 impl_call_analyzer_dump_capacity (call, ctxt);
1001 else if (is_special_named_call_p (call, "__analyzer_dump_path", 0))
1003 /* Handle the builtin "__analyzer_dump_path" by queuing a
1004 diagnostic at this exploded_node. */
1005 ctxt->warn (new dump_path_diagnostic ());
1007 else if (is_special_named_call_p (call, "__analyzer_dump_region_model",
1010 /* Handle the builtin "__analyzer_dump_region_model" by dumping
1011 the region model's state to stderr. */
1012 dump (false);
1014 else if (is_special_named_call_p (call, "__analyzer_eval", 1))
1015 impl_call_analyzer_eval (call, ctxt);
1016 else if (is_special_named_call_p (call, "__analyzer_break", 0))
1018 /* Handle the builtin "__analyzer_break" by triggering a
1019 breakpoint. */
1020 /* TODO: is there a good cross-platform way to do this? */
1021 raise (SIGINT);
1023 else if (is_special_named_call_p (call,
1024 "__analyzer_dump_exploded_nodes",
1027 /* This is handled elsewhere. */
1029 else
1030 *out_unknown_side_effects = on_call_pre (call, ctxt,
1031 out_terminate_path);
1033 break;
1035 case GIMPLE_RETURN:
1037 const greturn *return_ = as_a <const greturn *> (stmt);
1038 on_return (return_, ctxt);
1040 break;
1044 /* Update this model for the CALL stmt, using CTXT to report any
1045 diagnostics - the first half.
1047 Updates to the region_model that should be made *before* sm-states
1048 are updated are done here; other updates to the region_model are done
1049 in region_model::on_call_post.
1051 Return true if the function call has unknown side effects (it wasn't
1052 recognized and we don't have a body for it, or are unable to tell which
1053 fndecl it is).
1055 Write true to *OUT_TERMINATE_PATH if this execution path should be
1056 terminated (e.g. the function call terminates the process). */
1058 bool
1059 region_model::on_call_pre (const gcall *call, region_model_context *ctxt,
1060 bool *out_terminate_path)
1062 call_details cd (call, this, ctxt);
1064 bool unknown_side_effects = false;
1066 /* Some of the cases below update the lhs of the call based on the
1067 return value, but not all. Provide a default value, which may
1068 get overwritten below. */
1069 if (tree lhs = gimple_call_lhs (call))
1071 const region *lhs_region = get_lvalue (lhs, ctxt);
1072 const svalue *sval
1073 = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
1074 lhs_region);
1075 purge_state_involving (sval, ctxt);
1076 set_value (lhs_region, sval, ctxt);
1079 if (gimple_call_internal_p (call))
1081 switch (gimple_call_internal_fn (call))
1083 default:
1084 break;
1085 case IFN_BUILTIN_EXPECT:
1086 impl_call_builtin_expect (cd);
1087 return false;
1088 case IFN_UBSAN_BOUNDS:
1089 return false;
1093 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
1095 /* The various impl_call_* member functions are implemented
1096 in region-model-impl-calls.cc.
1097 Having them split out into separate functions makes it easier
1098 to put breakpoints on the handling of specific functions. */
1100 if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
1101 && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
1102 switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
1104 default:
1105 if (!DECL_PURE_P (callee_fndecl))
1106 unknown_side_effects = true;
1107 break;
1108 case BUILT_IN_ALLOCA:
1109 case BUILT_IN_ALLOCA_WITH_ALIGN:
1110 impl_call_alloca (cd);
1111 return false;
1112 case BUILT_IN_CALLOC:
1113 impl_call_calloc (cd);
1114 return false;
1115 case BUILT_IN_EXPECT:
1116 case BUILT_IN_EXPECT_WITH_PROBABILITY:
1117 impl_call_builtin_expect (cd);
1118 return false;
1119 case BUILT_IN_FREE:
1120 /* Handle in "on_call_post". */
1121 break;
1122 case BUILT_IN_MALLOC:
1123 impl_call_malloc (cd);
1124 return false;
1125 case BUILT_IN_MEMCPY:
1126 case BUILT_IN_MEMCPY_CHK:
1127 impl_call_memcpy (cd);
1128 return false;
1129 case BUILT_IN_MEMSET:
1130 case BUILT_IN_MEMSET_CHK:
1131 impl_call_memset (cd);
1132 return false;
1133 break;
1134 case BUILT_IN_REALLOC:
1135 impl_call_realloc (cd);
1136 return false;
1137 case BUILT_IN_STRCPY:
1138 case BUILT_IN_STRCPY_CHK:
1139 impl_call_strcpy (cd);
1140 return false;
1141 case BUILT_IN_STRLEN:
1142 impl_call_strlen (cd);
1143 return false;
1145 case BUILT_IN_STACK_SAVE:
1146 case BUILT_IN_STACK_RESTORE:
1147 return false;
1149 /* Stdio builtins. */
1150 case BUILT_IN_FPRINTF:
1151 case BUILT_IN_FPRINTF_UNLOCKED:
1152 case BUILT_IN_PUTC:
1153 case BUILT_IN_PUTC_UNLOCKED:
1154 case BUILT_IN_FPUTC:
1155 case BUILT_IN_FPUTC_UNLOCKED:
1156 case BUILT_IN_FPUTS:
1157 case BUILT_IN_FPUTS_UNLOCKED:
1158 case BUILT_IN_FWRITE:
1159 case BUILT_IN_FWRITE_UNLOCKED:
1160 case BUILT_IN_PRINTF:
1161 case BUILT_IN_PRINTF_UNLOCKED:
1162 case BUILT_IN_PUTCHAR:
1163 case BUILT_IN_PUTCHAR_UNLOCKED:
1164 case BUILT_IN_PUTS:
1165 case BUILT_IN_PUTS_UNLOCKED:
1166 case BUILT_IN_VFPRINTF:
1167 case BUILT_IN_VPRINTF:
1168 /* These stdio builtins have external effects that are out
1169 of scope for the analyzer: we only want to model the effects
1170 on the return value. */
1171 break;
1173 else if (is_named_call_p (callee_fndecl, "malloc", call, 1))
1175 impl_call_malloc (cd);
1176 return false;
1178 else if (is_named_call_p (callee_fndecl, "calloc", call, 2))
1180 impl_call_calloc (cd);
1181 return false;
1183 else if (is_named_call_p (callee_fndecl, "alloca", call, 1))
1185 impl_call_alloca (cd);
1186 return false;
1188 else if (is_named_call_p (callee_fndecl, "realloc", call, 2))
1190 impl_call_realloc (cd);
1191 return false;
1193 else if (is_named_call_p (callee_fndecl, "error"))
1195 if (impl_call_error (cd, 3, out_terminate_path))
1196 return false;
1197 else
1198 unknown_side_effects = true;
1200 else if (is_named_call_p (callee_fndecl, "error_at_line"))
1202 if (impl_call_error (cd, 5, out_terminate_path))
1203 return false;
1204 else
1205 unknown_side_effects = true;
1207 else if (is_named_call_p (callee_fndecl, "fgets", call, 3)
1208 || is_named_call_p (callee_fndecl, "fgets_unlocked", call, 3))
1210 impl_call_fgets (cd);
1211 return false;
1213 else if (is_named_call_p (callee_fndecl, "fread", call, 4))
1215 impl_call_fread (cd);
1216 return false;
1218 else if (is_named_call_p (callee_fndecl, "getchar", call, 0))
1220 /* No side-effects (tracking stream state is out-of-scope
1221 for the analyzer). */
1223 else if (is_named_call_p (callee_fndecl, "memset", call, 3)
1224 && POINTER_TYPE_P (cd.get_arg_type (0)))
1226 impl_call_memset (cd);
1227 return false;
1229 else if (is_named_call_p (callee_fndecl, "strlen", call, 1)
1230 && POINTER_TYPE_P (cd.get_arg_type (0)))
1232 impl_call_strlen (cd);
1233 return false;
1235 else if (is_named_call_p (callee_fndecl, "operator new", call, 1))
1237 impl_call_operator_new (cd);
1238 return false;
1240 else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
1242 impl_call_operator_new (cd);
1243 return false;
1245 else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1246 || is_named_call_p (callee_fndecl, "operator delete", call, 2)
1247 || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1249 /* Handle in "on_call_post". */
1251 else if (!fndecl_has_gimple_body_p (callee_fndecl)
1252 && !DECL_PURE_P (callee_fndecl)
1253 && !fndecl_built_in_p (callee_fndecl))
1254 unknown_side_effects = true;
1256 else
1257 unknown_side_effects = true;
1259 return unknown_side_effects;
1262 /* Update this model for the CALL stmt, using CTXT to report any
1263 diagnostics - the second half.
1265 Updates to the region_model that should be made *after* sm-states
1266 are updated are done here; other updates to the region_model are done
1267 in region_model::on_call_pre.
1269 If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
1270 to purge state. */
1272 void
1273 region_model::on_call_post (const gcall *call,
1274 bool unknown_side_effects,
1275 region_model_context *ctxt)
1277 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
1279 if (is_named_call_p (callee_fndecl, "free", call, 1))
1281 call_details cd (call, this, ctxt);
1282 impl_call_free (cd);
1283 return;
1285 if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1286 || is_named_call_p (callee_fndecl, "operator delete", call, 2)
1287 || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1289 call_details cd (call, this, ctxt);
1290 impl_call_operator_delete (cd);
1291 return;
1293 /* Was this fndecl referenced by
1294 __attribute__((malloc(FOO)))? */
1295 if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
1297 call_details cd (call, this, ctxt);
1298 impl_deallocation_call (cd);
1299 return;
1303 if (unknown_side_effects)
1304 handle_unrecognized_call (call, ctxt);
1307 /* Purge state involving SVAL from this region_model, using CTXT
1308 (if non-NULL) to purge other state in a program_state.
1310 For example, if we're at the def-stmt of an SSA name, then we need to
1311 purge any state for svalues that involve that SSA name. This avoids
1312 false positives in loops, since a symbolic value referring to the
1313 SSA name will be referring to the previous value of that SSA name.
1315 For example, in:
1316 while ((e = hashmap_iter_next(&iter))) {
1317 struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
1318 free (e_strbuf->value);
1320 at the def-stmt of e_8:
1321 e_8 = hashmap_iter_next (&iter);
1322 we should purge the "freed" state of:
1323 INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
1324 which is the "e_strbuf->value" value from the previous iteration,
1325 or we will erroneously report a double-free - the "e_8" within it
1326 refers to the previous value. */
1328 void
1329 region_model::purge_state_involving (const svalue *sval,
1330 region_model_context *ctxt)
1332 if (!sval->can_have_associated_state_p ())
1333 return;
1334 m_store.purge_state_involving (sval, m_mgr);
1335 m_constraints->purge_state_involving (sval);
1336 m_dynamic_extents.purge_state_involving (sval);
1337 if (ctxt)
1338 ctxt->purge_state_involving (sval);
1341 /* Handle a call CALL to a function with unknown behavior.
1343 Traverse the regions in this model, determining what regions are
1344 reachable from pointer arguments to CALL and from global variables,
1345 recursively.
1347 Set all reachable regions to new unknown values and purge sm-state
1348 from their values, and from values that point to them. */
1350 void
1351 region_model::handle_unrecognized_call (const gcall *call,
1352 region_model_context *ctxt)
1354 tree fndecl = get_fndecl_for_call (call, ctxt);
1356 reachable_regions reachable_regs (this);
1358 /* Determine the reachable regions and their mutability. */
1360 /* Add globals and regions that already escaped in previous
1361 unknown calls. */
1362 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
1363 &reachable_regs);
1365 /* Params that are pointers. */
1366 tree iter_param_types = NULL_TREE;
1367 if (fndecl)
1368 iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
1369 for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
1371 /* Track expected param type, where available. */
1372 tree param_type = NULL_TREE;
1373 if (iter_param_types)
1375 param_type = TREE_VALUE (iter_param_types);
1376 gcc_assert (param_type);
1377 iter_param_types = TREE_CHAIN (iter_param_types);
1380 tree parm = gimple_call_arg (call, arg_idx);
1381 const svalue *parm_sval = get_rvalue (parm, ctxt);
1382 reachable_regs.handle_parm (parm_sval, param_type);
1386 uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;
1388 /* Purge sm-state for the svalues that were reachable,
1389 both in non-mutable and mutable form. */
1390 for (svalue_set::iterator iter
1391 = reachable_regs.begin_reachable_svals ();
1392 iter != reachable_regs.end_reachable_svals (); ++iter)
1394 const svalue *sval = (*iter);
1395 if (ctxt)
1396 ctxt->on_unknown_change (sval, false);
1398 for (svalue_set::iterator iter
1399 = reachable_regs.begin_mutable_svals ();
1400 iter != reachable_regs.end_mutable_svals (); ++iter)
1402 const svalue *sval = (*iter);
1403 if (ctxt)
1404 ctxt->on_unknown_change (sval, true);
1405 if (uncertainty)
1406 uncertainty->on_mutable_sval_at_unknown_call (sval);
1409 /* Mark any clusters that have escaped. */
1410 reachable_regs.mark_escaped_clusters (ctxt);
1412 /* Update bindings for all clusters that have escaped, whether above,
1413 or previously. */
1414 m_store.on_unknown_fncall (call, m_mgr->get_store_manager ());
1416 /* Purge dynamic extents from any regions that have escaped mutably:
1417 realloc could have been called on them. */
1418 for (hash_set<const region *>::iterator
1419 iter = reachable_regs.begin_mutable_base_regs ();
1420 iter != reachable_regs.end_mutable_base_regs ();
1421 ++iter)
1423 const region *base_reg = (*iter);
1424 unset_dynamic_extents (base_reg);
1428 /* Traverse the regions in this model, determining what regions are
1429 reachable from the store and populating *OUT.
1431 If EXTRA_SVAL is non-NULL, treat it as an additional "root"
1432 for reachability (for handling return values from functions when
1433 analyzing return of the only function on the stack).
1435 If UNCERTAINTY is non-NULL, treat any svalues that were recorded
1436 within it as being maybe-bound as additional "roots" for reachability.
1438 Find svalues that haven't leaked. */
1440 void
1441 region_model::get_reachable_svalues (svalue_set *out,
1442 const svalue *extra_sval,
1443 const uncertainty_t *uncertainty)
1445 reachable_regions reachable_regs (this);
1447 /* Add globals and regions that already escaped in previous
1448 unknown calls. */
1449 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
1450 &reachable_regs);
1452 if (extra_sval)
1453 reachable_regs.handle_sval (extra_sval);
1455 if (uncertainty)
1456 for (uncertainty_t::iterator iter
1457 = uncertainty->begin_maybe_bound_svals ();
1458 iter != uncertainty->end_maybe_bound_svals (); ++iter)
1459 reachable_regs.handle_sval (*iter);
1461 /* Get regions for locals that have explicitly bound values. */
1462 for (store::cluster_map_t::iterator iter = m_store.begin ();
1463 iter != m_store.end (); ++iter)
1465 const region *base_reg = (*iter).first;
1466 if (const region *parent = base_reg->get_parent_region ())
1467 if (parent->get_kind () == RK_FRAME)
1468 reachable_regs.add (base_reg, false);
1471 /* Populate *OUT based on the values that were reachable. */
1472 for (svalue_set::iterator iter
1473 = reachable_regs.begin_reachable_svals ();
1474 iter != reachable_regs.end_reachable_svals (); ++iter)
1475 out->add (*iter);
1478 /* Update this model for the RETURN_STMT, using CTXT to report any
1479 diagnostics. */
1481 void
1482 region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
1484 tree callee = get_current_function ()->decl;
1485 tree lhs = DECL_RESULT (callee);
1486 tree rhs = gimple_return_retval (return_stmt);
1488 if (lhs && rhs)
1489 copy_region (get_lvalue (lhs, ctxt), get_lvalue (rhs, ctxt), ctxt);
1492 /* Update this model for a call and return of setjmp/sigsetjmp at CALL within
1493 ENODE, using CTXT to report any diagnostics.
1495 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
1496 0), as opposed to any second return due to longjmp/sigsetjmp. */
1498 void
1499 region_model::on_setjmp (const gcall *call, const exploded_node *enode,
1500 region_model_context *ctxt)
1502 const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
1503 const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
1504 ctxt);
1506 /* Create a setjmp_svalue for this call and store it in BUF_REG's
1507 region. */
1508 if (buf_reg)
1510 setjmp_record r (enode, call);
1511 const svalue *sval
1512 = m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
1513 set_value (buf_reg, sval, ctxt);
1516 /* Direct calls to setjmp return 0. */
1517 if (tree lhs = gimple_call_lhs (call))
1519 const svalue *new_sval
1520 = m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
1521 const region *lhs_reg = get_lvalue (lhs, ctxt);
1522 set_value (lhs_reg, new_sval, ctxt);
1526 /* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
1527 to a "setjmp" at SETJMP_CALL where the final stack depth should be
1528 SETJMP_STACK_DEPTH. Pop any stack frames. Leak detection is *not*
1529 done, and should be done by the caller. */
1531 void
1532 region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
1533 int setjmp_stack_depth, region_model_context *ctxt)
1535 /* Evaluate the val, using the frame of the "longjmp". */
1536 tree fake_retval = gimple_call_arg (longjmp_call, 1);
1537 const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);
1539 /* Pop any frames until we reach the stack depth of the function where
1540 setjmp was called. */
1541 gcc_assert (get_stack_depth () >= setjmp_stack_depth);
1542 while (get_stack_depth () > setjmp_stack_depth)
1543 pop_frame (NULL, NULL, ctxt);
1545 gcc_assert (get_stack_depth () == setjmp_stack_depth);
1547 /* Assign to LHS of "setjmp" in new_state. */
1548 if (tree lhs = gimple_call_lhs (setjmp_call))
1550 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
1551 const svalue *zero_sval
1552 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
1553 tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
1554 /* If we have 0, use 1. */
1555 if (eq_zero.is_true ())
1557 const svalue *one_sval
1558 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
1559 fake_retval_sval = one_sval;
1561 else
1563 /* Otherwise note that the value is nonzero. */
1564 m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
1567 /* Decorate the return value from setjmp as being unmergeable,
1568 so that we don't attempt to merge states with it as zero
1569 with states in which it's nonzero, leading to a clean distinction
1570 in the exploded_graph betweeen the first return and the second
1571 return. */
1572 fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);
1574 const region *lhs_reg = get_lvalue (lhs, ctxt);
1575 set_value (lhs_reg, fake_retval_sval, ctxt);
1579 /* Update this region_model for a phi stmt of the form
1580 LHS = PHI <...RHS...>.
1581 where RHS is for the appropriate edge.
1582 Get state from OLD_STATE so that all of the phi stmts for a basic block
1583 are effectively handled simultaneously. */
1585 void
1586 region_model::handle_phi (const gphi *phi,
1587 tree lhs, tree rhs,
1588 const region_model &old_state,
1589 region_model_context *ctxt)
1591 /* For now, don't bother tracking the .MEM SSA names. */
1592 if (tree var = SSA_NAME_VAR (lhs))
1593 if (TREE_CODE (var) == VAR_DECL)
1594 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
1595 return;
1597 const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
1598 const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
1600 set_value (dst_reg, src_sval, ctxt);
1602 if (ctxt)
1603 ctxt->on_phi (phi, rhs);
1606 /* Implementation of region_model::get_lvalue; the latter adds type-checking.
1608 Get the id of the region for PV within this region_model,
1609 emitting any diagnostics to CTXT. */
1611 const region *
1612 region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
1614 tree expr = pv.m_tree;
1616 gcc_assert (expr);
1618 switch (TREE_CODE (expr))
1620 default:
1621 return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
1622 dump_location_t ());
1624 case ARRAY_REF:
1626 tree array = TREE_OPERAND (expr, 0);
1627 tree index = TREE_OPERAND (expr, 1);
1629 const region *array_reg = get_lvalue (array, ctxt);
1630 const svalue *index_sval = get_rvalue (index, ctxt);
1631 return m_mgr->get_element_region (array_reg,
1632 TREE_TYPE (TREE_TYPE (array)),
1633 index_sval);
1635 break;
1637 case MEM_REF:
1639 tree ptr = TREE_OPERAND (expr, 0);
1640 tree offset = TREE_OPERAND (expr, 1);
1641 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
1642 const svalue *offset_sval = get_rvalue (offset, ctxt);
1643 const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
1644 return m_mgr->get_offset_region (star_ptr,
1645 TREE_TYPE (expr),
1646 offset_sval);
1648 break;
1650 case FUNCTION_DECL:
1651 return m_mgr->get_region_for_fndecl (expr);
1653 case LABEL_DECL:
1654 return m_mgr->get_region_for_label (expr);
1656 case VAR_DECL:
1657 /* Handle globals. */
1658 if (is_global_var (expr))
1659 return m_mgr->get_region_for_global (expr);
1661 /* Fall through. */
1663 case SSA_NAME:
1664 case PARM_DECL:
1665 case RESULT_DECL:
1667 gcc_assert (TREE_CODE (expr) == SSA_NAME
1668 || TREE_CODE (expr) == PARM_DECL
1669 || TREE_CODE (expr) == VAR_DECL
1670 || TREE_CODE (expr) == RESULT_DECL);
1672 int stack_index = pv.m_stack_depth;
1673 const frame_region *frame = get_frame_at_index (stack_index);
1674 gcc_assert (frame);
1675 return frame->get_region_for_local (m_mgr, expr);
1678 case COMPONENT_REF:
1680 /* obj.field */
1681 tree obj = TREE_OPERAND (expr, 0);
1682 tree field = TREE_OPERAND (expr, 1);
1683 const region *obj_reg = get_lvalue (obj, ctxt);
1684 return m_mgr->get_field_region (obj_reg, field);
1686 break;
1688 case STRING_CST:
1689 return m_mgr->get_region_for_string (expr);
1693 /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
1695 static void
1696 assert_compat_types (tree src_type, tree dst_type)
1698 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
1700 #if CHECKING_P
1701 if (!(useless_type_conversion_p (src_type, dst_type)))
1702 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
1703 #endif
1707 /* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
1709 static bool
1710 compat_types_p (tree src_type, tree dst_type)
1712 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
1713 if (!(useless_type_conversion_p (src_type, dst_type)))
1714 return false;
1715 return true;
1718 /* Get the region for PV within this region_model,
1719 emitting any diagnostics to CTXT. */
1721 const region *
1722 region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
1724 if (pv.m_tree == NULL_TREE)
1725 return NULL;
1727 const region *result_reg = get_lvalue_1 (pv, ctxt);
1728 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
1729 return result_reg;
1732 /* Get the region for EXPR within this region_model (assuming the most
1733 recent stack frame if it's a local). */
1735 const region *
1736 region_model::get_lvalue (tree expr, region_model_context *ctxt) const
1738 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
1741 /* Implementation of region_model::get_rvalue; the latter adds type-checking.
1743 Get the value of PV within this region_model,
1744 emitting any diagnostics to CTXT. */
1746 const svalue *
1747 region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
1749 gcc_assert (pv.m_tree);
1751 switch (TREE_CODE (pv.m_tree))
1753 default:
1754 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
1756 case ADDR_EXPR:
1758 /* "&EXPR". */
1759 tree expr = pv.m_tree;
1760 tree op0 = TREE_OPERAND (expr, 0);
1761 const region *expr_reg = get_lvalue (op0, ctxt);
1762 return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
1764 break;
1766 case BIT_FIELD_REF:
1768 tree expr = pv.m_tree;
1769 tree op0 = TREE_OPERAND (expr, 0);
1770 const region *reg = get_lvalue (op0, ctxt);
1771 tree num_bits = TREE_OPERAND (expr, 1);
1772 tree first_bit_offset = TREE_OPERAND (expr, 2);
1773 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
1774 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
1775 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
1776 TREE_INT_CST_LOW (num_bits));
1777 return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
1780 case SSA_NAME:
1781 case VAR_DECL:
1782 case PARM_DECL:
1783 case RESULT_DECL:
1784 case ARRAY_REF:
1786 const region *reg = get_lvalue (pv, ctxt);
1787 return get_store_value (reg, ctxt);
1790 case REALPART_EXPR:
1791 case IMAGPART_EXPR:
1792 case VIEW_CONVERT_EXPR:
1794 tree expr = pv.m_tree;
1795 tree arg = TREE_OPERAND (expr, 0);
1796 const svalue *arg_sval = get_rvalue (arg, ctxt);
1797 const svalue *sval_unaryop
1798 = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
1799 arg_sval);
1800 return sval_unaryop;
1803 case INTEGER_CST:
1804 case REAL_CST:
1805 case COMPLEX_CST:
1806 case VECTOR_CST:
1807 case STRING_CST:
1808 return m_mgr->get_or_create_constant_svalue (pv.m_tree);
1810 case POINTER_PLUS_EXPR:
1812 tree expr = pv.m_tree;
1813 tree ptr = TREE_OPERAND (expr, 0);
1814 tree offset = TREE_OPERAND (expr, 1);
1815 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
1816 const svalue *offset_sval = get_rvalue (offset, ctxt);
1817 const svalue *sval_binop
1818 = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
1819 ptr_sval, offset_sval);
1820 return sval_binop;
1823 /* Binary ops. */
1824 case PLUS_EXPR:
1825 case MULT_EXPR:
1827 tree expr = pv.m_tree;
1828 tree arg0 = TREE_OPERAND (expr, 0);
1829 tree arg1 = TREE_OPERAND (expr, 1);
1830 const svalue *arg0_sval = get_rvalue (arg0, ctxt);
1831 const svalue *arg1_sval = get_rvalue (arg1, ctxt);
1832 const svalue *sval_binop
1833 = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
1834 arg0_sval, arg1_sval);
1835 return sval_binop;
1838 case COMPONENT_REF:
1839 case MEM_REF:
1841 const region *ref_reg = get_lvalue (pv, ctxt);
1842 return get_store_value (ref_reg, ctxt);
1844 case OBJ_TYPE_REF:
1846 tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
1847 return get_rvalue (expr, ctxt);
1852 /* Get the value of PV within this region_model,
1853 emitting any diagnostics to CTXT. */
1855 const svalue *
1856 region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
1858 if (pv.m_tree == NULL_TREE)
1859 return NULL;
1861 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
1863 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
1865 result_sval = check_for_poison (result_sval, pv.m_tree, ctxt);
1867 return result_sval;
1870 /* Get the value of EXPR within this region_model (assuming the most
1871 recent stack frame if it's a local). */
1873 const svalue *
1874 region_model::get_rvalue (tree expr, region_model_context *ctxt) const
1876 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
1879 /* Return true if this model is on a path with "main" as the entrypoint
1880 (as opposed to one in which we're merely analyzing a subset of the
1881 path through the code). */
1883 bool
1884 region_model::called_from_main_p () const
1886 if (!m_current_frame)
1887 return false;
1888 /* Determine if the oldest stack frame in this model is for "main". */
1889 const frame_region *frame0 = get_frame_at_index (0);
1890 gcc_assert (frame0);
1891 return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
1894 /* Subroutine of region_model::get_store_value for when REG is (or is within)
1895 a global variable that hasn't been touched since the start of this path
1896 (or was implicitly touched due to a call to an unknown function). */
1898 const svalue *
1899 region_model::get_initial_value_for_global (const region *reg) const
1901 /* Get the decl that REG is for (or is within). */
1902 const decl_region *base_reg
1903 = reg->get_base_region ()->dyn_cast_decl_region ();
1904 gcc_assert (base_reg);
1905 tree decl = base_reg->get_decl ();
1907 /* Special-case: to avoid having to explicitly update all previously
1908 untracked globals when calling an unknown fn, they implicitly have
1909 an unknown value if an unknown call has occurred, unless this is
1910 static to-this-TU and hasn't escaped. Globals that have escaped
1911 are explicitly tracked, so we shouldn't hit this case for them. */
1912 if (m_store.called_unknown_fn_p ()
1913 && TREE_PUBLIC (decl)
1914 && !TREE_READONLY (decl))
1915 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
1917 /* If we are on a path from the entrypoint from "main" and we have a
1918 global decl defined in this TU that hasn't been touched yet, then
1919 the initial value of REG can be taken from the initialization value
1920 of the decl. */
1921 if (called_from_main_p () || TREE_READONLY (decl))
1923 /* Attempt to get the initializer value for base_reg. */
1924 if (const svalue *base_reg_init
1925 = base_reg->get_svalue_for_initializer (m_mgr))
1927 if (reg == base_reg)
1928 return base_reg_init;
1929 else
1931 /* Get the value for REG within base_reg_init. */
1932 binding_cluster c (base_reg);
1933 c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
1934 const svalue *sval
1935 = c.get_any_binding (m_mgr->get_store_manager (), reg);
1936 if (sval)
1938 if (reg->get_type ())
1939 sval = m_mgr->get_or_create_cast (reg->get_type (),
1940 sval);
1941 return sval;
1947 /* Otherwise, return INIT_VAL(REG). */
1948 return m_mgr->get_or_create_initial_value (reg);
1951 /* Get a value for REG, looking it up in the store, or otherwise falling
1952 back to "initial" or "unknown" values.
1953 Use CTXT to report any warnings associated with reading from REG. */
1955 const svalue *
1956 region_model::get_store_value (const region *reg,
1957 region_model_context *ctxt) const
1959 check_region_for_read (reg, ctxt);
1961 /* Special-case: handle var_decls in the constant pool. */
1962 if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
1963 if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
1964 return sval;
1966 const svalue *sval
1967 = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
1968 if (sval)
1970 if (reg->get_type ())
1971 sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
1972 return sval;
1975 /* Special-case: read at a constant index within a STRING_CST. */
1976 if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
1977 if (tree byte_offset_cst
1978 = offset_reg->get_byte_offset ()->maybe_get_constant ())
1979 if (const string_region *str_reg
1980 = reg->get_parent_region ()->dyn_cast_string_region ())
1982 tree string_cst = str_reg->get_string_cst ();
1983 if (const svalue *char_sval
1984 = m_mgr->maybe_get_char_from_string_cst (string_cst,
1985 byte_offset_cst))
1986 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
1989 /* Special-case: read the initial char of a STRING_CST. */
1990 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
1991 if (const string_region *str_reg
1992 = cast_reg->get_original_region ()->dyn_cast_string_region ())
1994 tree string_cst = str_reg->get_string_cst ();
1995 tree byte_offset_cst = build_int_cst (integer_type_node, 0);
1996 if (const svalue *char_sval
1997 = m_mgr->maybe_get_char_from_string_cst (string_cst,
1998 byte_offset_cst))
1999 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
2002 /* Otherwise we implicitly have the initial value of the region
2003 (if the cluster had been touched, binding_cluster::get_any_binding,
2004 would have returned UNKNOWN, and we would already have returned
2005 that above). */
2007 /* Handle globals. */
2008 if (reg->get_base_region ()->get_parent_region ()->get_kind ()
2009 == RK_GLOBALS)
2010 return get_initial_value_for_global (reg);
2012 return m_mgr->get_or_create_initial_value (reg);
2015 /* Return false if REG does not exist, true if it may do.
2016 This is for detecting regions within the stack that don't exist anymore
2017 after frames are popped. */
2019 bool
2020 region_model::region_exists_p (const region *reg) const
2022 /* If within a stack frame, check that the stack frame is live. */
2023 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
2025 /* Check that the current frame is the enclosing frame, or is called
2026 by it. */
2027 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
2028 iter_frame = iter_frame->get_calling_frame ())
2029 if (iter_frame == enclosing_frame)
2030 return true;
2031 return false;
2034 return true;
2037 /* Get a region for referencing PTR_SVAL, creating a region if need be, and
2038 potentially generating warnings via CTXT.
2039 PTR_SVAL must be of pointer type.
2040 PTR_TREE if non-NULL can be used when emitting diagnostics. */
2042 const region *
2043 region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
2044 region_model_context *ctxt) const
2046 gcc_assert (ptr_sval);
2047 gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));
2049 /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
2050 as a constraint. This suppresses false positives from
2051 -Wanalyzer-null-dereference for the case where we later have an
2052 if (PTR_SVAL) that would occur if we considered the false branch
2053 and transitioned the malloc state machine from start->null. */
2054 tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
2055 const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
2056 m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
2058 switch (ptr_sval->get_kind ())
2060 default:
2061 break;
2063 case SK_REGION:
2065 const region_svalue *region_sval
2066 = as_a <const region_svalue *> (ptr_sval);
2067 return region_sval->get_pointee ();
2070 case SK_BINOP:
2072 const binop_svalue *binop_sval
2073 = as_a <const binop_svalue *> (ptr_sval);
2074 switch (binop_sval->get_op ())
2076 case POINTER_PLUS_EXPR:
2078 /* If we have a symbolic value expressing pointer arithmentic,
2079 try to convert it to a suitable region. */
2080 const region *parent_region
2081 = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
2082 const svalue *offset = binop_sval->get_arg1 ();
2083 tree type= TREE_TYPE (ptr_sval->get_type ());
2084 return m_mgr->get_offset_region (parent_region, type, offset);
2086 default:
2087 break;
2090 break;
2092 case SK_POISONED:
2094 if (ctxt)
2096 tree ptr = get_representative_tree (ptr_sval);
2097 /* If we can't get a representative tree for PTR_SVAL
2098 (e.g. if it hasn't been bound into the store), then
2099 fall back on PTR_TREE, if non-NULL. */
2100 if (!ptr)
2101 ptr = ptr_tree;
2102 if (ptr)
2104 const poisoned_svalue *poisoned_sval
2105 = as_a <const poisoned_svalue *> (ptr_sval);
2106 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
2107 ctxt->warn (new poisoned_value_diagnostic (ptr, pkind));
2111 break;
2114 return m_mgr->get_symbolic_region (ptr_sval);
2117 /* Attempt to get BITS within any value of REG, as TYPE.
2118 In particular, extract values from compound_svalues for the case
2119 where there's a concrete binding at BITS.
2120 Return an unknown svalue if we can't handle the given case.
2121 Use CTXT to report any warnings associated with reading from REG. */
2123 const svalue *
2124 region_model::get_rvalue_for_bits (tree type,
2125 const region *reg,
2126 const bit_range &bits,
2127 region_model_context *ctxt) const
2129 const svalue *sval = get_store_value (reg, ctxt);
2130 return m_mgr->get_or_create_bits_within (type, bits, sval);
2133 /* A subclass of pending_diagnostic for complaining about writes to
2134 constant regions of memory. */
2136 class write_to_const_diagnostic
2137 : public pending_diagnostic_subclass<write_to_const_diagnostic>
2139 public:
2140 write_to_const_diagnostic (const region *reg, tree decl)
2141 : m_reg (reg), m_decl (decl)
2144 const char *get_kind () const FINAL OVERRIDE
2146 return "write_to_const_diagnostic";
2149 bool operator== (const write_to_const_diagnostic &other) const
2151 return (m_reg == other.m_reg
2152 && m_decl == other.m_decl);
2155 bool emit (rich_location *rich_loc) FINAL OVERRIDE
2157 bool warned = warning_at (rich_loc, OPT_Wanalyzer_write_to_const,
2158 "write to %<const%> object %qE", m_decl);
2159 if (warned)
2160 inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
2161 return warned;
2164 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
2166 return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
2169 private:
2170 const region *m_reg;
2171 tree m_decl;
2174 /* A subclass of pending_diagnostic for complaining about writes to
2175 string literals. */
2177 class write_to_string_literal_diagnostic
2178 : public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
2180 public:
2181 write_to_string_literal_diagnostic (const region *reg)
2182 : m_reg (reg)
2185 const char *get_kind () const FINAL OVERRIDE
2187 return "write_to_string_literal_diagnostic";
2190 bool operator== (const write_to_string_literal_diagnostic &other) const
2192 return m_reg == other.m_reg;
2195 bool emit (rich_location *rich_loc) FINAL OVERRIDE
2197 return warning_at (rich_loc, OPT_Wanalyzer_write_to_string_literal,
2198 "write to string literal");
2199 /* Ideally we would show the location of the STRING_CST as well,
2200 but it is not available at this point. */
2203 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
2205 return ev.formatted_print ("write to string literal here");
2208 private:
2209 const region *m_reg;
2212 /* Use CTXT to warn If DEST_REG is a region that shouldn't be written to. */
2214 void
2215 region_model::check_for_writable_region (const region* dest_reg,
2216 region_model_context *ctxt) const
2218 /* Fail gracefully if CTXT is NULL. */
2219 if (!ctxt)
2220 return;
2222 const region *base_reg = dest_reg->get_base_region ();
2223 switch (base_reg->get_kind ())
2225 default:
2226 break;
2227 case RK_DECL:
2229 const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
2230 tree decl = decl_reg->get_decl ();
2231 /* Warn about writes to const globals.
2232 Don't warn for writes to const locals, and params in particular,
2233 since we would warn in push_frame when setting them up (e.g the
2234 "this" param is "T* const"). */
2235 if (TREE_READONLY (decl)
2236 && is_global_var (decl))
2237 ctxt->warn (new write_to_const_diagnostic (dest_reg, decl));
2239 break;
2240 case RK_STRING:
2241 ctxt->warn (new write_to_string_literal_diagnostic (dest_reg));
2242 break;
2246 /* Get the capacity of REG in bytes. */
2248 const svalue *
2249 region_model::get_capacity (const region *reg) const
2251 switch (reg->get_kind ())
2253 default:
2254 break;
2255 case RK_DECL:
2257 const decl_region *decl_reg = as_a <const decl_region *> (reg);
2258 tree decl = decl_reg->get_decl ();
2259 if (TREE_CODE (decl) == SSA_NAME)
2261 tree type = TREE_TYPE (decl);
2262 tree size = TYPE_SIZE (type);
2263 return get_rvalue (size, NULL);
2265 else
2267 tree size = decl_init_size (decl, false);
2268 if (size)
2269 return get_rvalue (size, NULL);
2272 break;
2273 case RK_SIZED:
2274 /* Look through sized regions to get at the capacity
2275 of the underlying regions. */
2276 return get_capacity (reg->get_parent_region ());
2279 if (const svalue *recorded = get_dynamic_extents (reg))
2280 return recorded;
2282 return m_mgr->get_or_create_unknown_svalue (sizetype);
2285 /* If CTXT is non-NULL, use it to warn about any problems accessing REG,
2286 using DIR to determine if this access is a read or write. */
2288 void
2289 region_model::check_region_access (const region *reg,
2290 enum access_direction dir,
2291 region_model_context *ctxt) const
2293 /* Fail gracefully if CTXT is NULL. */
2294 if (!ctxt)
2295 return;
2297 switch (dir)
2299 default:
2300 gcc_unreachable ();
2301 case DIR_READ:
2302 /* Currently a no-op. */
2303 break;
2304 case DIR_WRITE:
2305 check_for_writable_region (reg, ctxt);
2306 break;
2310 /* If CTXT is non-NULL, use it to warn about any problems writing to REG. */
2312 void
2313 region_model::check_region_for_write (const region *dest_reg,
2314 region_model_context *ctxt) const
2316 check_region_access (dest_reg, DIR_WRITE, ctxt);
2319 /* If CTXT is non-NULL, use it to warn about any problems reading from REG. */
2321 void
2322 region_model::check_region_for_read (const region *src_reg,
2323 region_model_context *ctxt) const
2325 check_region_access (src_reg, DIR_READ, ctxt);
2328 /* Set the value of the region given by LHS_REG to the value given
2329 by RHS_SVAL.
2330 Use CTXT to report any warnings associated with writing to LHS_REG. */
2332 void
2333 region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
2334 region_model_context *ctxt)
2336 gcc_assert (lhs_reg);
2337 gcc_assert (rhs_sval);
2339 check_region_for_write (lhs_reg, ctxt);
2341 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
2342 ctxt ? ctxt->get_uncertainty () : NULL);
2345 /* Set the value of the region given by LHS to the value given by RHS. */
2347 void
2348 region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
2350 const region *lhs_reg = get_lvalue (lhs, ctxt);
2351 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
2352 gcc_assert (lhs_reg);
2353 gcc_assert (rhs_sval);
2354 set_value (lhs_reg, rhs_sval, ctxt);
2357 /* Remove all bindings overlapping REG within the store. */
2359 void
2360 region_model::clobber_region (const region *reg)
2362 m_store.clobber_region (m_mgr->get_store_manager(), reg);
2365 /* Remove any bindings for REG within the store. */
2367 void
2368 region_model::purge_region (const region *reg)
2370 m_store.purge_region (m_mgr->get_store_manager(), reg);
2373 /* Fill REG with SVAL. */
2375 void
2376 region_model::fill_region (const region *reg, const svalue *sval)
2378 m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
2381 /* Zero-fill REG. */
2383 void
2384 region_model::zero_fill_region (const region *reg)
2386 m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
2389 /* Mark REG as having unknown content. */
2391 void
2392 region_model::mark_region_as_unknown (const region *reg,
2393 uncertainty_t *uncertainty)
2395 m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
2396 uncertainty);
2399 /* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
2400 this model. */
2402 tristate
2403 region_model::eval_condition (const svalue *lhs,
2404 enum tree_code op,
2405 const svalue *rhs) const
2407 /* For now, make no attempt to capture constraints on floating-point
2408 values. */
2409 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
2410 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
2411 return tristate::unknown ();
2413 tristate ts = eval_condition_without_cm (lhs, op, rhs);
2414 if (ts.is_known ())
2415 return ts;
2417 /* Otherwise, try constraints. */
2418 return m_constraints->eval_condition (lhs, op, rhs);
2421 /* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
2422 this model, without resorting to the constraint_manager.
2424 This is exposed so that impl_region_model_context::on_state_leak can
2425 check for equality part-way through region_model::purge_unused_svalues
2426 without risking creating new ECs. */
2428 tristate
2429 region_model::eval_condition_without_cm (const svalue *lhs,
2430 enum tree_code op,
2431 const svalue *rhs) const
2433 gcc_assert (lhs);
2434 gcc_assert (rhs);
2436 /* See what we know based on the values. */
2438 /* For now, make no attempt to capture constraints on floating-point
2439 values. */
2440 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
2441 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
2442 return tristate::unknown ();
2444 /* Unwrap any unmergeable values. */
2445 lhs = lhs->unwrap_any_unmergeable ();
2446 rhs = rhs->unwrap_any_unmergeable ();
2448 if (lhs == rhs)
2450 /* If we have the same svalue, then we have equality
2451 (apart from NaN-handling).
2452 TODO: should this definitely be the case for poisoned values? */
2453 /* Poisoned and unknown values are "unknowable". */
2454 if (lhs->get_kind () == SK_POISONED
2455 || lhs->get_kind () == SK_UNKNOWN)
2456 return tristate::TS_UNKNOWN;
2458 switch (op)
2460 case EQ_EXPR:
2461 case GE_EXPR:
2462 case LE_EXPR:
2463 return tristate::TS_TRUE;
2465 case NE_EXPR:
2466 case GT_EXPR:
2467 case LT_EXPR:
2468 return tristate::TS_FALSE;
2470 default:
2471 /* For other ops, use the logic below. */
2472 break;
2476 /* If we have a pair of region_svalues, compare them. */
2477 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
2478 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
2480 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
2481 if (res.is_known ())
2482 return res;
2483 /* Otherwise, only known through constraints. */
2486 /* If we have a pair of constants, compare them. */
2487 if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
2488 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
2489 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
2491 /* Handle comparison of a region_svalue against zero. */
2493 if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
2494 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
2495 if (zerop (cst_rhs->get_constant ()))
2497 /* A region_svalue is a non-NULL pointer, except in certain
2498 special cases (see the comment for region::non_null_p. */
2499 const region *pointee = ptr->get_pointee ();
2500 if (pointee->non_null_p ())
2502 switch (op)
2504 default:
2505 gcc_unreachable ();
2507 case EQ_EXPR:
2508 case GE_EXPR:
2509 case LE_EXPR:
2510 return tristate::TS_FALSE;
2512 case NE_EXPR:
2513 case GT_EXPR:
2514 case LT_EXPR:
2515 return tristate::TS_TRUE;
2520 /* Handle rejection of equality for comparisons of the initial values of
2521 "external" values (such as params) with the address of locals. */
2522 if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
2523 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
2525 tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
2526 if (res.is_known ())
2527 return res;
2529 if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
2530 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
2532 tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
2533 if (res.is_known ())
2534 return res;
2537 if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
2538 if (tree rhs_cst = rhs->maybe_get_constant ())
2540 tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
2541 if (res.is_known ())
2542 return res;
2545 return tristate::TS_UNKNOWN;
2548 /* Subroutine of region_model::eval_condition_without_cm, for rejecting
2549 equality of INIT_VAL(PARM) with &LOCAL. */
2551 tristate
2552 region_model::compare_initial_and_pointer (const initial_svalue *init,
2553 const region_svalue *ptr) const
2555 const region *pointee = ptr->get_pointee ();
2557 /* If we have a pointer to something within a stack frame, it can't be the
2558 initial value of a param. */
2559 if (pointee->maybe_get_frame_region ())
2560 if (init->initial_value_of_param_p ())
2561 return tristate::TS_FALSE;
2563 return tristate::TS_UNKNOWN;
2566 /* Handle various constraints of the form:
2567 LHS: ((bool)INNER_LHS INNER_OP INNER_RHS))
2568 OP : == or !=
2569 RHS: zero
2570 and (with a cast):
2571 LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
2572 OP : == or !=
2573 RHS: zero
2574 by adding constraints for INNER_LHS INNEROP INNER_RHS.
2576 Return true if this function can fully handle the constraint; if
2577 so, add the implied constraint(s) and write true to *OUT if they
2578 are consistent with existing constraints, or write false to *OUT
2579 if they contradicts existing constraints.
2581 Return false for cases that this function doeesn't know how to handle.
2583 For example, if we're checking a stored conditional, we'll have
2584 something like:
2585 LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
2586 OP : NE_EXPR
2587 RHS: zero
2588 which this function can turn into an add_constraint of:
2589 (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
2591 Similarly, optimized && and || conditionals lead to e.g.
2592 if (p && q)
2593 becoming gimple like this:
2594 _1 = p_6 == 0B;
2595 _2 = q_8 == 0B
2596 _3 = _1 | _2
2597 On the "_3 is false" branch we can have constraints of the form:
2598 ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
2599 | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
2600 == 0
2601 which implies that both _1 and _2 are false,
2602 which this function can turn into a pair of add_constraints of
2603 (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
2604 and:
2605 (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */
2607 bool
2608 region_model::add_constraints_from_binop (const svalue *outer_lhs,
2609 enum tree_code outer_op,
2610 const svalue *outer_rhs,
2611 bool *out,
2612 region_model_context *ctxt)
2614 while (const svalue *cast = outer_lhs->maybe_undo_cast ())
2615 outer_lhs = cast;
2616 const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
2617 if (!binop_sval)
2618 return false;
2619 if (!outer_rhs->all_zeroes_p ())
2620 return false;
2622 const svalue *inner_lhs = binop_sval->get_arg0 ();
2623 enum tree_code inner_op = binop_sval->get_op ();
2624 const svalue *inner_rhs = binop_sval->get_arg1 ();
2626 if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
2627 return false;
2629 /* We have either
2630 - "OUTER_LHS != false" (i.e. OUTER is true), or
2631 - "OUTER_LHS == false" (i.e. OUTER is false). */
2632 bool is_true = outer_op == NE_EXPR;
2634 switch (inner_op)
2636 default:
2637 return false;
2639 case EQ_EXPR:
2640 case NE_EXPR:
2642 /* ...and "(inner_lhs OP inner_rhs) == 0"
2643 then (inner_lhs OP inner_rhs) must have the same
2644 logical value as LHS. */
2645 if (!is_true)
2646 inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
2647 *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
2648 return true;
2650 break;
2652 case BIT_AND_EXPR:
2653 if (is_true)
2655 /* ...and "(inner_lhs & inner_rhs) != 0"
2656 then both inner_lhs and inner_rhs must be true. */
2657 const svalue *false_sval
2658 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
2659 bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
2660 bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
2661 *out = sat1 && sat2;
2662 return true;
2664 return false;
2666 case BIT_IOR_EXPR:
2667 if (!is_true)
2669 /* ...and "(inner_lhs | inner_rhs) == 0"
2670 i.e. "(inner_lhs | inner_rhs)" is false
2671 then both inner_lhs and inner_rhs must be false. */
2672 const svalue *false_sval
2673 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
2674 bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
2675 bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
2676 *out = sat1 && sat2;
2677 return true;
2679 return false;
2683 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
2684 If it is consistent with existing constraints, add it, and return true.
2685 Return false if it contradicts existing constraints.
2686 Use CTXT for reporting any diagnostics associated with the accesses. */
2688 bool
2689 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
2690 region_model_context *ctxt)
2692 /* For now, make no attempt to capture constraints on floating-point
2693 values. */
2694 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
2695 return true;
2697 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
2698 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
2700 return add_constraint (lhs_sval, op, rhs_sval, ctxt);
2703 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
2704 If it is consistent with existing constraints, add it, and return true.
2705 Return false if it contradicts existing constraints.
2706 Use CTXT for reporting any diagnostics associated with the accesses. */
2708 bool
2709 region_model::add_constraint (const svalue *lhs,
2710 enum tree_code op,
2711 const svalue *rhs,
2712 region_model_context *ctxt)
2714 tristate t_cond = eval_condition (lhs, op, rhs);
2716 /* If we already have the condition, do nothing. */
2717 if (t_cond.is_true ())
2718 return true;
2720 /* Reject a constraint that would contradict existing knowledge, as
2721 unsatisfiable. */
2722 if (t_cond.is_false ())
2723 return false;
2725 bool out;
2726 if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
2727 return out;
2729 /* Store the constraint. */
2730 m_constraints->add_constraint (lhs, op, rhs);
2732 /* Notify the context, if any. This exists so that the state machines
2733 in a program_state can be notified about the condition, and so can
2734 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
2735 when synthesizing constraints as above. */
2736 if (ctxt)
2737 ctxt->on_condition (lhs, op, rhs);
2739 /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
2740 the case where REGION is heap-allocated and thus could be NULL). */
2741 if (tree rhs_cst = rhs->maybe_get_constant ())
2742 if (op == EQ_EXPR && zerop (rhs_cst))
2743 if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
2744 unset_dynamic_extents (region_sval->get_pointee ());
2746 return true;
2749 /* As above, but when returning false, if OUT is non-NULL, write a
2750 new rejected_constraint to *OUT. */
2752 bool
2753 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
2754 region_model_context *ctxt,
2755 rejected_constraint **out)
2757 bool sat = add_constraint (lhs, op, rhs, ctxt);
2758 if (!sat && out)
2759 *out = new rejected_constraint (*this, lhs, op, rhs);
2760 return sat;
2763 /* Determine what is known about the condition "LHS OP RHS" within
2764 this model.
2765 Use CTXT for reporting any diagnostics associated with the accesses. */
2767 tristate
2768 region_model::eval_condition (tree lhs,
2769 enum tree_code op,
2770 tree rhs,
2771 region_model_context *ctxt)
2773 /* For now, make no attempt to model constraints on floating-point
2774 values. */
2775 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
2776 return tristate::unknown ();
2778 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
2781 /* Implementation of region_model::get_representative_path_var.
2782 Attempt to return a path_var that represents SVAL, or return NULL_TREE.
2783 Use VISITED to prevent infinite mutual recursion with the overload for
2784 regions. */
2786 path_var
2787 region_model::get_representative_path_var_1 (const svalue *sval,
2788 svalue_set *visited) const
2790 gcc_assert (sval);
2792 /* Prevent infinite recursion. */
2793 if (visited->contains (sval))
2794 return path_var (NULL_TREE, 0);
2795 visited->add (sval);
2797 /* Handle casts by recursion into get_representative_path_var. */
2798 if (const svalue *cast_sval = sval->maybe_undo_cast ())
2800 path_var result = get_representative_path_var (cast_sval, visited);
2801 tree orig_type = sval->get_type ();
2802 /* If necessary, wrap the result in a cast. */
2803 if (result.m_tree && orig_type)
2804 result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
2805 return result;
2808 auto_vec<path_var> pvs;
2809 m_store.get_representative_path_vars (this, visited, sval, &pvs);
2811 if (tree cst = sval->maybe_get_constant ())
2812 pvs.safe_push (path_var (cst, 0));
2814 /* Handle string literals and various other pointers. */
2815 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
2817 const region *reg = ptr_sval->get_pointee ();
2818 if (path_var pv = get_representative_path_var (reg, visited))
2819 return path_var (build1 (ADDR_EXPR,
2820 sval->get_type (),
2821 pv.m_tree),
2822 pv.m_stack_depth);
2825 /* If we have a sub_svalue, look for ways to represent the parent. */
2826 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
2828 const svalue *parent_sval = sub_sval->get_parent ();
2829 const region *subreg = sub_sval->get_subregion ();
2830 if (path_var parent_pv
2831 = get_representative_path_var (parent_sval, visited))
2832 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
2833 return path_var (build3 (COMPONENT_REF,
2834 sval->get_type (),
2835 parent_pv.m_tree,
2836 field_reg->get_field (),
2837 NULL_TREE),
2838 parent_pv.m_stack_depth);
2841 if (pvs.length () < 1)
2842 return path_var (NULL_TREE, 0);
2844 pvs.qsort (readability_comparator);
2845 return pvs[0];
2848 /* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
2849 Use VISITED to prevent infinite mutual recursion with the overload for
2850 regions
2852 This function defers to get_representative_path_var_1 to do the work;
2853 it adds verification that get_representative_path_var_1 returned a tree
2854 of the correct type. */
2856 path_var
2857 region_model::get_representative_path_var (const svalue *sval,
2858 svalue_set *visited) const
2860 if (sval == NULL)
2861 return path_var (NULL_TREE, 0);
2863 tree orig_type = sval->get_type ();
2865 path_var result = get_representative_path_var_1 (sval, visited);
2867 /* Verify that the result has the same type as SVAL, if any. */
2868 if (result.m_tree && orig_type)
2869 gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
2871 return result;
2874 /* Attempt to return a tree that represents SVAL, or return NULL_TREE.
2876 Strip off any top-level cast, to avoid messages like
2877 double-free of '(void *)ptr'
2878 from analyzer diagnostics. */
2880 tree
2881 region_model::get_representative_tree (const svalue *sval) const
2883 svalue_set visited;
2884 tree expr = get_representative_path_var (sval, &visited).m_tree;
2886 /* Strip off any top-level cast. */
2887 if (expr && TREE_CODE (expr) == NOP_EXPR)
2888 expr = TREE_OPERAND (expr, 0);
2890 return fixup_tree_for_diagnostic (expr);
2893 /* Implementation of region_model::get_representative_path_var.
2895 Attempt to return a path_var that represents REG, or return
2896 the NULL path_var.
2897 For example, a region for a field of a local would be a path_var
2898 wrapping a COMPONENT_REF.
2899 Use VISITED to prevent infinite mutual recursion with the overload for
2900 svalues. */
2902 path_var
2903 region_model::get_representative_path_var_1 (const region *reg,
2904 svalue_set *visited) const
2906 switch (reg->get_kind ())
2908 default:
2909 gcc_unreachable ();
2911 case RK_FRAME:
2912 case RK_GLOBALS:
2913 case RK_CODE:
2914 case RK_HEAP:
2915 case RK_STACK:
2916 case RK_ROOT:
2917 /* Regions that represent memory spaces are not expressible as trees. */
2918 return path_var (NULL_TREE, 0);
2920 case RK_FUNCTION:
2922 const function_region *function_reg
2923 = as_a <const function_region *> (reg);
2924 return path_var (function_reg->get_fndecl (), 0);
2926 case RK_LABEL:
2928 const label_region *label_reg = as_a <const label_region *> (reg);
2929 return path_var (label_reg->get_label (), 0);
2932 case RK_SYMBOLIC:
2934 const symbolic_region *symbolic_reg
2935 = as_a <const symbolic_region *> (reg);
2936 const svalue *pointer = symbolic_reg->get_pointer ();
2937 path_var pointer_pv = get_representative_path_var (pointer, visited);
2938 if (!pointer_pv)
2939 return path_var (NULL_TREE, 0);
2940 tree offset = build_int_cst (pointer->get_type (), 0);
2941 return path_var (build2 (MEM_REF,
2942 reg->get_type (),
2943 pointer_pv.m_tree,
2944 offset),
2945 pointer_pv.m_stack_depth);
2947 case RK_DECL:
2949 const decl_region *decl_reg = as_a <const decl_region *> (reg);
2950 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
2952 case RK_FIELD:
2954 const field_region *field_reg = as_a <const field_region *> (reg);
2955 path_var parent_pv
2956 = get_representative_path_var (reg->get_parent_region (), visited);
2957 if (!parent_pv)
2958 return path_var (NULL_TREE, 0);
2959 return path_var (build3 (COMPONENT_REF,
2960 reg->get_type (),
2961 parent_pv.m_tree,
2962 field_reg->get_field (),
2963 NULL_TREE),
2964 parent_pv.m_stack_depth);
2967 case RK_ELEMENT:
2969 const element_region *element_reg
2970 = as_a <const element_region *> (reg);
2971 path_var parent_pv
2972 = get_representative_path_var (reg->get_parent_region (), visited);
2973 if (!parent_pv)
2974 return path_var (NULL_TREE, 0);
2975 path_var index_pv
2976 = get_representative_path_var (element_reg->get_index (), visited);
2977 if (!index_pv)
2978 return path_var (NULL_TREE, 0);
2979 return path_var (build4 (ARRAY_REF,
2980 reg->get_type (),
2981 parent_pv.m_tree, index_pv.m_tree,
2982 NULL_TREE, NULL_TREE),
2983 parent_pv.m_stack_depth);
2986 case RK_OFFSET:
2988 const offset_region *offset_reg
2989 = as_a <const offset_region *> (reg);
2990 path_var parent_pv
2991 = get_representative_path_var (reg->get_parent_region (), visited);
2992 if (!parent_pv)
2993 return path_var (NULL_TREE, 0);
2994 path_var offset_pv
2995 = get_representative_path_var (offset_reg->get_byte_offset (),
2996 visited);
2997 if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
2998 return path_var (NULL_TREE, 0);
2999 tree addr_parent = build1 (ADDR_EXPR,
3000 build_pointer_type (reg->get_type ()),
3001 parent_pv.m_tree);
3002 return path_var (build2 (MEM_REF,
3003 reg->get_type (),
3004 addr_parent, offset_pv.m_tree),
3005 parent_pv.m_stack_depth);
3008 case RK_SIZED:
3009 return path_var (NULL_TREE, 0);
3011 case RK_CAST:
3013 path_var parent_pv
3014 = get_representative_path_var (reg->get_parent_region (), visited);
3015 if (!parent_pv)
3016 return path_var (NULL_TREE, 0);
3017 return path_var (build1 (NOP_EXPR,
3018 reg->get_type (),
3019 parent_pv.m_tree),
3020 parent_pv.m_stack_depth);
3023 case RK_HEAP_ALLOCATED:
3024 case RK_ALLOCA:
3025 /* No good way to express heap-allocated/alloca regions as trees. */
3026 return path_var (NULL_TREE, 0);
3028 case RK_STRING:
3030 const string_region *string_reg = as_a <const string_region *> (reg);
3031 return path_var (string_reg->get_string_cst (), 0);
3034 case RK_UNKNOWN:
3035 return path_var (NULL_TREE, 0);
3039 /* Attempt to return a path_var that represents REG, or return
3040 the NULL path_var.
3041 For example, a region for a field of a local would be a path_var
3042 wrapping a COMPONENT_REF.
3043 Use VISITED to prevent infinite mutual recursion with the overload for
3044 svalues.
3046 This function defers to get_representative_path_var_1 to do the work;
3047 it adds verification that get_representative_path_var_1 returned a tree
3048 of the correct type. */
3050 path_var
3051 region_model::get_representative_path_var (const region *reg,
3052 svalue_set *visited) const
3054 path_var result = get_representative_path_var_1 (reg, visited);
3056 /* Verify that the result has the same type as REG, if any. */
3057 if (result.m_tree && reg->get_type ())
3058 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
3060 return result;
3063 /* Update this model for any phis in SNODE, assuming we came from
3064 LAST_CFG_SUPEREDGE. */
3066 void
3067 region_model::update_for_phis (const supernode *snode,
3068 const cfg_superedge *last_cfg_superedge,
3069 region_model_context *ctxt)
3071 gcc_assert (last_cfg_superedge);
3073 /* Copy this state and pass it to handle_phi so that all of the phi stmts
3074 are effectively handled simultaneously. */
3075 const region_model old_state (*this);
3077 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
3078 !gsi_end_p (gpi); gsi_next (&gpi))
3080 gphi *phi = gpi.phi ();
3082 tree src = last_cfg_superedge->get_phi_arg (phi);
3083 tree lhs = gimple_phi_result (phi);
3085 /* Update next_state based on phi and old_state. */
3086 handle_phi (phi, lhs, src, old_state, ctxt);
3090 /* Attempt to update this model for taking EDGE (where the last statement
3091 was LAST_STMT), returning true if the edge can be taken, false
3092 otherwise.
3093 When returning false, if OUT is non-NULL, write a new rejected_constraint
3094 to it.
3096 For CFG superedges where LAST_STMT is a conditional or a switch
3097 statement, attempt to add the relevant conditions for EDGE to this
3098 model, returning true if they are feasible, or false if they are
3099 impossible.
3101 For call superedges, push frame information and store arguments
3102 into parameters.
3104 For return superedges, pop frame information and store return
3105 values into any lhs.
3107 Rejection of call/return superedges happens elsewhere, in
3108 program_point::on_edge (i.e. based on program point, rather
3109 than program state). */
3111 bool
3112 region_model::maybe_update_for_edge (const superedge &edge,
3113 const gimple *last_stmt,
3114 region_model_context *ctxt,
3115 rejected_constraint **out)
3117 /* Handle frame updates for interprocedural edges. */
3118 switch (edge.m_kind)
3120 default:
3121 break;
3123 case SUPEREDGE_CALL:
3125 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
3126 update_for_call_superedge (*call_edge, ctxt);
3128 break;
3130 case SUPEREDGE_RETURN:
3132 const return_superedge *return_edge
3133 = as_a <const return_superedge *> (&edge);
3134 update_for_return_superedge (*return_edge, ctxt);
3136 break;
3138 case SUPEREDGE_INTRAPROCEDURAL_CALL:
3140 const callgraph_superedge *cg_sedge
3141 = as_a <const callgraph_superedge *> (&edge);
3142 update_for_call_summary (*cg_sedge, ctxt);
3144 break;
3147 if (last_stmt == NULL)
3148 return true;
3150 /* Apply any constraints for conditionals/switch statements. */
3152 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
3154 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
3155 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
3158 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
3160 const switch_cfg_superedge *switch_sedge
3161 = as_a <const switch_cfg_superedge *> (&edge);
3162 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
3163 ctxt, out);
3166 /* Apply any constraints due to an exception being thrown. */
3167 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
3168 if (cfg_sedge->get_flags () & EDGE_EH)
3169 return apply_constraints_for_exception (last_stmt, ctxt, out);
3171 return true;
3174 /* Push a new frame_region on to the stack region.
3175 Populate the frame_region with child regions for the function call's
3176 parameters, using values from the arguments at the callsite in the
3177 caller's frame. */
3179 void
3180 region_model::update_for_gcall (const gcall *call_stmt,
3181 region_model_context *ctxt,
3182 function *callee)
3184 /* Build a vec of argument svalues, using the current top
3185 frame for resolving tree expressions. */
3186 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
3188 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
3190 tree arg = gimple_call_arg (call_stmt, i);
3191 arg_svals.quick_push (get_rvalue (arg, ctxt));
3194 if(!callee)
3196 /* Get the function * from the gcall. */
3197 tree fn_decl = get_fndecl_for_call (call_stmt,ctxt);
3198 callee = DECL_STRUCT_FUNCTION (fn_decl);
3201 push_frame (callee, &arg_svals, ctxt);
3204 /* Pop the top-most frame_region from the stack, and copy the return
3205 region's values (if any) into the region for the lvalue of the LHS of
3206 the call (if any). */
3208 void
3209 region_model::update_for_return_gcall (const gcall *call_stmt,
3210 region_model_context *ctxt)
3212 /* Get the region for the result of the call, within the caller frame. */
3213 const region *result_dst_reg = NULL;
3214 tree lhs = gimple_call_lhs (call_stmt);
3215 if (lhs)
3217 /* Normally we access the top-level frame, which is:
3218 path_var (expr, get_stack_depth () - 1)
3219 whereas here we need the caller frame, hence "- 2" here. */
3220 gcc_assert (get_stack_depth () >= 2);
3221 result_dst_reg = get_lvalue (path_var (lhs, get_stack_depth () - 2),
3222 ctxt);
3225 pop_frame (result_dst_reg, NULL, ctxt);
3228 /* Extract calling information from the superedge and update the model for the
3229 call */
3231 void
3232 region_model::update_for_call_superedge (const call_superedge &call_edge,
3233 region_model_context *ctxt)
3235 const gcall *call_stmt = call_edge.get_call_stmt ();
3236 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
3239 /* Extract calling information from the return superedge and update the model
3240 for the returning call */
3242 void
3243 region_model::update_for_return_superedge (const return_superedge &return_edge,
3244 region_model_context *ctxt)
3246 const gcall *call_stmt = return_edge.get_call_stmt ();
3247 update_for_return_gcall (call_stmt, ctxt);
3250 /* Update this region_model with a summary of the effect of calling
3251 and returning from CG_SEDGE.
3253 TODO: Currently this is extremely simplistic: we merely set the
3254 return value to "unknown". A proper implementation would e.g. update
3255 sm-state, and presumably be reworked to support multiple outcomes. */
3257 void
3258 region_model::update_for_call_summary (const callgraph_superedge &cg_sedge,
3259 region_model_context *ctxt)
3261 /* For now, set any return value to "unknown". */
3262 const gcall *call_stmt = cg_sedge.get_call_stmt ();
3263 tree lhs = gimple_call_lhs (call_stmt);
3264 if (lhs)
3265 mark_region_as_unknown (get_lvalue (lhs, ctxt),
3266 ctxt ? ctxt->get_uncertainty () : NULL);
3268 // TODO: actually implement some kind of summary here
3271 /* Given a true or false edge guarded by conditional statement COND_STMT,
3272 determine appropriate constraints for the edge to be taken.
3274 If they are feasible, add the constraints and return true.
3276 Return false if the constraints contradict existing knowledge
3277 (and so the edge should not be taken).
3278 When returning false, if OUT is non-NULL, write a new rejected_constraint
3279 to it. */
3281 bool
3282 region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
3283 const gcond *cond_stmt,
3284 region_model_context *ctxt,
3285 rejected_constraint **out)
3287 ::edge cfg_edge = sedge.get_cfg_edge ();
3288 gcc_assert (cfg_edge != NULL);
3289 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
3291 enum tree_code op = gimple_cond_code (cond_stmt);
3292 tree lhs = gimple_cond_lhs (cond_stmt);
3293 tree rhs = gimple_cond_rhs (cond_stmt);
3294 if (cfg_edge->flags & EDGE_FALSE_VALUE)
3295 op = invert_tree_comparison (op, false /* honor_nans */);
3296 return add_constraint (lhs, op, rhs, ctxt, out);
/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
   for the edge to be taken.

   If they are feasible, add the constraints and return true.

   Return false if the constraints contradict existing knowledge
   (and so the edge should not be taken).
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to it.  */

bool
region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
					     const gswitch *switch_stmt,
					     region_model_context *ctxt,
					     rejected_constraint **out)
{
  tree index  = gimple_switch_index (switch_stmt);
  tree case_label = edge.get_case_label ();
  gcc_assert (TREE_CODE (case_label) == CASE_LABEL_EXPR);
  tree lower_bound = CASE_LOW (case_label);
  tree upper_bound = CASE_HIGH (case_label);
  if (lower_bound)
    {
      if (upper_bound)
	{
	  /* Range case ("case LOW ... HIGH:"): constrain INDEX to lie
	     within [LOW, HIGH].  */
	  if (!add_constraint (index, GE_EXPR, lower_bound, ctxt, out))
	    return false;
	  return add_constraint (index, LE_EXPR, upper_bound, ctxt, out);
	}
      else
	/* Single-value case: constrain INDEX == LOW.  */
	return add_constraint (index, EQ_EXPR, lower_bound, ctxt, out);
    }
  else
    {
      /* The default case (CASE_LOW is NULL).
	 Add exclusions based on the other cases; the loop starts at 1
	 since label 0 is the default case itself.  */
      for (unsigned other_idx = 1;
	   other_idx < gimple_switch_num_labels (switch_stmt);
	   other_idx++)
	{
	  tree other_label = gimple_switch_label (switch_stmt,
						  other_idx);
	  tree other_lower_bound = CASE_LOW (other_label);
	  tree other_upper_bound = CASE_HIGH (other_label);
	  gcc_assert (other_lower_bound);
	  if (other_upper_bound)
	    {
	      /* Exclude this range-valued case.
		 For now, we just exclude the boundary values.
		 TODO: exclude the values within the region.  */
	      if (!add_constraint (index, NE_EXPR, other_lower_bound,
				   ctxt, out))
		return false;
	      if (!add_constraint (index, NE_EXPR, other_upper_bound,
				   ctxt, out))
		return false;
	    }
	  else
	    /* Exclude this single-valued case.  */
	    if (!add_constraint (index, NE_EXPR, other_lower_bound, ctxt, out))
	      return false;
	}
      return true;
    }
}
/* Apply any constraints due to an exception being thrown at LAST_STMT.

   If they are feasible, add the constraints and return true.

   Return false if the constraints contradict existing knowledge
   (and so the edge should not be taken).
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to it.  */

bool
region_model::apply_constraints_for_exception (const gimple *last_stmt,
					       region_model_context *ctxt,
					       rejected_constraint **out)
{
  gcc_assert (last_stmt);
  /* Only "operator new"/"operator new []" calls get special treatment;
     any other throwing statement adds no constraints.  */
  if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
    if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
      if (is_named_call_p (callee_fndecl, "operator new", call, 1)
	  || is_named_call_p (callee_fndecl, "operator new []", call, 1))
	{
	  /* We have an exception thrown from operator new.
	     Add a constraint that the result was NULL, to avoid a false
	     leak report due to the result being lost when following
	     the EH edge.  */
	  if (tree lhs = gimple_call_lhs (call))
	    return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
	  return true;
	}
  return true;
}
3398 /* For use with push_frame when handling a top-level call within the analysis.
3399 PARAM has a defined but unknown initial value.
3400 Anything it points to has escaped, since the calling context "knows"
3401 the pointer, and thus calls to unknown functions could read/write into
3402 the region. */
3404 void
3405 region_model::on_top_level_param (tree param,
3406 region_model_context *ctxt)
3408 if (POINTER_TYPE_P (TREE_TYPE (param)))
3410 const region *param_reg = get_lvalue (param, ctxt);
3411 const svalue *init_ptr_sval
3412 = m_mgr->get_or_create_initial_value (param_reg);
3413 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
3414 m_store.mark_as_escaped (pointee_reg);
/* Update this region_model to reflect pushing a frame onto the stack
   for a call to FUN.

   If ARG_SVALS is non-NULL, use it to populate the parameters
   in the new frame.
   Otherwise, the params have their initial_svalues.

   Return the frame_region for the new frame.  */

const region *
region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
			  region_model_context *ctxt)
{
  m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
  if (arg_svals)
    {
      /* Arguments supplied from a caller frame.  */
      tree fndecl = fun->decl;
      unsigned idx = 0;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
	   iter_parm = DECL_CHAIN (iter_parm), ++idx)
	{
	  /* If there's a mismatching declaration, the call stmt might
	     not have enough args.  Handle this case by leaving the
	     rest of the params as uninitialized.  */
	  if (idx >= arg_svals->length ())
	    break;
	  /* Prefer binding the value to the default-def SSA name for
	     the param, if one exists, rather than the PARM_DECL.  */
	  tree parm_lval = iter_parm;
	  if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
	    parm_lval = parm_default_ssa;
	  const region *parm_reg = get_lvalue (parm_lval, ctxt);
	  const svalue *arg_sval = (*arg_svals)[idx];
	  set_value (parm_reg, arg_sval, ctxt);
	}
    }
  else
    {
      /* Otherwise we have a top-level call within the analysis.  The params
	 have defined but unknown initial values.
	 Anything they point to has escaped.  */
      tree fndecl = fun->decl;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
	   iter_parm = DECL_CHAIN (iter_parm))
	{
	  if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
	    on_top_level_param (parm_default_ssa, ctxt);
	  else
	    on_top_level_param (iter_parm, ctxt);
	}
    }

  return m_current_frame;
}
3472 /* Get the function of the top-most frame in this region_model's stack.
3473 There must be such a frame. */
3475 function *
3476 region_model::get_current_function () const
3478 const frame_region *frame = get_current_frame ();
3479 gcc_assert (frame);
3480 return frame->get_function ();
/* Pop the topmost frame_region from this region_model's stack;

   If RESULT_DST_REG is non-null, copy any return value from the frame
   into RESULT_DST_REG's region.
   If OUT_RESULT is non-null, copy any return value from the frame
   into *OUT_RESULT.

   Purge the frame region and all its descendent regions.
   Convert any pointers that point into such regions into
   POISON_KIND_POPPED_STACK svalues.  */

void
region_model::pop_frame (const region *result_dst_reg,
			 const svalue **out_result,
			 region_model_context *ctxt)
{
  gcc_assert (m_current_frame);

  /* Evaluate the result, within the callee frame.  This must happen
     before the frame is popped, while the callee's bindings and SSA
     names are still live.  */
  const frame_region *frame_reg = m_current_frame;
  tree fndecl = m_current_frame->get_function ()->decl;
  tree result = DECL_RESULT (fndecl);
  if (result && TREE_TYPE (result) != void_type_node)
    {
      if (result_dst_reg)
	{
	  /* Copy the result to RESULT_DST_REG.  */
	  copy_region (result_dst_reg,
		       get_lvalue (result, ctxt),
		       ctxt);
	}
      if (out_result)
	*out_result = get_rvalue (result, ctxt);
    }

  /* Pop the frame.  */
  m_current_frame = m_current_frame->get_calling_frame ();

  /* Purge the popped frame's bindings, poisoning any dangling pointers
     into it.  */
  unbind_region_and_descendents (frame_reg,POISON_KIND_POPPED_STACK);
}
3524 /* Get the number of frames in this region_model's stack. */
3527 region_model::get_stack_depth () const
3529 const frame_region *frame = get_current_frame ();
3530 if (frame)
3531 return frame->get_stack_depth ();
3532 else
3533 return 0;
/* Get the frame_region with the given index within the stack.
   The frame_region must exist.
   Walks from the innermost frame outwards via the calling-frame links
   until the frame with INDEX is reached.  */

const frame_region *
region_model::get_frame_at_index (int index) const
{
  const frame_region *frame = get_current_frame ();
  gcc_assert (frame);
  gcc_assert (index >= 0);
  gcc_assert (index <= frame->get_index ());
  while (index != frame->get_index ())
    {
      frame = frame->get_calling_frame ();
      gcc_assert (frame);
    }
  return frame;
}
/* Unbind svalues for any regions in REG and below.
   Find any pointers to such regions; convert them to
   poisoned values of kind PKIND.
   Also purge any dynamic extents.  */

void
region_model::unbind_region_and_descendents (const region *reg,
					     enum poison_kind pkind)
{
  /* Gather a set of base regions to be unbound first, then purge them
     in a second pass, presumably so the cluster map is not mutated
     while being iterated — TODO confirm.  */
  hash_set<const region *> base_regs;
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *iter_base_reg = (*iter).first;
      if (iter_base_reg->descendent_of_p (reg))
	base_regs.add (iter_base_reg);
    }
  for (hash_set<const region *>::iterator iter = base_regs.begin ();
       iter != base_regs.end (); ++iter)
    m_store.purge_cluster (*iter);

  /* Find any pointers to REG or its descendents; convert to poisoned.  */
  poison_any_pointers_to_descendents (reg, pkind);

  /* Purge dynamic extents of any base regions in REG and below
     (e.g. VLAs and alloca stack regions).  */
  for (auto iter : m_dynamic_extents)
    {
      const region *iter_reg = iter.first;
      if (iter_reg->descendent_of_p (reg))
	unset_dynamic_extents (iter_reg);
    }
}
/* Implementation of BindingVisitor.
   Update the bound svalues for regions below REG to use poisoned
   values instead.
   M_COUNT records how many bindings were poisoned.  */

struct bad_pointer_finder
{
  bad_pointer_finder (const region *reg, enum poison_kind pkind,
		      region_model_manager *mgr)
  : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
  {}

  /* Callback for each binding; SVAL is passed by reference so it can
     be replaced in-place with a poisoned svalue.  */
  void on_binding (const binding_key *, const svalue *&sval)
  {
    if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
      {
	const region *ptr_dst = ptr_sval->get_pointee ();
	/* Poison ptrs to descendents of REG, but not to REG itself,
	   otherwise double-free detection doesn't work (since sm-state
	   for "free" is stored on the original ptr svalue).  */
	if (ptr_dst->descendent_of_p (m_reg)
	    && ptr_dst != m_reg)
	  {
	    sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
							 sval->get_type ());
	    ++m_count;
	  }
      }
  }

  const region *m_reg;
  enum poison_kind m_pkind;
  region_model_manager *const m_mgr;
  int m_count;
};
3624 /* Find any pointers to REG or its descendents; convert them to
3625 poisoned values of kind PKIND.
3626 Return the number of pointers that were poisoned. */
3629 region_model::poison_any_pointers_to_descendents (const region *reg,
3630 enum poison_kind pkind)
3632 bad_pointer_finder bv (reg, pkind, m_mgr);
3633 m_store.for_each_binding (bv);
3634 return bv.m_count;
/* Attempt to merge THIS with OTHER_MODEL, writing the result
   to OUT_MODEL.  Use POINT to distinguish values created as a
   result of merging.
   Return true if the merge is possible; return false (leaving
   OUT_MODEL in a partially-written state) otherwise.  */

bool
region_model::can_merge_with_p (const region_model &other_model,
				const program_point &point,
				region_model *out_model) const
{
  gcc_assert (out_model);
  gcc_assert (m_mgr == other_model.m_mgr);
  gcc_assert (m_mgr == out_model->m_mgr);

  /* Models in different stack frames can never be merged.  */
  if (m_current_frame != other_model.m_current_frame)
    return false;
  out_model->m_current_frame = m_current_frame;

  model_merger m (this, &other_model, point, out_model);

  if (!store::can_merge_p (&m_store, &other_model.m_store,
			   &out_model->m_store, m_mgr->get_store_manager (),
			   &m))
    return false;

  if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
					   &out_model->m_dynamic_extents))
    return false;

  /* Merge constraints.  */
  constraint_manager::merge (*m_constraints,
			     *other_model.m_constraints,
			     out_model->m_constraints);

  return true;
}
/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
   otherwise.
   Resolves the called function pointer through the store (so indirect
   calls through a known pointer work too), then maps the decl to its
   ultimate alias target via the cgraph.  */

tree
region_model::get_fndecl_for_call (const gcall *call,
				   region_model_context *ctxt)
{
  tree fn_ptr = gimple_call_fn (call);
  if (fn_ptr == NULL_TREE)
    return NULL_TREE;
  const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
  if (const region_svalue *fn_ptr_ptr
	= fn_ptr_sval->dyn_cast_region_svalue ())
    {
      const region *reg = fn_ptr_ptr->get_pointee ();
      if (const function_region *fn_reg = reg->dyn_cast_function_region ())
	{
	  tree fn_decl = fn_reg->get_fndecl ();
	  cgraph_node *node = cgraph_node::get (fn_decl);
	  if (!node)
	    return NULL_TREE;
	  const cgraph_node *ultimate_node = node->ultimate_alias_target ();
	  if (ultimate_node)
	    return ultimate_node->decl;
	}
    }
  return NULL_TREE;
}
/* Closure data for region_model::append_ssa_names_cb.
   Would be much simpler to use a lambda here, if it were supported.  */

struct append_ssa_names_cb_data
{
  const region_model *model;	/* The model whose frame is being queried.  */
  auto_vec<const decl_region *> *out;	/* Where matching regions are pushed.  */
};
3711 /* Populate *OUT with all decl_regions for SSA names in the current
3712 frame that have clusters within the store. */
3714 void
3715 region_model::
3716 get_ssa_name_regions_for_current_frame (auto_vec<const decl_region *> *out)
3717 const
3719 append_ssa_names_cb_data data;
3720 data.model = this;
3721 data.out = out;
3722 m_store.for_each_cluster (append_ssa_names_cb, &data);
/* Implementation detail of get_ssa_name_regions_for_current_frame.
   Per-cluster callback: record BASE_REG if it's a decl_region for an
   SSA name within the current frame.  */

void
region_model::append_ssa_names_cb (const region *base_reg,
				   append_ssa_names_cb_data *cb_data)
{
  /* Skip regions belonging to other frames or to globals.  */
  if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
    return;
  if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
    {
      if (TREE_CODE (decl_reg->get_decl ()) == SSA_NAME)
	cb_data->out->safe_push (decl_reg);
    }
}
3740 /* Return a new region describing a heap-allocated block of memory. */
3742 const region *
3743 region_model::create_region_for_heap_alloc (const svalue *size_in_bytes)
3745 const region *reg = m_mgr->create_region_for_heap_alloc ();
3746 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
3747 set_dynamic_extents (reg, size_in_bytes);
3748 return reg;
3751 /* Return a new region describing a block of memory allocated within the
3752 current frame. */
3754 const region *
3755 region_model::create_region_for_alloca (const svalue *size_in_bytes)
3757 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
3758 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
3759 set_dynamic_extents (reg, size_in_bytes);
3760 return reg;
/* Record that the size of REG is SIZE_IN_BYTES.
   SIZE_IN_BYTES must have a type compatible with size_type_node.  */

void
region_model::set_dynamic_extents (const region *reg,
				   const svalue *size_in_bytes)
{
  assert_compat_types (size_in_bytes->get_type (), size_type_node);
  m_dynamic_extents.put (reg, size_in_bytes);
}
/* Get the recorded dynamic size of REG in bytes, or NULL if no dynamic
   size was recorded.  */

const svalue *
region_model::get_dynamic_extents (const region *reg) const
{
  if (const svalue * const *slot = m_dynamic_extents.get (reg))
    return *slot;
  return NULL;
}
/* Unset any recorded dynamic size of REG.  A no-op if none was
   recorded.  */

void
region_model::unset_dynamic_extents (const region *reg)
{
  m_dynamic_extents.remove (reg);
}
3792 /* struct model_merger. */
3794 /* Dump a multiline representation of this merger to PP. */
3796 void
3797 model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
3799 pp_string (pp, "model A:");
3800 pp_newline (pp);
3801 m_model_a->dump_to_pp (pp, simple, true);
3802 pp_newline (pp);
3804 pp_string (pp, "model B:");
3805 pp_newline (pp);
3806 m_model_b->dump_to_pp (pp, simple, true);
3807 pp_newline (pp);
3809 pp_string (pp, "merged model:");
3810 pp_newline (pp);
3811 m_merged_model->dump_to_pp (pp, simple, true);
3812 pp_newline (pp);
/* Dump a multiline representation of this merger to FILE.  */

void
model_merger::dump (FILE *fp, bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  /* Stream directly to FP rather than buffering.  */
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple);
  pp_flush (&pp);
}
/* Dump a multiline representation of this merger to stderr.  */

DEBUG_FUNCTION void
model_merger::dump (bool simple) const
{
  dump (stderr, simple);
}
3836 } // namespace ana
/* Dump RMODEL fully to stderr (i.e. without summarization).  */

DEBUG_FUNCTION void
debug (const region_model &rmodel)
{
  rmodel.dump (false);
}
/* struct rejected_constraint.  */

/* Dump "LHS OP RHS" (using the svalues from a copy of the model) to PP.  */

void
rejected_constraint::dump_to_pp (pretty_printer *pp) const
{
  /* Work on a copy so evaluating the operands can't perturb m_model.  */
  region_model m (m_model);
  const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
  const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
  lhs_sval->dump_to_pp (pp, true);
  pp_printf (pp, " %s ", op_symbol_code (m_op));
  rhs_sval->dump_to_pp (pp, true);
}
/* class engine.  */

/* Dump the managed objects by class to LOGGER, and the per-class totals.  */

void
engine::log_stats (logger *logger) const
{
  m_mgr.log_stats (logger, true);
}
3869 namespace ana {
3871 #if CHECKING_P
3873 namespace selftest {
/* Build a constant tree of the given type from STR
   (e.g. "QNaN", "-Inf", "0.0").  */

static tree
build_real_cst_from_string (tree type, const char *str)
{
  REAL_VALUE_TYPE real;
  real_from_string (&real, str);
  return build_real (type, real);
}
3885 /* Append various "interesting" constants to OUT (e.g. NaN). */
3887 static void
3888 append_interesting_constants (auto_vec<tree> *out)
3890 out->safe_push (build_int_cst (integer_type_node, 0));
3891 out->safe_push (build_int_cst (integer_type_node, 42));
3892 out->safe_push (build_int_cst (unsigned_type_node, 0));
3893 out->safe_push (build_int_cst (unsigned_type_node, 42));
3894 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
3895 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
3896 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
3897 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
3898 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
3899 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
3900 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
3901 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
/* Verify that tree_cmp is a well-behaved comparator for qsort, even
   if the underlying constants aren't comparable (e.g. NaNs).  */

static void
test_tree_cmp_on_constants ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  /* Try sorting every triple; qsort's checking (and any comparator
     sanity-checking) will flag an inconsistent comparator.  */
  const unsigned num = csts.length ();
  for (unsigned i = 0; i < num; i++)
    for (unsigned j = 0; j < num; j++)
      for (unsigned k = 0; k < num; k++)
	{
	  auto_vec<tree> v (3);
	  v.quick_push (csts[i]);
	  v.quick_push (csts[j]);
	  v.quick_push (csts[k]);
	  v.qsort (tree_cmp);
	}
}
/* Implementation detail of the ASSERT_CONDITION_* macros.
   Assert at LOC that evaluating "LHS OP RHS" in MODEL yields EXPECTED
   (svalue overload).  */

void
assert_condition (const location &loc,
		  region_model &model,
		  const svalue *lhs, tree_code op, const svalue *rhs,
		  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs);
  ASSERT_EQ_AT (loc, actual, expected);
}
/* Implementation detail of the ASSERT_CONDITION_* macros.
   Assert at LOC that evaluating "LHS OP RHS" in MODEL yields EXPECTED
   (tree overload).  */

void
assert_condition (const location &loc,
		  region_model &model,
		  tree lhs, tree_code op, tree rhs,
		  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs, NULL);
  ASSERT_EQ_AT (loc, actual, expected);
}
/* Implementation detail of ASSERT_DUMP_TREE_EQ.
   Assert at LOC that dumping tree T yields the string EXPECTED.  */

static void
assert_dump_tree_eq (const location &loc, tree t, const char *expected)
{
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  dump_tree (&pp, t);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}

/* Assert that dump_tree (T) is EXPECTED.  */

#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
  SELFTEST_END_STMT
/* Implementation detail of ASSERT_DUMP_EQ.
   Assert at LOC that dumping MODEL (with the given SUMMARIZE flag)
   yields the string EXPECTED.  */

static void
assert_dump_eq (const location &loc,
		const region_model &model,
		bool summarize,
		const char *expected)
{
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;

  model.dump_to_pp (&pp, summarize, true);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}

/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED.  */

#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
  SELFTEST_END_STMT
/* Smoketest for region_model::dump_to_pp: an empty model should dump
   identically in both full and summarized form.  */

static void
test_dump ()
{
  region_model_manager mgr;
  region_model model (&mgr);

  ASSERT_DUMP_EQ (model, false,
		  "stack depth: 0\n"
		  "m_called_unknown_fn: FALSE\n"
		  "constraint_manager:\n"
		  "  equiv classes:\n"
		  "  constraints:\n");
  ASSERT_DUMP_EQ (model, true,
		  "stack depth: 0\n"
		  "m_called_unknown_fn: FALSE\n"
		  "constraint_manager:\n"
		  "  equiv classes:\n"
		  "  constraints:\n");
}
/* Helper function for selftests.  Create a struct or union type named NAME,
   with the fields given by the FIELD_DECLs in FIELDS.
   If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
   create a UNION_TYPE.  */

static tree
make_test_compound_type (const char *name, bool is_struct,
			 const auto_vec<tree> *fields)
{
  tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
  TYPE_NAME (t) = get_identifier (name);
  TYPE_SIZE (t) = 0;

  /* Chain the fields together (chainon prepends, so reverse at the
     end to preserve the order given in FIELDS).  */
  tree fieldlist = NULL;
  int i;
  tree field;
  FOR_EACH_VEC_ELT (*fields, i, field)
    {
      gcc_assert (TREE_CODE (field) == FIELD_DECL);
      DECL_CONTEXT (field) = t;
      fieldlist = chainon (field, fieldlist);
    }
  fieldlist = nreverse (fieldlist);
  TYPE_FIELDS (t) = fieldlist;

  layout_type (t);
  return t;
}
/* Selftest fixture for creating the type "struct coord {int x; int y; };".  */

struct coord_test
{
  coord_test ()
  {
    auto_vec<tree> fields;
    m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
			    get_identifier ("x"), integer_type_node);
    fields.safe_push (m_x_field);
    m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
			    get_identifier ("y"), integer_type_node);
    fields.safe_push (m_y_field);
    m_coord_type = make_test_compound_type ("coord", true, &fields);
  }

  tree m_x_field;	/* FIELD_DECL for "x".  */
  tree m_y_field;	/* FIELD_DECL for "y".  */
  tree m_coord_type;	/* The RECORD_TYPE itself.  */
};
/* Verify usage of a struct: setting field values, and the offsets
   reported for each field's region.  */

static void
test_struct ()
{
  coord_test ct;

  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		     c, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  model.set_value (c_x, int_17, NULL);
  model.set_value (c_y, int_m3, NULL);

  /* Verify get_offset for "c.x": first field, so offset 0.  */
  {
    const region *c_x_reg = model.get_lvalue (c_x, NULL);
    region_offset offset = c_x_reg->get_offset ();
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "c.y": second field, so one int further on.  */
  {
    const region *c_y_reg = model.get_lvalue (c_y, NULL);
    region_offset offset = c_y_reg->get_offset ();
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }
}
/* Verify usage of an array element: smoketest that setting a[0]
   doesn't ICE.  */

static void
test_array_1 ()
{
  tree tlen = size_int (10);
  tree arr_type = build_array_type (char_type_node, build_index_type (tlen));

  tree a = build_global_decl ("a", arr_type);

  region_model_manager mgr;
  region_model model (&mgr);
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree a_0 = build4 (ARRAY_REF, char_type_node,
		     a, int_0, NULL_TREE, NULL_TREE);
  tree char_A = build_int_cst (char_type_node, 'A');
  model.set_value (a_0, char_A, NULL);
}
/* Verify that region_model::get_representative_tree works as expected,
   for string constants, string literals, array elements, and struct
   fields.  */

static void
test_get_representative_tree ()
{
  region_model_manager mgr;

  /* STRING_CST.  */
  {
    tree string_cst = build_string (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_EQ (rep, string_cst);
  }

  /* String literal.  */
  {
    tree string_cst_ptr = build_string_literal (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
  }

  /* Value of an element within an array.  */
  {
    tree tlen = size_int (10);
    tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
    tree a = build_global_decl ("a", arr_type);
    /* A placeholder value we can bind and then look up.  */
    placeholder_svalue test_sval (char_type_node, "test value");

    /* Value of a[3].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree int_3 = build_int_cst (integer_type_node, 3);
      tree a_3 = build4 (ARRAY_REF, char_type_node,
			 a, int_3, NULL_TREE, NULL_TREE);
      const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
      model.set_value (a_3_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[3]");
    }

    /* Value of a[0].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree idx = build_int_cst (integer_type_node, 0);
      tree a_0 = build4 (ARRAY_REF, char_type_node,
			 a, idx, NULL_TREE, NULL_TREE);
      const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
      model.set_value (a_0_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[0]");
    }
  }

  /* Value of a field within a struct.  */
  {
    coord_test ct;

    tree c = build_global_decl ("c", ct.m_coord_type);
    tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		       c, ct.m_x_field, NULL_TREE);
    tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		       c, ct.m_y_field, NULL_TREE);

    test_region_model_context ctxt;

    /* Value of initial field.  */
    {
      region_model m (&mgr);
      const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
      placeholder_svalue test_sval_x (integer_type_node, "test x val");
      m.set_value (c_x_reg, &test_sval_x, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_x);
      ASSERT_DUMP_TREE_EQ (rep, "c.x");
    }

    /* Value of non-initial field.  */
    {
      region_model m (&mgr);
      const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
      placeholder_svalue test_sval_y (integer_type_node, "test y val");
      m.set_value (c_y_reg, &test_sval_y, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_y);
      ASSERT_DUMP_TREE_EQ (rep, "c.y");
    }
  }
}
/* Verify that calling region_model::get_rvalue repeatedly on the same
   tree constant retrieves the same svalue *.  */

static void
test_unique_constants ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_42 = build_int_cst (integer_type_node, 42);

  test_region_model_context ctxt;
  region_model_manager mgr;
  region_model model (&mgr);
  ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
  ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
	     model.get_rvalue (int_42, &ctxt));
  ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
  ASSERT_EQ (ctxt.get_num_diagnostics (), 0);

  /* A "(const int)42" will be a different tree from "(int)42"...  */
  tree const_int_type_node
    = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  tree const_int_42 = build_int_cst (const_int_type_node, 42);
  ASSERT_NE (int_42, const_int_42);
  /* It should have a different const_svalue.  */
  const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
  const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
  ASSERT_NE (int_42_sval, const_int_42_sval);
  /* But they should compare as equal.  */
  ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
  ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
}
/* Verify that each type gets its own singleton unknown_svalue within a
   region_model_manager, and that NULL_TREE gets its own singleton.  */

static void
test_unique_unknowns ()
{
  region_model_manager mgr;
  const svalue *unknown_int
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  /* Repeated calls with the same type should get the same "unknown"
     svalue.  */
  const svalue *unknown_int_2
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  ASSERT_EQ (unknown_int, unknown_int_2);

  /* Different types (or the NULL type) should have different
     unknown_svalues.  */
  const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_NE (unknown_NULL_type, unknown_int);

  /* Repeated calls with NULL for the type should get the same "unknown"
     svalue.  */
  const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
}
/* Verify that initial_svalues are handled as expected: distinct decls
   get distinct initial values, and re-querying yields the singleton.  */

static void
test_initial_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  ASSERT_NE (x_init, y_init);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
}
/* Verify that unary ops are folded as expected: identity casts fold
   away, pointer casts don't, and logical negation of comparisons is
   rewritten to the inverse comparison.  */

static void
test_unaryop_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

  /* "(int)x" -> "x".  */
  ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));

  /* "(void *)x" -> something other than "x".  */
  ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));

  /* "!(x == y)" -> "x != y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
	       (boolean_type_node, TRUTH_NOT_EXPR,
		mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
					 x_init, y_init)),
	     mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
				      x_init, y_init));
  /* "!(x > y)" -> "x <= y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
	       (boolean_type_node, TRUTH_NOT_EXPR,
		mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
					 x_init, y_init)),
	     mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
				      x_init, y_init));
}
4330 /* Verify that binops on constant svalues are folded. */
4332 static void
4333 test_binop_svalue_folding ()
4335 #define NUM_CSTS 10
4336 tree cst_int[NUM_CSTS];
4337 region_model_manager mgr;
4338 const svalue *cst_sval[NUM_CSTS];
4339 for (int i = 0; i < NUM_CSTS; i++)
4341 cst_int[i] = build_int_cst (integer_type_node, i);
4342 cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
4343 ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
4344 ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
4347 for (int i = 0; i < NUM_CSTS; i++)
4348 for (int j = 0; j < NUM_CSTS; j++)
4350 if (i != j)
4351 ASSERT_NE (cst_sval[i], cst_sval[j]);
4352 if (i + j < NUM_CSTS)
4354 const svalue *sum
4355 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4356 cst_sval[i], cst_sval[j]);
4357 ASSERT_EQ (sum, cst_sval[i + j]);
4359 if (i - j >= 0)
4361 const svalue *difference
4362 = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
4363 cst_sval[i], cst_sval[j]);
4364 ASSERT_EQ (difference, cst_sval[i - j]);
4366 if (i * j < NUM_CSTS)
4368 const svalue *product
4369 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4370 cst_sval[i], cst_sval[j]);
4371 ASSERT_EQ (product, cst_sval[i * j]);
4373 const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
4374 cst_sval[i], cst_sval[j]);
4375 ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
4376 const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
4377 cst_sval[i], cst_sval[j]);
4378 ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
4379 // etc
4382 tree x = build_global_decl ("x", integer_type_node);
4384 test_region_model_context ctxt;
4385 region_model model (&mgr);
4386 const svalue *x_init = model.get_rvalue (x, &ctxt);
4388 /* PLUS_EXPR folding. */
4389 const svalue *x_init_plus_zero
4390 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4391 x_init, cst_sval[0]);
4392 ASSERT_EQ (x_init_plus_zero, x_init);
4393 const svalue *zero_plus_x_init
4394 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4395 cst_sval[0], x_init);
4396 ASSERT_EQ (zero_plus_x_init, x_init);
4398 /* MULT_EXPR folding. */
4399 const svalue *x_init_times_zero
4400 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4401 x_init, cst_sval[0]);
4402 ASSERT_EQ (x_init_times_zero, cst_sval[0]);
4403 const svalue *zero_times_x_init
4404 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4405 cst_sval[0], x_init);
4406 ASSERT_EQ (zero_times_x_init, cst_sval[0]);
4408 const svalue *x_init_times_one
4409 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4410 x_init, cst_sval[1]);
4411 ASSERT_EQ (x_init_times_one, x_init);
4412 const svalue *one_times_x_init
4413 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4414 cst_sval[1], x_init);
4415 ASSERT_EQ (one_times_x_init, x_init);
4417 // etc
4418 // TODO: do we want to use the match-and-simplify DSL for this?
4420 /* Verify that binops put any constants on the RHS. */
4421 const svalue *four_times_x_init
4422 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4423 cst_sval[4], x_init);
4424 const svalue *x_init_times_four
4425 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4426 x_init, cst_sval[4]);
4427 ASSERT_EQ (four_times_x_init, x_init_times_four);
4428 const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
4429 ASSERT_EQ (binop->get_op (), MULT_EXPR);
4430 ASSERT_EQ (binop->get_arg0 (), x_init);
4431 ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
4433 /* Verify that ((x + 1) + 1) == (x + 2). */
4434 const svalue *x_init_plus_one
4435 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4436 x_init, cst_sval[1]);
4437 const svalue *x_init_plus_two
4438 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4439 x_init, cst_sval[2]);
4440 const svalue *x_init_plus_one_plus_one
4441 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4442 x_init_plus_one, cst_sval[1]);
4443 ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
4446 /* Verify that sub_svalues are folded as expected. */
4448 static void
4449 test_sub_svalue_folding ()
4451 coord_test ct;
4452 tree c = build_global_decl ("c", ct.m_coord_type);
4453 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4454 c, ct.m_x_field, NULL_TREE);
4456 region_model_manager mgr;
4457 region_model model (&mgr);
4458 test_region_model_context ctxt;
4459 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
4461 /* Verify that sub_svalue of "unknown" simply
4462 yields an unknown. */
4464 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
4465 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
4466 unknown, c_x_reg);
4467 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
4468 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
4471 /* Test that region::descendent_of_p works as expected. */
4473 static void
4474 test_descendent_of_p ()
4476 region_model_manager mgr;
4477 const region *stack = mgr.get_stack_region ();
4478 const region *heap = mgr.get_heap_region ();
4479 const region *code = mgr.get_code_region ();
4480 const region *globals = mgr.get_globals_region ();
4482 /* descendent_of_p should return true when used on the region itself. */
4483 ASSERT_TRUE (stack->descendent_of_p (stack));
4484 ASSERT_FALSE (stack->descendent_of_p (heap));
4485 ASSERT_FALSE (stack->descendent_of_p (code));
4486 ASSERT_FALSE (stack->descendent_of_p (globals));
4488 tree x = build_global_decl ("x", integer_type_node);
4489 const region *x_reg = mgr.get_region_for_global (x);
4490 ASSERT_TRUE (x_reg->descendent_of_p (globals));
4492 /* A cast_region should be a descendent of the original region. */
4493 const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
4494 ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
4497 /* Verify that simple assignments work as expected. */
4499 static void
4500 test_assignment ()
4502 tree int_0 = build_int_cst (integer_type_node, 0);
4503 tree x = build_global_decl ("x", integer_type_node);
4504 tree y = build_global_decl ("y", integer_type_node);
4506 /* "x == 0", then use of y, then "y = 0;". */
4507 region_model_manager mgr;
4508 region_model model (&mgr);
4509 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
4510 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
4511 model.set_value (model.get_lvalue (y, NULL),
4512 model.get_rvalue (int_0, NULL),
4513 NULL);
4514 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
4515 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
4518 /* Verify that compound assignments work as expected. */
4520 static void
4521 test_compound_assignment ()
4523 coord_test ct;
4525 tree c = build_global_decl ("c", ct.m_coord_type);
4526 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4527 c, ct.m_x_field, NULL_TREE);
4528 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4529 c, ct.m_y_field, NULL_TREE);
4530 tree d = build_global_decl ("d", ct.m_coord_type);
4531 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4532 d, ct.m_x_field, NULL_TREE);
4533 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4534 d, ct.m_y_field, NULL_TREE);
4536 tree int_17 = build_int_cst (integer_type_node, 17);
4537 tree int_m3 = build_int_cst (integer_type_node, -3);
4539 region_model_manager mgr;
4540 region_model model (&mgr);
4541 model.set_value (c_x, int_17, NULL);
4542 model.set_value (c_y, int_m3, NULL);
4544 /* Copy c to d. */
4545 model.copy_region (model.get_lvalue (d, NULL), model.get_lvalue (c, NULL),
4546 NULL);
4547 /* Check that the fields have the same svalues. */
4548 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
4549 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
4552 /* Verify the details of pushing and popping stack frames. */
4554 static void
4555 test_stack_frames ()
4557 tree int_42 = build_int_cst (integer_type_node, 42);
4558 tree int_10 = build_int_cst (integer_type_node, 10);
4559 tree int_5 = build_int_cst (integer_type_node, 5);
4560 tree int_0 = build_int_cst (integer_type_node, 0);
4562 auto_vec <tree> param_types;
4563 tree parent_fndecl = make_fndecl (integer_type_node,
4564 "parent_fn",
4565 param_types);
4566 allocate_struct_function (parent_fndecl, true);
4568 tree child_fndecl = make_fndecl (integer_type_node,
4569 "child_fn",
4570 param_types);
4571 allocate_struct_function (child_fndecl, true);
4573 /* "a" and "b" in the parent frame. */
4574 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4575 get_identifier ("a"),
4576 integer_type_node);
4577 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4578 get_identifier ("b"),
4579 integer_type_node);
4580 /* "x" and "y" in a child frame. */
4581 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4582 get_identifier ("x"),
4583 integer_type_node);
4584 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4585 get_identifier ("y"),
4586 integer_type_node);
4588 /* "p" global. */
4589 tree p = build_global_decl ("p", ptr_type_node);
4591 /* "q" global. */
4592 tree q = build_global_decl ("q", ptr_type_node);
4594 region_model_manager mgr;
4595 test_region_model_context ctxt;
4596 region_model model (&mgr);
4598 /* Push stack frame for "parent_fn". */
4599 const region *parent_frame_reg
4600 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
4601 NULL, &ctxt);
4602 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
4603 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
4604 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
4605 model.set_value (a_in_parent_reg,
4606 model.get_rvalue (int_42, &ctxt),
4607 &ctxt);
4608 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
4610 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
4611 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
4612 tristate (tristate::TS_TRUE));
4614 /* Push stack frame for "child_fn". */
4615 const region *child_frame_reg
4616 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
4617 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
4618 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
4619 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
4620 model.set_value (x_in_child_reg,
4621 model.get_rvalue (int_0, &ctxt),
4622 &ctxt);
4623 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
4625 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
4626 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
4627 tristate (tristate::TS_TRUE));
4629 /* Point a global pointer at a local in the child frame: p = &x. */
4630 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
4631 model.set_value (p_in_globals_reg,
4632 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
4633 &ctxt);
4634 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
4636 /* Point another global pointer at p: q = &p. */
4637 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
4638 model.set_value (q_in_globals_reg,
4639 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
4640 &ctxt);
4642 /* Test region::descendent_of_p. */
4643 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
4644 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
4645 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
4647 /* Pop the "child_fn" frame from the stack. */
4648 model.pop_frame (NULL, NULL, &ctxt);
4649 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
4650 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
4652 /* Verify that p (which was pointing at the local "x" in the popped
4653 frame) has been poisoned. */
4654 const svalue *new_p_sval = model.get_rvalue (p, NULL);
4655 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
4656 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
4657 POISON_KIND_POPPED_STACK);
4659 /* Verify that q still points to p, in spite of the region
4660 renumbering. */
4661 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
4662 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
4663 ASSERT_EQ (new_q_sval->maybe_get_region (),
4664 model.get_lvalue (p, &ctxt));
4666 /* Verify that top of stack has been updated. */
4667 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
4669 /* Verify locals in parent frame. */
4670 /* Verify "a" still has its value. */
4671 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
4672 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
4673 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
4674 int_42);
4675 /* Verify "b" still has its constraint. */
4676 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
4677 tristate (tristate::TS_TRUE));
4680 /* Verify that get_representative_path_var works as expected, that
4681 we can map from regions to parms and back within a recursive call
4682 stack. */
4684 static void
4685 test_get_representative_path_var ()
4687 auto_vec <tree> param_types;
4688 tree fndecl = make_fndecl (integer_type_node,
4689 "factorial",
4690 param_types);
4691 allocate_struct_function (fndecl, true);
4693 /* Parm "n". */
4694 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4695 get_identifier ("n"),
4696 integer_type_node);
4698 region_model_manager mgr;
4699 test_region_model_context ctxt;
4700 region_model model (&mgr);
4702 /* Push 5 stack frames for "factorial", each with a param */
4703 auto_vec<const region *> parm_regs;
4704 auto_vec<const svalue *> parm_svals;
4705 for (int depth = 0; depth < 5; depth++)
4707 const region *frame_n_reg
4708 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
4709 const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
4710 parm_regs.safe_push (parm_n_reg);
4712 ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
4713 const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
4714 parm_svals.safe_push (sval_n);
4717 /* Verify that we can recognize that the regions are the parms,
4718 at every depth. */
4719 for (int depth = 0; depth < 5; depth++)
4722 svalue_set visited;
4723 ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
4724 &visited),
4725 path_var (n, depth + 1));
4727 /* ...and that we can lookup lvalues for locals for all frames,
4728 not just the top. */
4729 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
4730 parm_regs[depth]);
4731 /* ...and that we can locate the svalues. */
4733 svalue_set visited;
4734 ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
4735 &visited),
4736 path_var (n, depth + 1));
4741 /* Ensure that region_model::operator== works as expected. */
4743 static void
4744 test_equality_1 ()
4746 tree int_42 = build_int_cst (integer_type_node, 42);
4747 tree int_17 = build_int_cst (integer_type_node, 17);
4749 /* Verify that "empty" region_model instances are equal to each other. */
4750 region_model_manager mgr;
4751 region_model model0 (&mgr);
4752 region_model model1 (&mgr);
4753 ASSERT_EQ (model0, model1);
4755 /* Verify that setting state in model1 makes the models non-equal. */
4756 tree x = build_global_decl ("x", integer_type_node);
4757 model0.set_value (x, int_42, NULL);
4758 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
4759 ASSERT_NE (model0, model1);
4761 /* Verify the copy-ctor. */
4762 region_model model2 (model0);
4763 ASSERT_EQ (model0, model2);
4764 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
4765 ASSERT_NE (model1, model2);
4767 /* Verify that models obtained from copy-ctor are independently editable
4768 w/o affecting the original model. */
4769 model2.set_value (x, int_17, NULL);
4770 ASSERT_NE (model0, model2);
4771 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
4772 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
4775 /* Verify that region models for
4776 x = 42; y = 113;
4778 y = 113; x = 42;
4779 are equal. */
4781 static void
4782 test_canonicalization_2 ()
4784 tree int_42 = build_int_cst (integer_type_node, 42);
4785 tree int_113 = build_int_cst (integer_type_node, 113);
4786 tree x = build_global_decl ("x", integer_type_node);
4787 tree y = build_global_decl ("y", integer_type_node);
4789 region_model_manager mgr;
4790 region_model model0 (&mgr);
4791 model0.set_value (model0.get_lvalue (x, NULL),
4792 model0.get_rvalue (int_42, NULL),
4793 NULL);
4794 model0.set_value (model0.get_lvalue (y, NULL),
4795 model0.get_rvalue (int_113, NULL),
4796 NULL);
4798 region_model model1 (&mgr);
4799 model1.set_value (model1.get_lvalue (y, NULL),
4800 model1.get_rvalue (int_113, NULL),
4801 NULL);
4802 model1.set_value (model1.get_lvalue (x, NULL),
4803 model1.get_rvalue (int_42, NULL),
4804 NULL);
4806 ASSERT_EQ (model0, model1);
4809 /* Verify that constraints for
4810 x > 3 && y > 42
4812 y > 42 && x > 3
4813 are equal after canonicalization. */
4815 static void
4816 test_canonicalization_3 ()
4818 tree int_3 = build_int_cst (integer_type_node, 3);
4819 tree int_42 = build_int_cst (integer_type_node, 42);
4820 tree x = build_global_decl ("x", integer_type_node);
4821 tree y = build_global_decl ("y", integer_type_node);
4823 region_model_manager mgr;
4824 region_model model0 (&mgr);
4825 model0.add_constraint (x, GT_EXPR, int_3, NULL);
4826 model0.add_constraint (y, GT_EXPR, int_42, NULL);
4828 region_model model1 (&mgr);
4829 model1.add_constraint (y, GT_EXPR, int_42, NULL);
4830 model1.add_constraint (x, GT_EXPR, int_3, NULL);
4832 model0.canonicalize ();
4833 model1.canonicalize ();
4834 ASSERT_EQ (model0, model1);
4837 /* Verify that we can canonicalize a model containing NaN and other real
4838 constants. */
4840 static void
4841 test_canonicalization_4 ()
4843 auto_vec<tree> csts;
4844 append_interesting_constants (&csts);
4846 region_model_manager mgr;
4847 region_model model (&mgr);
4849 for (tree cst : csts)
4850 model.get_rvalue (cst, NULL);
4852 model.canonicalize ();
4855 /* Assert that if we have two region_model instances
4856 with values VAL_A and VAL_B for EXPR that they are
4857 mergable. Write the merged model to *OUT_MERGED_MODEL,
4858 and the merged svalue ptr to *OUT_MERGED_SVALUE.
4859 If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
4860 for that region_model. */
4862 static void
4863 assert_region_models_merge (tree expr, tree val_a, tree val_b,
4864 region_model *out_merged_model,
4865 const svalue **out_merged_svalue)
4867 program_point point (program_point::origin ());
4868 test_region_model_context ctxt;
4869 region_model_manager *mgr = out_merged_model->get_manager ();
4870 region_model model0 (mgr);
4871 region_model model1 (mgr);
4872 if (val_a)
4873 model0.set_value (model0.get_lvalue (expr, &ctxt),
4874 model0.get_rvalue (val_a, &ctxt),
4875 &ctxt);
4876 if (val_b)
4877 model1.set_value (model1.get_lvalue (expr, &ctxt),
4878 model1.get_rvalue (val_b, &ctxt),
4879 &ctxt);
4881 /* They should be mergeable. */
4882 ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
4883 *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
4886 /* Verify that we can merge region_model instances. */
4888 static void
4889 test_state_merging ()
4891 tree int_42 = build_int_cst (integer_type_node, 42);
4892 tree int_113 = build_int_cst (integer_type_node, 113);
4893 tree x = build_global_decl ("x", integer_type_node);
4894 tree y = build_global_decl ("y", integer_type_node);
4895 tree z = build_global_decl ("z", integer_type_node);
4896 tree p = build_global_decl ("p", ptr_type_node);
4898 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
4899 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
4901 auto_vec <tree> param_types;
4902 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
4903 allocate_struct_function (test_fndecl, true);
4905 /* Param "a". */
4906 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4907 get_identifier ("a"),
4908 integer_type_node);
4909 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
4911 /* Param "q", a pointer. */
4912 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4913 get_identifier ("q"),
4914 ptr_type_node);
4916 program_point point (program_point::origin ());
4917 region_model_manager mgr;
4920 region_model model0 (&mgr);
4921 region_model model1 (&mgr);
4922 region_model merged (&mgr);
4923 /* Verify empty models can be merged. */
4924 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
4925 ASSERT_EQ (model0, merged);
4928 /* Verify that we can merge two contradictory constraints on the
4929 value for a global. */
4930 /* TODO: verify that the merged model doesn't have a value for
4931 the global */
4933 region_model model0 (&mgr);
4934 region_model model1 (&mgr);
4935 region_model merged (&mgr);
4936 test_region_model_context ctxt;
4937 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
4938 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
4939 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
4940 ASSERT_NE (model0, merged);
4941 ASSERT_NE (model1, merged);
4944 /* Verify handling of a PARM_DECL. */
4946 test_region_model_context ctxt;
4947 region_model model0 (&mgr);
4948 region_model model1 (&mgr);
4949 ASSERT_EQ (model0.get_stack_depth (), 0);
4950 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
4951 ASSERT_EQ (model0.get_stack_depth (), 1);
4952 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
4954 placeholder_svalue test_sval (integer_type_node, "test sval");
4955 model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
4956 model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
4957 ASSERT_EQ (model0, model1);
4959 /* They should be mergeable, and the result should be the same. */
4960 region_model merged (&mgr);
4961 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
4962 ASSERT_EQ (model0, merged);
4963 /* In particular, "a" should have the placeholder value. */
4964 ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
4967 /* Verify handling of a global. */
4969 test_region_model_context ctxt;
4970 region_model model0 (&mgr);
4971 region_model model1 (&mgr);
4973 placeholder_svalue test_sval (integer_type_node, "test sval");
4974 model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
4975 model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
4976 ASSERT_EQ (model0, model1);
4978 /* They should be mergeable, and the result should be the same. */
4979 region_model merged (&mgr);
4980 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
4981 ASSERT_EQ (model0, merged);
4982 /* In particular, "x" should have the placeholder value. */
4983 ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
4986 /* Use global-handling to verify various combinations of values. */
4988 /* Two equal constant values. */
4990 region_model merged (&mgr);
4991 const svalue *merged_x_sval;
4992 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
4994 /* In particular, there should be a constant value for "x". */
4995 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
4996 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
4997 int_42);
5000 /* Two non-equal constant values. */
5002 region_model merged (&mgr);
5003 const svalue *merged_x_sval;
5004 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
5006 /* In particular, there should be a "widening" value for "x". */
5007 ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
5010 /* Initial and constant. */
5012 region_model merged (&mgr);
5013 const svalue *merged_x_sval;
5014 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
5016 /* In particular, there should be an unknown value for "x". */
5017 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5020 /* Constant and initial. */
5022 region_model merged (&mgr);
5023 const svalue *merged_x_sval;
5024 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
5026 /* In particular, there should be an unknown value for "x". */
5027 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5030 /* Unknown and constant. */
5031 // TODO
5033 /* Pointers: NULL and NULL. */
5034 // TODO
5036 /* Pointers: NULL and non-NULL. */
5037 // TODO
5039 /* Pointers: non-NULL and non-NULL: ptr to a local. */
5041 region_model model0 (&mgr);
5042 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5043 model0.set_value (model0.get_lvalue (p, NULL),
5044 model0.get_rvalue (addr_of_a, NULL), NULL);
5046 region_model model1 (model0);
5047 ASSERT_EQ (model0, model1);
5049 /* They should be mergeable, and the result should be the same. */
5050 region_model merged (&mgr);
5051 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5052 ASSERT_EQ (model0, merged);
5055 /* Pointers: non-NULL and non-NULL: ptr to a global. */
5057 region_model merged (&mgr);
5058 /* p == &y in both input models. */
5059 const svalue *merged_p_sval;
5060 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
5061 &merged_p_sval);
5063 /* We should get p == &y in the merged model. */
5064 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
5065 const region_svalue *merged_p_ptr
5066 = merged_p_sval->dyn_cast_region_svalue ();
5067 const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
5068 ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
5071 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
5073 region_model merged (&mgr);
5074 /* x == &y vs x == &z in the input models; these are actually casts
5075 of the ptrs to "int". */
5076 const svalue *merged_x_sval;
5077 // TODO:
5078 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
5079 &merged_x_sval);
5081 /* We should get x == unknown in the merged model. */
5082 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5085 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
5087 test_region_model_context ctxt;
5088 region_model model0 (&mgr);
5089 tree size = build_int_cst (size_type_node, 1024);
5090 const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
5091 const region *new_reg = model0.create_region_for_heap_alloc (size_sval);
5092 const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
5093 model0.set_value (model0.get_lvalue (p, &ctxt),
5094 ptr_sval, &ctxt);
5096 region_model model1 (model0);
5098 ASSERT_EQ (model0, model1);
5100 region_model merged (&mgr);
5101 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5103 /* The merged model ought to be identical. */
5104 ASSERT_EQ (model0, merged);
5107 /* Two regions sharing the same placeholder svalue should continue sharing
5108 it after self-merger. */
5110 test_region_model_context ctxt;
5111 region_model model0 (&mgr);
5112 placeholder_svalue placeholder_sval (integer_type_node, "test");
5113 model0.set_value (model0.get_lvalue (x, &ctxt),
5114 &placeholder_sval, &ctxt);
5115 model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
5116 region_model model1 (model0);
5118 /* They should be mergeable, and the result should be the same. */
5119 region_model merged (&mgr);
5120 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5121 ASSERT_EQ (model0, merged);
5123 /* In particular, we should have x == y. */
5124 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
5125 tristate (tristate::TS_TRUE));
5129 region_model model0 (&mgr);
5130 region_model model1 (&mgr);
5131 test_region_model_context ctxt;
5132 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5133 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
5134 region_model merged (&mgr);
5135 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5139 region_model model0 (&mgr);
5140 region_model model1 (&mgr);
5141 test_region_model_context ctxt;
5142 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5143 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
5144 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
5145 region_model merged (&mgr);
5146 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5149 // TODO: what can't we merge? need at least one such test
5151 /* TODO: various things
5152 - heap regions
5153 - value merging:
5154 - every combination, but in particular
5155 - pairs of regions
5158 /* Views. */
5160 test_region_model_context ctxt;
5161 region_model model0 (&mgr);
5163 const region *x_reg = model0.get_lvalue (x, &ctxt);
5164 const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
5165 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
5167 region_model model1 (model0);
5168 ASSERT_EQ (model1, model0);
5170 /* They should be mergeable, and the result should be the same. */
5171 region_model merged (&mgr);
5172 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5175 /* Verify that we can merge a model in which a local in an older stack
5176 frame points to a local in a more recent stack frame. */
5178 region_model model0 (&mgr);
5179 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5180 const region *q_in_first_frame = model0.get_lvalue (q, NULL);
5182 /* Push a second frame. */
5183 const region *reg_2nd_frame
5184 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5186 /* Have a pointer in the older frame point to a local in the
5187 more recent frame. */
5188 const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
5189 model0.set_value (q_in_first_frame, sval_ptr, NULL);
5191 /* Verify that it's pointing at the newer frame. */
5192 const region *reg_pointee = sval_ptr->maybe_get_region ();
5193 ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
5195 model0.canonicalize ();
5197 region_model model1 (model0);
5198 ASSERT_EQ (model0, model1);
5200 /* They should be mergeable, and the result should be the same
5201 (after canonicalization, at least). */
5202 region_model merged (&mgr);
5203 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5204 merged.canonicalize ();
5205 ASSERT_EQ (model0, merged);
5208 /* Verify that we can merge a model in which a local points to a global. */
5210 region_model model0 (&mgr);
5211 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5212 model0.set_value (model0.get_lvalue (q, NULL),
5213 model0.get_rvalue (addr_of_y, NULL), NULL);
5215 region_model model1 (model0);
5216 ASSERT_EQ (model0, model1);
5218 /* They should be mergeable, and the result should be the same
5219 (after canonicalization, at least). */
5220 region_model merged (&mgr);
5221 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5222 ASSERT_EQ (model0, merged);
5226 /* Verify that constraints are correctly merged when merging region_model
5227 instances. */
5229 static void
5230 test_constraint_merging ()
5232 tree int_0 = build_int_cst (integer_type_node, 0);
5233 tree int_5 = build_int_cst (integer_type_node, 5);
5234 tree x = build_global_decl ("x", integer_type_node);
5235 tree y = build_global_decl ("y", integer_type_node);
5236 tree z = build_global_decl ("z", integer_type_node);
5237 tree n = build_global_decl ("n", integer_type_node);
5239 region_model_manager mgr;
5240 test_region_model_context ctxt;
5242 /* model0: 0 <= (x == y) < n. */
5243 region_model model0 (&mgr);
5244 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
5245 model0.add_constraint (x, GE_EXPR, int_0, NULL);
5246 model0.add_constraint (x, LT_EXPR, n, NULL);
5248 /* model1: z != 5 && (0 <= x < n). */
5249 region_model model1 (&mgr);
5250 model1.add_constraint (z, NE_EXPR, int_5, NULL);
5251 model1.add_constraint (x, GE_EXPR, int_0, NULL);
5252 model1.add_constraint (x, LT_EXPR, n, NULL);
5254 /* They should be mergeable; the merged constraints should
5255 be: (0 <= x < n). */
5256 program_point point (program_point::origin ());
5257 region_model merged (&mgr);
5258 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5260 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
5261 tristate (tristate::TS_TRUE));
5262 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
5263 tristate (tristate::TS_TRUE));
5265 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
5266 tristate (tristate::TS_UNKNOWN));
5267 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
5268 tristate (tristate::TS_UNKNOWN));
5271 /* Verify that widening_svalue::eval_condition_without_cm works as
5272 expected. */
5274 static void
5275 test_widening_constraints ()
5277 program_point point (program_point::origin ());
5278 tree int_0 = build_int_cst (integer_type_node, 0);
5279 tree int_m1 = build_int_cst (integer_type_node, -1);
5280 tree int_1 = build_int_cst (integer_type_node, 1);
5281 tree int_256 = build_int_cst (integer_type_node, 256);
5282 region_model_manager mgr;
5283 test_region_model_context ctxt;
5284 const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
5285 const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
5286 const svalue *w_zero_then_one_sval
5287 = mgr.get_or_create_widening_svalue (integer_type_node, point,
5288 int_0_sval, int_1_sval);
5289 const widening_svalue *w_zero_then_one
5290 = w_zero_then_one_sval->dyn_cast_widening_svalue ();
5291 ASSERT_EQ (w_zero_then_one->get_direction (),
5292 widening_svalue::DIR_ASCENDING);
5293 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
5294 tristate::TS_FALSE);
5295 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
5296 tristate::TS_FALSE);
5297 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
5298 tristate::TS_UNKNOWN);
5299 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
5300 tristate::TS_UNKNOWN);
5302 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
5303 tristate::TS_FALSE);
5304 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
5305 tristate::TS_UNKNOWN);
5306 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
5307 tristate::TS_UNKNOWN);
5308 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
5309 tristate::TS_UNKNOWN);
5311 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
5312 tristate::TS_TRUE);
5313 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
5314 tristate::TS_UNKNOWN);
5315 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
5316 tristate::TS_UNKNOWN);
5317 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
5318 tristate::TS_UNKNOWN);
5320 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
5321 tristate::TS_TRUE);
5322 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
5323 tristate::TS_TRUE);
5324 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
5325 tristate::TS_UNKNOWN);
5326 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
5327 tristate::TS_UNKNOWN);
5329 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
5330 tristate::TS_FALSE);
5331 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
5332 tristate::TS_UNKNOWN);
5333 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
5334 tristate::TS_UNKNOWN);
5335 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
5336 tristate::TS_UNKNOWN);
5338 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
5339 tristate::TS_TRUE);
5340 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
5341 tristate::TS_UNKNOWN);
5342 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
5343 tristate::TS_UNKNOWN);
5344 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
5345 tristate::TS_UNKNOWN);
5348 /* Verify merging constraints for states simulating successive iterations
5349 of a loop.
5350 Simulate:
5351 for (i = 0; i < 256; i++)
5352 [...body...]
5353 i.e. this gimple:.
5354 i_15 = 0;
5355 goto <bb 4>;
5357 <bb 4> :
5358 i_11 = PHI <i_15(2), i_23(3)>
5359 if (i_11 <= 255)
5360 goto <bb 3>;
5361 else
5362 goto [AFTER LOOP]
5364 <bb 3> :
5365 [LOOP BODY]
5366 i_23 = i_11 + 1;
5368 and thus these ops (and resultant states):
5369 i_11 = PHI()
5370 {i_11: 0}
5371 add_constraint (i_11 <= 255) [for the true edge]
5372 {i_11: 0} [constraint was a no-op]
5373 i_23 = i_11 + 1;
5374 {i_22: 1}
5375 i_11 = PHI()
5376 {i_11: WIDENED (at phi, 0, 1)}
5377 add_constraint (i_11 <= 255) [for the true edge]
5378 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
5379 i_23 = i_11 + 1;
5380 {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
5381 i_11 = PHI(); merge with state at phi above
5382 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
5383 [changing meaning of "WIDENED" here]
5384 if (i_11 <= 255)
5385 T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
5386 F: {i_11: 256}
5389 static void
5390 test_iteration_1 ()
5392 program_point point (program_point::origin ());
5394 tree int_0 = build_int_cst (integer_type_node, 0);
5395 tree int_1 = build_int_cst (integer_type_node, 1);
5396 tree int_256 = build_int_cst (integer_type_node, 256);
5397 tree int_257 = build_int_cst (integer_type_node, 257);
5398 tree i = build_global_decl ("i", integer_type_node);
5400 region_model_manager mgr;
5401 test_region_model_context ctxt;
5403 /* model0: i: 0. */
5404 region_model model0 (&mgr);
5405 model0.set_value (i, int_0, &ctxt);
5407 /* model1: i: 1. */
5408 region_model model1 (&mgr);
5409 model1.set_value (i, int_1, &ctxt);
5411 /* Should merge "i" to a widened value. */
5412 region_model model2 (&mgr);
5413 ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
5414 const svalue *merged_i = model2.get_rvalue (i, &ctxt);
5415 ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
5416 const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
5417 ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);
5419 /* Add constraint: i < 256 */
5420 model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
5421 ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
5422 tristate (tristate::TS_TRUE));
5423 ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
5424 tristate (tristate::TS_TRUE));
5426 /* Try merging with the initial state. */
5427 region_model model3 (&mgr);
5428 ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
5429 /* Merging the merged value with the initial value should be idempotent,
5430 so that the analysis converges. */
5431 ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
5432 /* Merger of 0 and a widening value with constraint < CST
5433 should retain the constraint, even though it was implicit
5434 for the 0 case. */
5435 ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
5436 tristate (tristate::TS_TRUE));
5437 /* ...and we should have equality: the analysis should have converged. */
5438 ASSERT_EQ (model3, model2);
5440 /* "i_23 = i_11 + 1;" */
5441 region_model model4 (model3);
5442 ASSERT_EQ (model4, model2);
5443 model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
5444 const svalue *plus_one = model4.get_rvalue (i, &ctxt);
5445 ASSERT_EQ (plus_one->get_kind (), SK_BINOP);
5447 /* Try merging with the "i: 1" state. */
5448 region_model model5 (&mgr);
5449 ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
5450 ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
5451 ASSERT_EQ (model5, model4);
5453 /* "i_11 = PHI();" merge with state at phi above.
5454 For i, we should have a merger of WIDENING with WIDENING + 1,
5455 and this should be WIDENING again. */
5456 region_model model6 (&mgr);
5457 ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
5458 const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
5459 ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);
5461 ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
5464 /* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
5465 all cast pointers to that region are also known to be non-NULL. */
5467 static void
5468 test_malloc_constraints ()
5470 region_model_manager mgr;
5471 region_model model (&mgr);
5472 tree p = build_global_decl ("p", ptr_type_node);
5473 tree char_star = build_pointer_type (char_type_node);
5474 tree q = build_global_decl ("q", char_star);
5475 tree null_ptr = build_int_cst (ptr_type_node, 0);
5477 const svalue *size_in_bytes
5478 = mgr.get_or_create_unknown_svalue (size_type_node);
5479 const region *reg = model.create_region_for_heap_alloc (size_in_bytes);
5480 const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
5481 model.set_value (model.get_lvalue (p, NULL), sval, NULL);
5482 model.set_value (q, p, NULL);
5484 ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
5485 ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
5486 ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
5487 ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);
5489 model.add_constraint (p, NE_EXPR, null_ptr, NULL);
5491 ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
5492 ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
5493 ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
5494 ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
5497 /* Smoketest of getting and setting the value of a variable. */
5499 static void
5500 test_var ()
5502 /* "int i;" */
5503 tree i = build_global_decl ("i", integer_type_node);
5505 tree int_17 = build_int_cst (integer_type_node, 17);
5506 tree int_m3 = build_int_cst (integer_type_node, -3);
5508 region_model_manager mgr;
5509 region_model model (&mgr);
5511 const region *i_reg = model.get_lvalue (i, NULL);
5512 ASSERT_EQ (i_reg->get_kind (), RK_DECL);
5514 /* Reading "i" should give a symbolic "initial value". */
5515 const svalue *sval_init = model.get_rvalue (i, NULL);
5516 ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
5517 ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
5518 /* ..and doing it again should give the same "initial value". */
5519 ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);
5521 /* "i = 17;". */
5522 model.set_value (i, int_17, NULL);
5523 ASSERT_EQ (model.get_rvalue (i, NULL),
5524 model.get_rvalue (int_17, NULL));
5526 /* "i = -3;". */
5527 model.set_value (i, int_m3, NULL);
5528 ASSERT_EQ (model.get_rvalue (i, NULL),
5529 model.get_rvalue (int_m3, NULL));
5531 /* Verify get_offset for "i". */
5533 region_offset offset = i_reg->get_offset ();
5534 ASSERT_EQ (offset.get_base_region (), i_reg);
5535 ASSERT_EQ (offset.get_bit_offset (), 0);
5539 static void
5540 test_array_2 ()
5542 /* "int arr[10];" */
5543 tree tlen = size_int (10);
5544 tree arr_type
5545 = build_array_type (integer_type_node, build_index_type (tlen));
5546 tree arr = build_global_decl ("arr", arr_type);
5548 /* "int i;" */
5549 tree i = build_global_decl ("i", integer_type_node);
5551 tree int_0 = build_int_cst (integer_type_node, 0);
5552 tree int_1 = build_int_cst (integer_type_node, 1);
5554 tree arr_0 = build4 (ARRAY_REF, integer_type_node,
5555 arr, int_0, NULL_TREE, NULL_TREE);
5556 tree arr_1 = build4 (ARRAY_REF, integer_type_node,
5557 arr, int_1, NULL_TREE, NULL_TREE);
5558 tree arr_i = build4 (ARRAY_REF, integer_type_node,
5559 arr, i, NULL_TREE, NULL_TREE);
5561 tree int_17 = build_int_cst (integer_type_node, 17);
5562 tree int_42 = build_int_cst (integer_type_node, 42);
5563 tree int_m3 = build_int_cst (integer_type_node, -3);
5565 region_model_manager mgr;
5566 region_model model (&mgr);
5567 /* "arr[0] = 17;". */
5568 model.set_value (arr_0, int_17, NULL);
5569 /* "arr[1] = -3;". */
5570 model.set_value (arr_1, int_m3, NULL);
5572 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
5573 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));
5575 /* Overwrite a pre-existing binding: "arr[1] = 42;". */
5576 model.set_value (arr_1, int_42, NULL);
5577 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));
5579 /* Verify get_offset for "arr[0]". */
5581 const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
5582 region_offset offset = arr_0_reg->get_offset ();
5583 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
5584 ASSERT_EQ (offset.get_bit_offset (), 0);
5587 /* Verify get_offset for "arr[1]". */
5589 const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
5590 region_offset offset = arr_1_reg->get_offset ();
5591 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
5592 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
5595 /* "arr[i] = i;" - this should remove the earlier bindings. */
5596 model.set_value (arr_i, i, NULL);
5597 ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
5598 ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);
5600 /* "arr[0] = 17;" - this should remove the arr[i] binding. */
5601 model.set_value (arr_0, int_17, NULL);
5602 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
5603 ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
5606 /* Smoketest of dereferencing a pointer via MEM_REF. */
5608 static void
5609 test_mem_ref ()
5612 x = 17;
5613 p = &x;
5616 tree x = build_global_decl ("x", integer_type_node);
5617 tree int_star = build_pointer_type (integer_type_node);
5618 tree p = build_global_decl ("p", int_star);
5620 tree int_17 = build_int_cst (integer_type_node, 17);
5621 tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
5622 tree offset_0 = build_int_cst (integer_type_node, 0);
5623 tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);
5625 region_model_manager mgr;
5626 region_model model (&mgr);
5628 /* "x = 17;". */
5629 model.set_value (x, int_17, NULL);
5631 /* "p = &x;". */
5632 model.set_value (p, addr_of_x, NULL);
5634 const svalue *sval = model.get_rvalue (star_p, NULL);
5635 ASSERT_EQ (sval->maybe_get_constant (), int_17);
5638 /* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
5639 Analogous to this code:
5640 void test_6 (int a[10])
5642 __analyzer_eval (a[3] == 42); [should be UNKNOWN]
5643 a[3] = 42;
5644 __analyzer_eval (a[3] == 42); [should be TRUE]
5646 from data-model-1.c, which looks like this at the gimple level:
5647 # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
5648 int *_1 = a_10(D) + 12; # POINTER_PLUS_EXPR
5649 int _2 = *_1; # MEM_REF
5650 _Bool _3 = _2 == 42;
5651 int _4 = (int) _3;
5652 __analyzer_eval (_4);
5654 # a[3] = 42;
5655 int *_5 = a_10(D) + 12; # POINTER_PLUS_EXPR
5656 *_5 = 42; # MEM_REF
5658 # __analyzer_eval (a[3] == 42); [should be TRUE]
5659 int *_6 = a_10(D) + 12; # POINTER_PLUS_EXPR
5660 int _7 = *_6; # MEM_REF
5661 _Bool _8 = _7 == 42;
5662 int _9 = (int) _8;
5663 __analyzer_eval (_9); */
5665 static void
5666 test_POINTER_PLUS_EXPR_then_MEM_REF ()
5668 tree int_star = build_pointer_type (integer_type_node);
5669 tree a = build_global_decl ("a", int_star);
5670 tree offset_12 = build_int_cst (size_type_node, 12);
5671 tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
5672 tree offset_0 = build_int_cst (integer_type_node, 0);
5673 tree mem_ref = build2 (MEM_REF, integer_type_node,
5674 pointer_plus_expr, offset_0);
5675 region_model_manager mgr;
5676 region_model m (&mgr);
5678 tree int_42 = build_int_cst (integer_type_node, 42);
5679 m.set_value (mem_ref, int_42, NULL);
5680 ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
5683 /* Verify that malloc works. */
5685 static void
5686 test_malloc ()
5688 tree int_star = build_pointer_type (integer_type_node);
5689 tree p = build_global_decl ("p", int_star);
5690 tree n = build_global_decl ("n", integer_type_node);
5691 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
5692 n, build_int_cst (size_type_node, 4));
5694 region_model_manager mgr;
5695 test_region_model_context ctxt;
5696 region_model model (&mgr);
5698 /* "p = malloc (n * 4);". */
5699 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
5700 const region *reg = model.create_region_for_heap_alloc (size_sval);
5701 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
5702 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
5703 ASSERT_EQ (model.get_capacity (reg), size_sval);
5706 /* Verify that alloca works. */
5708 static void
5709 test_alloca ()
5711 auto_vec <tree> param_types;
5712 tree fndecl = make_fndecl (integer_type_node,
5713 "test_fn",
5714 param_types);
5715 allocate_struct_function (fndecl, true);
5718 tree int_star = build_pointer_type (integer_type_node);
5719 tree p = build_global_decl ("p", int_star);
5720 tree n = build_global_decl ("n", integer_type_node);
5721 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
5722 n, build_int_cst (size_type_node, 4));
5724 region_model_manager mgr;
5725 test_region_model_context ctxt;
5726 region_model model (&mgr);
5728 /* Push stack frame. */
5729 const region *frame_reg
5730 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
5731 NULL, &ctxt);
5732 /* "p = alloca (n * 4);". */
5733 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
5734 const region *reg = model.create_region_for_alloca (size_sval);
5735 ASSERT_EQ (reg->get_parent_region (), frame_reg);
5736 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
5737 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
5738 ASSERT_EQ (model.get_capacity (reg), size_sval);
5740 /* Verify that the pointers to the alloca region are replaced by
5741 poisoned values when the frame is popped. */
5742 model.pop_frame (NULL, NULL, &ctxt);
5743 ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
5746 /* Verify that svalue::involves_p works. */
5748 static void
5749 test_involves_p ()
5751 region_model_manager mgr;
5752 tree int_star = build_pointer_type (integer_type_node);
5753 tree p = build_global_decl ("p", int_star);
5754 tree q = build_global_decl ("q", int_star);
5756 test_region_model_context ctxt;
5757 region_model model (&mgr);
5758 const svalue *p_init = model.get_rvalue (p, &ctxt);
5759 const svalue *q_init = model.get_rvalue (q, &ctxt);
5761 ASSERT_TRUE (p_init->involves_p (p_init));
5762 ASSERT_FALSE (p_init->involves_p (q_init));
5764 const region *star_p_reg = mgr.get_symbolic_region (p_init);
5765 const region *star_q_reg = mgr.get_symbolic_region (q_init);
5767 const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
5768 const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);
5770 ASSERT_TRUE (init_star_p->involves_p (p_init));
5771 ASSERT_FALSE (p_init->involves_p (init_star_p));
5772 ASSERT_FALSE (init_star_p->involves_p (q_init));
5773 ASSERT_TRUE (init_star_q->involves_p (q_init));
5774 ASSERT_FALSE (init_star_q->involves_p (p_init));
5777 /* Run all of the selftests within this file. */
5779 void
5780 analyzer_region_model_cc_tests ()
5782 test_tree_cmp_on_constants ();
5783 test_dump ();
5784 test_struct ();
5785 test_array_1 ();
5786 test_get_representative_tree ();
5787 test_unique_constants ();
5788 test_unique_unknowns ();
5789 test_initial_svalue_folding ();
5790 test_unaryop_svalue_folding ();
5791 test_binop_svalue_folding ();
5792 test_sub_svalue_folding ();
5793 test_descendent_of_p ();
5794 test_assignment ();
5795 test_compound_assignment ();
5796 test_stack_frames ();
5797 test_get_representative_path_var ();
5798 test_equality_1 ();
5799 test_canonicalization_2 ();
5800 test_canonicalization_3 ();
5801 test_canonicalization_4 ();
5802 test_state_merging ();
5803 test_constraint_merging ();
5804 test_widening_constraints ();
5805 test_iteration_1 ();
5806 test_malloc_constraints ();
5807 test_var ();
5808 test_array_2 ();
5809 test_mem_ref ();
5810 test_POINTER_PLUS_EXPR_then_MEM_REF ();
5811 test_malloc ();
5812 test_alloca ();
5813 test_involves_p ();
5816 } // namespace selftest
5818 #endif /* CHECKING_P */
5820 } // namespace ana
5822 #endif /* #if ENABLE_ANALYZER */