Daily bump.
[official-gcc.git] / gcc / analyzer / region-model.cc
bloba14d107709cd923b2c3b28419689ec2432878d0b
1 /* Classes for modeling the state of memory.
2 Copyright (C) 2019-2021 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "function.h"
26 #include "basic-block.h"
27 #include "gimple.h"
28 #include "gimple-iterator.h"
29 #include "diagnostic-core.h"
30 #include "graphviz.h"
31 #include "options.h"
32 #include "cgraph.h"
33 #include "tree-dfa.h"
34 #include "stringpool.h"
35 #include "convert.h"
36 #include "target.h"
37 #include "fold-const.h"
38 #include "tree-pretty-print.h"
39 #include "diagnostic-color.h"
40 #include "diagnostic-metadata.h"
41 #include "tristate.h"
42 #include "bitmap.h"
43 #include "selftest.h"
44 #include "function.h"
45 #include "json.h"
46 #include "analyzer/analyzer.h"
47 #include "analyzer/analyzer-logging.h"
48 #include "ordered-hash-map.h"
49 #include "options.h"
50 #include "cgraph.h"
51 #include "cfg.h"
52 #include "digraph.h"
53 #include "analyzer/supergraph.h"
54 #include "sbitmap.h"
55 #include "analyzer/call-string.h"
56 #include "analyzer/program-point.h"
57 #include "analyzer/store.h"
58 #include "analyzer/region-model.h"
59 #include "analyzer/constraint-manager.h"
60 #include "diagnostic-event-id.h"
61 #include "analyzer/sm.h"
62 #include "diagnostic-event-id.h"
63 #include "analyzer/sm.h"
64 #include "analyzer/pending-diagnostic.h"
65 #include "analyzer/region-model-reachability.h"
66 #include "analyzer/analyzer-selftests.h"
67 #include "stor-layout.h"
68 #include "attribs.h"
69 #include "tree-object-size.h"
71 #if ENABLE_ANALYZER
73 namespace ana {
75 /* Dump T to PP in language-independent form, for debugging/logging/dumping
76 purposes. */
78 void
79 dump_tree (pretty_printer *pp, tree t)
81 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
84 /* Dump T to PP in language-independent form in quotes, for
85 debugging/logging/dumping purposes. */
87 void
88 dump_quoted_tree (pretty_printer *pp, tree t)
90 pp_begin_quote (pp, pp_show_color (pp));
91 dump_tree (pp, t);
92 pp_end_quote (pp, pp_show_color (pp));
95 /* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
96 calls within other pp_printf calls.
98 default_tree_printer handles 'T' and some other codes by calling
99 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
100 dump_generic_node calls pp_printf in various places, leading to
101 garbled output.
103 Ideally pp_printf could be made to be reentrant, but in the meantime
104 this function provides a workaround. */
106 void
107 print_quoted_type (pretty_printer *pp, tree t)
109 pp_begin_quote (pp, pp_show_color (pp));
110 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
111 pp_end_quote (pp, pp_show_color (pp));
114 /* class region_to_value_map. */
116 /* Assignment operator for region_to_value_map. */
118 region_to_value_map &
119 region_to_value_map::operator= (const region_to_value_map &other)
121 m_hash_map.empty ();
122 for (auto iter : other.m_hash_map)
124 const region *reg = iter.first;
125 const svalue *sval = iter.second;
126 m_hash_map.put (reg, sval);
128 return *this;
131 /* Equality operator for region_to_value_map. */
133 bool
134 region_to_value_map::operator== (const region_to_value_map &other) const
136 if (m_hash_map.elements () != other.m_hash_map.elements ())
137 return false;
139 for (auto iter : *this)
141 const region *reg = iter.first;
142 const svalue *sval = iter.second;
143 const svalue * const *other_slot = other.get (reg);
144 if (other_slot == NULL)
145 return false;
146 if (sval != *other_slot)
147 return false;
150 return true;
153 /* Dump this object to PP. */
155 void
156 region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
157 bool multiline) const
159 auto_vec<const region *> regs;
160 for (iterator iter = begin (); iter != end (); ++iter)
161 regs.safe_push ((*iter).first);
162 regs.qsort (region::cmp_ptr_ptr);
163 if (multiline)
164 pp_newline (pp);
165 else
166 pp_string (pp, " {");
167 unsigned i;
168 const region *reg;
169 FOR_EACH_VEC_ELT (regs, i, reg)
171 if (multiline)
172 pp_string (pp, " ");
173 else if (i > 0)
174 pp_string (pp, ", ");
175 reg->dump_to_pp (pp, simple);
176 pp_string (pp, ": ");
177 const svalue *sval = *get (reg);
178 sval->dump_to_pp (pp, true);
179 if (multiline)
180 pp_newline (pp);
182 if (!multiline)
183 pp_string (pp, "}");
186 /* Dump this object to stderr. */
188 DEBUG_FUNCTION void
189 region_to_value_map::dump (bool simple) const
191 pretty_printer pp;
192 pp_format_decoder (&pp) = default_tree_printer;
193 pp_show_color (&pp) = pp_show_color (global_dc->printer);
194 pp.buffer->stream = stderr;
195 dump_to_pp (&pp, simple, true);
196 pp_newline (&pp);
197 pp_flush (&pp);
201 /* Attempt to merge THIS with OTHER, writing the result
202 to OUT.
204 For now, write (region, value) mappings that are in common between THIS
205 and OTHER to OUT, effectively taking the intersection, rather than
206 rejecting differences. */
208 bool
209 region_to_value_map::can_merge_with_p (const region_to_value_map &other,
210 region_to_value_map *out) const
212 for (auto iter : *this)
214 const region *iter_reg = iter.first;
215 const svalue *iter_sval = iter.second;
216 const svalue * const * other_slot = other.get (iter_reg);
217 if (other_slot)
218 if (iter_sval == *other_slot)
219 out->put (iter_reg, iter_sval);
221 return true;
224 /* Purge any state involving SVAL. */
226 void
227 region_to_value_map::purge_state_involving (const svalue *sval)
229 auto_vec<const region *> to_purge;
230 for (auto iter : *this)
232 const region *iter_reg = iter.first;
233 const svalue *iter_sval = iter.second;
234 if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
235 to_purge.safe_push (iter_reg);
237 for (auto iter : to_purge)
238 m_hash_map.remove (iter);
241 /* class region_model. */
243 /* Ctor for region_model: construct an "empty" model. */
245 region_model::region_model (region_model_manager *mgr)
246 : m_mgr (mgr), m_store (), m_current_frame (NULL),
247 m_dynamic_extents ()
249 m_constraints = new constraint_manager (mgr);
252 /* region_model's copy ctor. */
254 region_model::region_model (const region_model &other)
255 : m_mgr (other.m_mgr), m_store (other.m_store),
256 m_constraints (new constraint_manager (*other.m_constraints)),
257 m_current_frame (other.m_current_frame),
258 m_dynamic_extents (other.m_dynamic_extents)
262 /* region_model's dtor. */
264 region_model::~region_model ()
266 delete m_constraints;
269 /* region_model's assignment operator. */
271 region_model &
272 region_model::operator= (const region_model &other)
274 /* m_mgr is const. */
275 gcc_assert (m_mgr == other.m_mgr);
277 m_store = other.m_store;
279 delete m_constraints;
280 m_constraints = new constraint_manager (*other.m_constraints);
282 m_current_frame = other.m_current_frame;
284 m_dynamic_extents = other.m_dynamic_extents;
286 return *this;
289 /* Equality operator for region_model.
291 Amongst other things this directly compares the stores and the constraint
292 managers, so for this to be meaningful both this and OTHER should
293 have been canonicalized. */
295 bool
296 region_model::operator== (const region_model &other) const
298 /* We can only compare instances that use the same manager. */
299 gcc_assert (m_mgr == other.m_mgr);
301 if (m_store != other.m_store)
302 return false;
304 if (*m_constraints != *other.m_constraints)
305 return false;
307 if (m_current_frame != other.m_current_frame)
308 return false;
310 if (m_dynamic_extents != other.m_dynamic_extents)
311 return false;
313 gcc_checking_assert (hash () == other.hash ());
315 return true;
318 /* Generate a hash value for this region_model. */
320 hashval_t
321 region_model::hash () const
323 hashval_t result = m_store.hash ();
324 result ^= m_constraints->hash ();
325 return result;
328 /* Dump a representation of this model to PP, showing the
329 stack, the store, and any constraints.
330 Use SIMPLE to control how svalues and regions are printed. */
332 void
333 region_model::dump_to_pp (pretty_printer *pp, bool simple,
334 bool multiline) const
336 /* Dump stack. */
337 pp_printf (pp, "stack depth: %i", get_stack_depth ());
338 if (multiline)
339 pp_newline (pp);
340 else
341 pp_string (pp, " {");
342 for (const frame_region *iter_frame = m_current_frame; iter_frame;
343 iter_frame = iter_frame->get_calling_frame ())
345 if (multiline)
346 pp_string (pp, " ");
347 else if (iter_frame != m_current_frame)
348 pp_string (pp, ", ");
349 pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
350 iter_frame->dump_to_pp (pp, simple);
351 if (multiline)
352 pp_newline (pp);
354 if (!multiline)
355 pp_string (pp, "}");
357 /* Dump store. */
358 if (!multiline)
359 pp_string (pp, ", {");
360 m_store.dump_to_pp (pp, simple, multiline,
361 m_mgr->get_store_manager ());
362 if (!multiline)
363 pp_string (pp, "}");
365 /* Dump constraints. */
366 pp_string (pp, "constraint_manager:");
367 if (multiline)
368 pp_newline (pp);
369 else
370 pp_string (pp, " {");
371 m_constraints->dump_to_pp (pp, multiline);
372 if (!multiline)
373 pp_string (pp, "}");
375 /* Dump sizes of dynamic regions, if any are known. */
376 if (!m_dynamic_extents.is_empty ())
378 pp_string (pp, "dynamic_extents:");
379 m_dynamic_extents.dump_to_pp (pp, simple, multiline);
383 /* Dump a representation of this model to FILE. */
385 void
386 region_model::dump (FILE *fp, bool simple, bool multiline) const
388 pretty_printer pp;
389 pp_format_decoder (&pp) = default_tree_printer;
390 pp_show_color (&pp) = pp_show_color (global_dc->printer);
391 pp.buffer->stream = fp;
392 dump_to_pp (&pp, simple, multiline);
393 pp_newline (&pp);
394 pp_flush (&pp);
397 /* Dump a multiline representation of this model to stderr. */
399 DEBUG_FUNCTION void
400 region_model::dump (bool simple) const
402 dump (stderr, simple, true);
405 /* Dump a multiline representation of this model to stderr. */
407 DEBUG_FUNCTION void
408 region_model::debug () const
410 dump (true);
413 /* Assert that this object is valid. */
415 void
416 region_model::validate () const
418 m_store.validate ();
421 /* Canonicalize the store and constraints, to maximize the chance of
422 equality between region_model instances. */
424 void
425 region_model::canonicalize ()
427 m_store.canonicalize (m_mgr->get_store_manager ());
428 m_constraints->canonicalize ();
431 /* Return true if this region_model is in canonical form. */
433 bool
434 region_model::canonicalized_p () const
436 region_model copy (*this);
437 copy.canonicalize ();
438 return *this == copy;
441 /* See the comment for store::loop_replay_fixup. */
443 void
444 region_model::loop_replay_fixup (const region_model *dst_state)
446 m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
449 /* A subclass of pending_diagnostic for complaining about uses of
450 poisoned values. */
452 class poisoned_value_diagnostic
453 : public pending_diagnostic_subclass<poisoned_value_diagnostic>
455 public:
456 poisoned_value_diagnostic (tree expr, enum poison_kind pkind)
457 : m_expr (expr), m_pkind (pkind)
460 const char *get_kind () const FINAL OVERRIDE { return "poisoned_value_diagnostic"; }
462 bool use_of_uninit_p () const FINAL OVERRIDE
464 return m_pkind == POISON_KIND_UNINIT;
467 bool operator== (const poisoned_value_diagnostic &other) const
469 return m_expr == other.m_expr;
472 bool emit (rich_location *rich_loc) FINAL OVERRIDE
474 switch (m_pkind)
476 default:
477 gcc_unreachable ();
478 case POISON_KIND_UNINIT:
480 diagnostic_metadata m;
481 m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable". */
482 return warning_meta (rich_loc, m,
483 OPT_Wanalyzer_use_of_uninitialized_value,
484 "use of uninitialized value %qE",
485 m_expr);
487 break;
488 case POISON_KIND_FREED:
490 diagnostic_metadata m;
491 m.add_cwe (416); /* "CWE-416: Use After Free". */
492 return warning_meta (rich_loc, m,
493 OPT_Wanalyzer_use_after_free,
494 "use after %<free%> of %qE",
495 m_expr);
497 break;
498 case POISON_KIND_POPPED_STACK:
500 /* TODO: which CWE? */
501 return warning_at
502 (rich_loc,
503 OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame,
504 "dereferencing pointer %qE to within stale stack frame",
505 m_expr);
507 break;
511 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
513 switch (m_pkind)
515 default:
516 gcc_unreachable ();
517 case POISON_KIND_UNINIT:
518 return ev.formatted_print ("use of uninitialized value %qE here",
519 m_expr);
520 case POISON_KIND_FREED:
521 return ev.formatted_print ("use after %<free%> of %qE here",
522 m_expr);
523 case POISON_KIND_POPPED_STACK:
524 return ev.formatted_print
525 ("dereferencing pointer %qE to within stale stack frame",
526 m_expr);
530 private:
531 tree m_expr;
532 enum poison_kind m_pkind;
535 /* A subclass of pending_diagnostic for complaining about shifts
536 by negative counts. */
538 class shift_count_negative_diagnostic
539 : public pending_diagnostic_subclass<shift_count_negative_diagnostic>
541 public:
542 shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
543 : m_assign (assign), m_count_cst (count_cst)
546 const char *get_kind () const FINAL OVERRIDE
548 return "shift_count_negative_diagnostic";
551 bool operator== (const shift_count_negative_diagnostic &other) const
553 return (m_assign == other.m_assign
554 && same_tree_p (m_count_cst, other.m_count_cst));
557 bool emit (rich_location *rich_loc) FINAL OVERRIDE
559 return warning_at (rich_loc, OPT_Wanalyzer_shift_count_negative,
560 "shift by negative count (%qE)", m_count_cst);
563 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
565 return ev.formatted_print ("shift by negative amount here (%qE)", m_count_cst);
568 private:
569 const gassign *m_assign;
570 tree m_count_cst;
573 /* A subclass of pending_diagnostic for complaining about shifts
574 by counts >= the width of the operand type. */
576 class shift_count_overflow_diagnostic
577 : public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
579 public:
580 shift_count_overflow_diagnostic (const gassign *assign,
581 int operand_precision,
582 tree count_cst)
583 : m_assign (assign), m_operand_precision (operand_precision),
584 m_count_cst (count_cst)
587 const char *get_kind () const FINAL OVERRIDE
589 return "shift_count_overflow_diagnostic";
592 bool operator== (const shift_count_overflow_diagnostic &other) const
594 return (m_assign == other.m_assign
595 && m_operand_precision == other.m_operand_precision
596 && same_tree_p (m_count_cst, other.m_count_cst));
599 bool emit (rich_location *rich_loc) FINAL OVERRIDE
601 return warning_at (rich_loc, OPT_Wanalyzer_shift_count_overflow,
602 "shift by count (%qE) >= precision of type (%qi)",
603 m_count_cst, m_operand_precision);
606 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
608 return ev.formatted_print ("shift by count %qE here", m_count_cst);
611 private:
612 const gassign *m_assign;
613 int m_operand_precision;
614 tree m_count_cst;
617 /* If ASSIGN is a stmt that can be modelled via
618 set_value (lhs_reg, SVALUE, CTXT)
619 for some SVALUE, get the SVALUE.
620 Otherwise return NULL. */
622 const svalue *
623 region_model::get_gassign_result (const gassign *assign,
624 region_model_context *ctxt)
626 tree lhs = gimple_assign_lhs (assign);
627 tree rhs1 = gimple_assign_rhs1 (assign);
628 enum tree_code op = gimple_assign_rhs_code (assign);
629 switch (op)
631 default:
632 return NULL;
634 case POINTER_PLUS_EXPR:
636 /* e.g. "_1 = a_10(D) + 12;" */
637 tree ptr = rhs1;
638 tree offset = gimple_assign_rhs2 (assign);
640 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
641 const svalue *offset_sval = get_rvalue (offset, ctxt);
642 /* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
643 is an integer of type sizetype". */
644 offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);
646 const svalue *sval_binop
647 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
648 ptr_sval, offset_sval);
649 return sval_binop;
651 break;
653 case POINTER_DIFF_EXPR:
655 /* e.g. "_1 = p_2(D) - q_3(D);". */
656 tree rhs2 = gimple_assign_rhs2 (assign);
657 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
658 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
660 // TODO: perhaps fold to zero if they're known to be equal?
662 const svalue *sval_binop
663 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
664 rhs1_sval, rhs2_sval);
665 return sval_binop;
667 break;
669 /* Assignments of the form
670 set_value (lvalue (LHS), rvalue (EXPR))
671 for various EXPR.
672 We already have the lvalue for the LHS above, as "lhs_reg". */
673 case ADDR_EXPR: /* LHS = &RHS; */
674 case BIT_FIELD_REF:
675 case COMPONENT_REF: /* LHS = op0.op1; */
676 case MEM_REF:
677 case REAL_CST:
678 case COMPLEX_CST:
679 case VECTOR_CST:
680 case INTEGER_CST:
681 case ARRAY_REF:
682 case SSA_NAME: /* LHS = VAR; */
683 case VAR_DECL: /* LHS = VAR; */
684 case PARM_DECL:/* LHS = VAR; */
685 case REALPART_EXPR:
686 case IMAGPART_EXPR:
687 return get_rvalue (rhs1, ctxt);
689 case ABS_EXPR:
690 case ABSU_EXPR:
691 case CONJ_EXPR:
692 case BIT_NOT_EXPR:
693 case FIX_TRUNC_EXPR:
694 case FLOAT_EXPR:
695 case NEGATE_EXPR:
696 case NOP_EXPR:
697 case VIEW_CONVERT_EXPR:
699 /* Unary ops. */
700 const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
701 const svalue *sval_unaryop
702 = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
703 return sval_unaryop;
706 case EQ_EXPR:
707 case GE_EXPR:
708 case LE_EXPR:
709 case NE_EXPR:
710 case GT_EXPR:
711 case LT_EXPR:
712 case UNORDERED_EXPR:
713 case ORDERED_EXPR:
715 tree rhs2 = gimple_assign_rhs2 (assign);
717 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
718 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
720 if (TREE_TYPE (lhs) == boolean_type_node)
722 /* Consider constraints between svalues. */
723 tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
724 if (t.is_known ())
725 return m_mgr->get_or_create_constant_svalue
726 (t.is_true () ? boolean_true_node : boolean_false_node);
729 /* Otherwise, generate a symbolic binary op. */
730 const svalue *sval_binop
731 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
732 rhs1_sval, rhs2_sval);
733 return sval_binop;
735 break;
737 case PLUS_EXPR:
738 case MINUS_EXPR:
739 case MULT_EXPR:
740 case MULT_HIGHPART_EXPR:
741 case TRUNC_DIV_EXPR:
742 case CEIL_DIV_EXPR:
743 case FLOOR_DIV_EXPR:
744 case ROUND_DIV_EXPR:
745 case TRUNC_MOD_EXPR:
746 case CEIL_MOD_EXPR:
747 case FLOOR_MOD_EXPR:
748 case ROUND_MOD_EXPR:
749 case RDIV_EXPR:
750 case EXACT_DIV_EXPR:
751 case LSHIFT_EXPR:
752 case RSHIFT_EXPR:
753 case LROTATE_EXPR:
754 case RROTATE_EXPR:
755 case BIT_IOR_EXPR:
756 case BIT_XOR_EXPR:
757 case BIT_AND_EXPR:
758 case MIN_EXPR:
759 case MAX_EXPR:
760 case COMPLEX_EXPR:
762 /* Binary ops. */
763 tree rhs2 = gimple_assign_rhs2 (assign);
765 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
766 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
768 if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
770 /* "INT34-C. Do not shift an expression by a negative number of bits
771 or by greater than or equal to the number of bits that exist in
772 the operand." */
773 if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
774 if (TREE_CODE (rhs2_cst) == INTEGER_CST)
776 if (tree_int_cst_sgn (rhs2_cst) < 0)
777 ctxt->warn (new shift_count_negative_diagnostic
778 (assign, rhs2_cst));
779 else if (compare_tree_int (rhs2_cst,
780 TYPE_PRECISION (TREE_TYPE (rhs1)))
781 >= 0)
782 ctxt->warn (new shift_count_overflow_diagnostic
783 (assign, TYPE_PRECISION (TREE_TYPE (rhs1)),
784 rhs2_cst));
788 const svalue *sval_binop
789 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
790 rhs1_sval, rhs2_sval);
791 return sval_binop;
794 /* Vector expressions. In theory we could implement these elementwise,
795 but for now, simply return unknown values. */
796 case VEC_DUPLICATE_EXPR:
797 case VEC_SERIES_EXPR:
798 case VEC_COND_EXPR:
799 case VEC_PERM_EXPR:
800 case VEC_WIDEN_MULT_HI_EXPR:
801 case VEC_WIDEN_MULT_LO_EXPR:
802 case VEC_WIDEN_MULT_EVEN_EXPR:
803 case VEC_WIDEN_MULT_ODD_EXPR:
804 case VEC_UNPACK_HI_EXPR:
805 case VEC_UNPACK_LO_EXPR:
806 case VEC_UNPACK_FLOAT_HI_EXPR:
807 case VEC_UNPACK_FLOAT_LO_EXPR:
808 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
809 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
810 case VEC_PACK_TRUNC_EXPR:
811 case VEC_PACK_SAT_EXPR:
812 case VEC_PACK_FIX_TRUNC_EXPR:
813 case VEC_PACK_FLOAT_EXPR:
814 case VEC_WIDEN_LSHIFT_HI_EXPR:
815 case VEC_WIDEN_LSHIFT_LO_EXPR:
816 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
820 /* Check for SVAL being poisoned, adding a warning to CTXT.
821 Return SVAL, or, if a warning is added, another value, to avoid
822 repeatedly complaining about the same poisoned value in followup code. */
824 const svalue *
825 region_model::check_for_poison (const svalue *sval,
826 tree expr,
827 region_model_context *ctxt) const
829 if (!ctxt)
830 return sval;
832 if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
834 /* If we have an SSA name for a temporary, we don't want to print
835 '<unknown>'.
836 Poisoned values are shared by type, and so we can't reconstruct
837 the tree other than via the def stmts, using
838 fixup_tree_for_diagnostic. */
839 tree diag_arg = fixup_tree_for_diagnostic (expr);
840 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
841 if (ctxt->warn (new poisoned_value_diagnostic (diag_arg, pkind)))
843 /* We only want to report use of a poisoned value at the first
844 place it gets used; return an unknown value to avoid generating
845 a chain of followup warnings. */
846 sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
849 return sval;
852 return sval;
855 /* Update this model for the ASSIGN stmt, using CTXT to report any
856 diagnostics. */
858 void
859 region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
861 tree lhs = gimple_assign_lhs (assign);
862 tree rhs1 = gimple_assign_rhs1 (assign);
864 const region *lhs_reg = get_lvalue (lhs, ctxt);
866 /* Most assignments are handled by:
867 set_value (lhs_reg, SVALUE, CTXT)
868 for some SVALUE. */
869 if (const svalue *sval = get_gassign_result (assign, ctxt))
871 tree expr = get_diagnostic_tree_for_gassign (assign);
872 check_for_poison (sval, expr, ctxt);
873 set_value (lhs_reg, sval, ctxt);
874 return;
877 enum tree_code op = gimple_assign_rhs_code (assign);
878 switch (op)
880 default:
882 if (0)
883 sorry_at (assign->location, "unhandled assignment op: %qs",
884 get_tree_code_name (op));
885 const svalue *unknown_sval
886 = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
887 set_value (lhs_reg, unknown_sval, ctxt);
889 break;
891 case CONSTRUCTOR:
893 if (TREE_CLOBBER_P (rhs1))
895 /* e.g. "x ={v} {CLOBBER};" */
896 clobber_region (lhs_reg);
898 else
900 /* Any CONSTRUCTOR that survives to this point is either
901 just a zero-init of everything, or a vector. */
902 if (!CONSTRUCTOR_NO_CLEARING (rhs1))
903 zero_fill_region (lhs_reg);
904 unsigned ix;
905 tree index;
906 tree val;
907 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
909 gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
910 if (!index)
911 index = build_int_cst (integer_type_node, ix);
912 gcc_assert (TREE_CODE (index) == INTEGER_CST);
913 const svalue *index_sval
914 = m_mgr->get_or_create_constant_svalue (index);
915 gcc_assert (index_sval);
916 const region *sub_reg
917 = m_mgr->get_element_region (lhs_reg,
918 TREE_TYPE (val),
919 index_sval);
920 const svalue *val_sval = get_rvalue (val, ctxt);
921 set_value (sub_reg, val_sval, ctxt);
925 break;
927 case STRING_CST:
929 /* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};". */
930 const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
931 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
932 ctxt ? ctxt->get_uncertainty () : NULL);
934 break;
938 /* A pending_diagnostic subclass for implementing "__analyzer_dump_path". */
940 class dump_path_diagnostic
941 : public pending_diagnostic_subclass<dump_path_diagnostic>
943 public:
944 bool emit (rich_location *richloc) FINAL OVERRIDE
946 inform (richloc, "path");
947 return true;
950 const char *get_kind () const FINAL OVERRIDE { return "dump_path_diagnostic"; }
952 bool operator== (const dump_path_diagnostic &) const
954 return true;
958 /* Handle the pre-sm-state part of STMT, modifying this object in-place.
959 Write true to *OUT_TERMINATE_PATH if the path should be terminated.
960 Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
961 side effects. */
963 void
964 region_model::on_stmt_pre (const gimple *stmt,
965 bool *out_terminate_path,
966 bool *out_unknown_side_effects,
967 region_model_context *ctxt)
969 switch (gimple_code (stmt))
971 default:
972 /* No-op for now. */
973 break;
975 case GIMPLE_ASSIGN:
977 const gassign *assign = as_a <const gassign *> (stmt);
978 on_assignment (assign, ctxt);
980 break;
982 case GIMPLE_ASM:
984 const gasm *asm_stmt = as_a <const gasm *> (stmt);
985 on_asm_stmt (asm_stmt, ctxt);
987 break;
989 case GIMPLE_CALL:
991 /* Track whether we have a gcall to a function that's not recognized by
992 anything, for which we don't have a function body, or for which we
993 don't know the fndecl. */
994 const gcall *call = as_a <const gcall *> (stmt);
996 /* Debugging/test support. */
997 if (is_special_named_call_p (call, "__analyzer_describe", 2))
998 impl_call_analyzer_describe (call, ctxt);
999 else if (is_special_named_call_p (call, "__analyzer_dump_capacity", 1))
1000 impl_call_analyzer_dump_capacity (call, ctxt);
1001 else if (is_special_named_call_p (call, "__analyzer_dump_path", 0))
1003 /* Handle the builtin "__analyzer_dump_path" by queuing a
1004 diagnostic at this exploded_node. */
1005 ctxt->warn (new dump_path_diagnostic ());
1007 else if (is_special_named_call_p (call, "__analyzer_dump_region_model",
1010 /* Handle the builtin "__analyzer_dump_region_model" by dumping
1011 the region model's state to stderr. */
1012 dump (false);
1014 else if (is_special_named_call_p (call, "__analyzer_eval", 1))
1015 impl_call_analyzer_eval (call, ctxt);
1016 else if (is_special_named_call_p (call, "__analyzer_break", 0))
1018 /* Handle the builtin "__analyzer_break" by triggering a
1019 breakpoint. */
1020 /* TODO: is there a good cross-platform way to do this? */
1021 raise (SIGINT);
1023 else if (is_special_named_call_p (call,
1024 "__analyzer_dump_exploded_nodes",
1027 /* This is handled elsewhere. */
1029 else
1030 *out_unknown_side_effects = on_call_pre (call, ctxt,
1031 out_terminate_path);
1033 break;
1035 case GIMPLE_RETURN:
1037 const greturn *return_ = as_a <const greturn *> (stmt);
1038 on_return (return_, ctxt);
1040 break;
1044 /* Update this model for the CALL stmt, using CTXT to report any
1045 diagnostics - the first half.
1047 Updates to the region_model that should be made *before* sm-states
1048 are updated are done here; other updates to the region_model are done
1049 in region_model::on_call_post.
1051 Return true if the function call has unknown side effects (it wasn't
1052 recognized and we don't have a body for it, or are unable to tell which
1053 fndecl it is).
1055 Write true to *OUT_TERMINATE_PATH if this execution path should be
1056 terminated (e.g. the function call terminates the process). */
1058 bool
1059 region_model::on_call_pre (const gcall *call, region_model_context *ctxt,
1060 bool *out_terminate_path)
1062 call_details cd (call, this, ctxt);
1064 bool unknown_side_effects = false;
1066 /* Some of the cases below update the lhs of the call based on the
1067 return value, but not all. Provide a default value, which may
1068 get overwritten below. */
1069 if (tree lhs = gimple_call_lhs (call))
1071 const region *lhs_region = get_lvalue (lhs, ctxt);
1072 const svalue *sval
1073 = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
1074 lhs_region);
1075 purge_state_involving (sval, ctxt);
1076 set_value (lhs_region, sval, ctxt);
1079 if (gimple_call_internal_p (call))
1081 switch (gimple_call_internal_fn (call))
1083 default:
1084 break;
1085 case IFN_BUILTIN_EXPECT:
1086 impl_call_builtin_expect (cd);
1087 return false;
1088 case IFN_UBSAN_BOUNDS:
1089 return false;
1093 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
1095 /* The various impl_call_* member functions are implemented
1096 in region-model-impl-calls.cc.
1097 Having them split out into separate functions makes it easier
1098 to put breakpoints on the handling of specific functions. */
1100 if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
1101 && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
1102 switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
1104 default:
1105 if (!DECL_PURE_P (callee_fndecl))
1106 unknown_side_effects = true;
1107 break;
1108 case BUILT_IN_ALLOCA:
1109 case BUILT_IN_ALLOCA_WITH_ALIGN:
1110 impl_call_alloca (cd);
1111 return false;
1112 case BUILT_IN_CALLOC:
1113 impl_call_calloc (cd);
1114 return false;
1115 case BUILT_IN_EXPECT:
1116 case BUILT_IN_EXPECT_WITH_PROBABILITY:
1117 impl_call_builtin_expect (cd);
1118 return false;
1119 case BUILT_IN_FREE:
1120 /* Handle in "on_call_post". */
1121 break;
1122 case BUILT_IN_MALLOC:
1123 impl_call_malloc (cd);
1124 return false;
1125 case BUILT_IN_MEMCPY:
1126 case BUILT_IN_MEMCPY_CHK:
1127 impl_call_memcpy (cd);
1128 return false;
1129 case BUILT_IN_MEMSET:
1130 case BUILT_IN_MEMSET_CHK:
1131 impl_call_memset (cd);
1132 return false;
1133 break;
1134 case BUILT_IN_REALLOC:
1135 return false;
1136 case BUILT_IN_STRCPY:
1137 case BUILT_IN_STRCPY_CHK:
1138 impl_call_strcpy (cd);
1139 return false;
1140 case BUILT_IN_STRLEN:
1141 impl_call_strlen (cd);
1142 return false;
1144 case BUILT_IN_STACK_SAVE:
1145 case BUILT_IN_STACK_RESTORE:
1146 return false;
1148 /* Stdio builtins. */
1149 case BUILT_IN_FPRINTF:
1150 case BUILT_IN_FPRINTF_UNLOCKED:
1151 case BUILT_IN_PUTC:
1152 case BUILT_IN_PUTC_UNLOCKED:
1153 case BUILT_IN_FPUTC:
1154 case BUILT_IN_FPUTC_UNLOCKED:
1155 case BUILT_IN_FPUTS:
1156 case BUILT_IN_FPUTS_UNLOCKED:
1157 case BUILT_IN_FWRITE:
1158 case BUILT_IN_FWRITE_UNLOCKED:
1159 case BUILT_IN_PRINTF:
1160 case BUILT_IN_PRINTF_UNLOCKED:
1161 case BUILT_IN_PUTCHAR:
1162 case BUILT_IN_PUTCHAR_UNLOCKED:
1163 case BUILT_IN_PUTS:
1164 case BUILT_IN_PUTS_UNLOCKED:
1165 case BUILT_IN_VFPRINTF:
1166 case BUILT_IN_VPRINTF:
1167 /* These stdio builtins have external effects that are out
1168 of scope for the analyzer: we only want to model the effects
1169 on the return value. */
1170 break;
1172 else if (is_named_call_p (callee_fndecl, "malloc", call, 1))
1174 impl_call_malloc (cd);
1175 return false;
1177 else if (is_named_call_p (callee_fndecl, "calloc", call, 2))
1179 impl_call_calloc (cd);
1180 return false;
1182 else if (is_named_call_p (callee_fndecl, "alloca", call, 1))
1184 impl_call_alloca (cd);
1185 return false;
1187 else if (is_named_call_p (callee_fndecl, "realloc", call, 2))
1189 impl_call_realloc (cd);
1190 return false;
1192 else if (is_named_call_p (callee_fndecl, "error"))
1194 if (impl_call_error (cd, 3, out_terminate_path))
1195 return false;
1196 else
1197 unknown_side_effects = true;
1199 else if (is_named_call_p (callee_fndecl, "error_at_line"))
1201 if (impl_call_error (cd, 5, out_terminate_path))
1202 return false;
1203 else
1204 unknown_side_effects = true;
1206 else if (is_named_call_p (callee_fndecl, "fgets", call, 3)
1207 || is_named_call_p (callee_fndecl, "fgets_unlocked", call, 3))
1209 impl_call_fgets (cd);
1210 return false;
1212 else if (is_named_call_p (callee_fndecl, "fread", call, 4))
1214 impl_call_fread (cd);
1215 return false;
1217 else if (is_named_call_p (callee_fndecl, "getchar", call, 0))
1219 /* No side-effects (tracking stream state is out-of-scope
1220 for the analyzer). */
1222 else if (is_named_call_p (callee_fndecl, "memset", call, 3)
1223 && POINTER_TYPE_P (cd.get_arg_type (0)))
1225 impl_call_memset (cd);
1226 return false;
1228 else if (is_named_call_p (callee_fndecl, "strlen", call, 1)
1229 && POINTER_TYPE_P (cd.get_arg_type (0)))
1231 impl_call_strlen (cd);
1232 return false;
1234 else if (is_named_call_p (callee_fndecl, "operator new", call, 1))
1236 impl_call_operator_new (cd);
1237 return false;
1239 else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
1241 impl_call_operator_new (cd);
1242 return false;
1244 else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1245 || is_named_call_p (callee_fndecl, "operator delete", call, 2)
1246 || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1248 /* Handle in "on_call_post". */
1250 else if (!fndecl_has_gimple_body_p (callee_fndecl)
1251 && !DECL_PURE_P (callee_fndecl)
1252 && !fndecl_built_in_p (callee_fndecl))
1253 unknown_side_effects = true;
1255 else
1256 unknown_side_effects = true;
1258 return unknown_side_effects;
1261 /* Update this model for the CALL stmt, using CTXT to report any
1262 diagnostics - the second half.
1264 Updates to the region_model that should be made *after* sm-states
1265 are updated are done here; other updates to the region_model are done
1266 in region_model::on_call_pre.
1268 If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
1269 to purge state. */
1271 void
1272 region_model::on_call_post (const gcall *call,
1273 bool unknown_side_effects,
1274 region_model_context *ctxt)
1276 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
1278 call_details cd (call, this, ctxt);
1279 if (is_named_call_p (callee_fndecl, "free", call, 1))
1281 impl_call_free (cd);
1282 return;
1284 if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1285 || is_named_call_p (callee_fndecl, "operator delete", call, 2)
1286 || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1288 impl_call_operator_delete (cd);
1289 return;
1291 /* Was this fndecl referenced by
1292 __attribute__((malloc(FOO)))? */
1293 if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
1295 impl_deallocation_call (cd);
1296 return;
1298 if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
1299 && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
1300 switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
1302 default:
1303 break;
1304 case BUILT_IN_REALLOC:
1305 impl_call_realloc (cd);
1306 return;
1310 if (unknown_side_effects)
1311 handle_unrecognized_call (call, ctxt);
1314 /* Purge state involving SVAL from this region_model, using CTXT
1315 (if non-NULL) to purge other state in a program_state.
1317 For example, if we're at the def-stmt of an SSA name, then we need to
1318 purge any state for svalues that involve that SSA name. This avoids
1319 false positives in loops, since a symbolic value referring to the
1320 SSA name will be referring to the previous value of that SSA name.
1322 For example, in:
1323 while ((e = hashmap_iter_next(&iter))) {
1324 struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
1325 free (e_strbuf->value);
1327 at the def-stmt of e_8:
1328 e_8 = hashmap_iter_next (&iter);
1329 we should purge the "freed" state of:
1330 INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
1331 which is the "e_strbuf->value" value from the previous iteration,
1332 or we will erroneously report a double-free - the "e_8" within it
1333 refers to the previous value. */
1335 void
1336 region_model::purge_state_involving (const svalue *sval,
1337 region_model_context *ctxt)
1339 if (!sval->can_have_associated_state_p ())
1340 return;
1341 m_store.purge_state_involving (sval, m_mgr);
1342 m_constraints->purge_state_involving (sval);
1343 m_dynamic_extents.purge_state_involving (sval);
1344 if (ctxt)
1345 ctxt->purge_state_involving (sval);
1348 /* Handle a call CALL to a function with unknown behavior.
1350 Traverse the regions in this model, determining what regions are
1351 reachable from pointer arguments to CALL and from global variables,
1352 recursively.
1354 Set all reachable regions to new unknown values and purge sm-state
1355 from their values, and from values that point to them. */
1357 void
1358 region_model::handle_unrecognized_call (const gcall *call,
1359 region_model_context *ctxt)
1361 tree fndecl = get_fndecl_for_call (call, ctxt);
1363 reachable_regions reachable_regs (this);
1365 /* Determine the reachable regions and their mutability. */
1367 /* Add globals and regions that already escaped in previous
1368 unknown calls. */
1369 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
1370 &reachable_regs);
1372 /* Params that are pointers. */
1373 tree iter_param_types = NULL_TREE;
1374 if (fndecl)
1375 iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
1376 for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
1378 /* Track expected param type, where available. */
1379 tree param_type = NULL_TREE;
1380 if (iter_param_types)
1382 param_type = TREE_VALUE (iter_param_types);
1383 gcc_assert (param_type);
1384 iter_param_types = TREE_CHAIN (iter_param_types);
1387 tree parm = gimple_call_arg (call, arg_idx);
1388 const svalue *parm_sval = get_rvalue (parm, ctxt);
1389 reachable_regs.handle_parm (parm_sval, param_type);
1393 uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;
1395 /* Purge sm-state for the svalues that were reachable,
1396 both in non-mutable and mutable form. */
1397 for (svalue_set::iterator iter
1398 = reachable_regs.begin_reachable_svals ();
1399 iter != reachable_regs.end_reachable_svals (); ++iter)
1401 const svalue *sval = (*iter);
1402 if (ctxt)
1403 ctxt->on_unknown_change (sval, false);
1405 for (svalue_set::iterator iter
1406 = reachable_regs.begin_mutable_svals ();
1407 iter != reachable_regs.end_mutable_svals (); ++iter)
1409 const svalue *sval = (*iter);
1410 if (ctxt)
1411 ctxt->on_unknown_change (sval, true);
1412 if (uncertainty)
1413 uncertainty->on_mutable_sval_at_unknown_call (sval);
1416 /* Mark any clusters that have escaped. */
1417 reachable_regs.mark_escaped_clusters (ctxt);
1419 /* Update bindings for all clusters that have escaped, whether above,
1420 or previously. */
1421 m_store.on_unknown_fncall (call, m_mgr->get_store_manager ());
1423 /* Purge dynamic extents from any regions that have escaped mutably:
1424 realloc could have been called on them. */
1425 for (hash_set<const region *>::iterator
1426 iter = reachable_regs.begin_mutable_base_regs ();
1427 iter != reachable_regs.end_mutable_base_regs ();
1428 ++iter)
1430 const region *base_reg = (*iter);
1431 unset_dynamic_extents (base_reg);
1435 /* Traverse the regions in this model, determining what regions are
1436 reachable from the store and populating *OUT.
1438 If EXTRA_SVAL is non-NULL, treat it as an additional "root"
1439 for reachability (for handling return values from functions when
1440 analyzing return of the only function on the stack).
1442 If UNCERTAINTY is non-NULL, treat any svalues that were recorded
1443 within it as being maybe-bound as additional "roots" for reachability.
1445 Find svalues that haven't leaked. */
1447 void
1448 region_model::get_reachable_svalues (svalue_set *out,
1449 const svalue *extra_sval,
1450 const uncertainty_t *uncertainty)
1452 reachable_regions reachable_regs (this);
1454 /* Add globals and regions that already escaped in previous
1455 unknown calls. */
1456 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
1457 &reachable_regs);
1459 if (extra_sval)
1460 reachable_regs.handle_sval (extra_sval);
1462 if (uncertainty)
1463 for (uncertainty_t::iterator iter
1464 = uncertainty->begin_maybe_bound_svals ();
1465 iter != uncertainty->end_maybe_bound_svals (); ++iter)
1466 reachable_regs.handle_sval (*iter);
1468 /* Get regions for locals that have explicitly bound values. */
1469 for (store::cluster_map_t::iterator iter = m_store.begin ();
1470 iter != m_store.end (); ++iter)
1472 const region *base_reg = (*iter).first;
1473 if (const region *parent = base_reg->get_parent_region ())
1474 if (parent->get_kind () == RK_FRAME)
1475 reachable_regs.add (base_reg, false);
1478 /* Populate *OUT based on the values that were reachable. */
1479 for (svalue_set::iterator iter
1480 = reachable_regs.begin_reachable_svals ();
1481 iter != reachable_regs.end_reachable_svals (); ++iter)
1482 out->add (*iter);
1485 /* Update this model for the RETURN_STMT, using CTXT to report any
1486 diagnostics. */
1488 void
1489 region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
1491 tree callee = get_current_function ()->decl;
1492 tree lhs = DECL_RESULT (callee);
1493 tree rhs = gimple_return_retval (return_stmt);
1495 if (lhs && rhs)
1496 copy_region (get_lvalue (lhs, ctxt), get_lvalue (rhs, ctxt), ctxt);
1499 /* Update this model for a call and return of setjmp/sigsetjmp at CALL within
1500 ENODE, using CTXT to report any diagnostics.
1502 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
1503 0), as opposed to any second return due to longjmp/sigsetjmp. */
1505 void
1506 region_model::on_setjmp (const gcall *call, const exploded_node *enode,
1507 region_model_context *ctxt)
1509 const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
1510 const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
1511 ctxt);
1513 /* Create a setjmp_svalue for this call and store it in BUF_REG's
1514 region. */
1515 if (buf_reg)
1517 setjmp_record r (enode, call);
1518 const svalue *sval
1519 = m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
1520 set_value (buf_reg, sval, ctxt);
1523 /* Direct calls to setjmp return 0. */
1524 if (tree lhs = gimple_call_lhs (call))
1526 const svalue *new_sval
1527 = m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
1528 const region *lhs_reg = get_lvalue (lhs, ctxt);
1529 set_value (lhs_reg, new_sval, ctxt);
1533 /* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
1534 to a "setjmp" at SETJMP_CALL where the final stack depth should be
1535 SETJMP_STACK_DEPTH. Pop any stack frames. Leak detection is *not*
1536 done, and should be done by the caller. */
1538 void
1539 region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
1540 int setjmp_stack_depth, region_model_context *ctxt)
1542 /* Evaluate the val, using the frame of the "longjmp". */
1543 tree fake_retval = gimple_call_arg (longjmp_call, 1);
1544 const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);
1546 /* Pop any frames until we reach the stack depth of the function where
1547 setjmp was called. */
1548 gcc_assert (get_stack_depth () >= setjmp_stack_depth);
1549 while (get_stack_depth () > setjmp_stack_depth)
1550 pop_frame (NULL, NULL, ctxt);
1552 gcc_assert (get_stack_depth () == setjmp_stack_depth);
1554 /* Assign to LHS of "setjmp" in new_state. */
1555 if (tree lhs = gimple_call_lhs (setjmp_call))
1557 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
1558 const svalue *zero_sval
1559 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
1560 tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
1561 /* If we have 0, use 1. */
1562 if (eq_zero.is_true ())
1564 const svalue *one_sval
1565 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
1566 fake_retval_sval = one_sval;
1568 else
1570 /* Otherwise note that the value is nonzero. */
1571 m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
1574 /* Decorate the return value from setjmp as being unmergeable,
1575 so that we don't attempt to merge states with it as zero
1576 with states in which it's nonzero, leading to a clean distinction
1577 in the exploded_graph betweeen the first return and the second
1578 return. */
1579 fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);
1581 const region *lhs_reg = get_lvalue (lhs, ctxt);
1582 set_value (lhs_reg, fake_retval_sval, ctxt);
1586 /* Update this region_model for a phi stmt of the form
1587 LHS = PHI <...RHS...>.
1588 where RHS is for the appropriate edge.
1589 Get state from OLD_STATE so that all of the phi stmts for a basic block
1590 are effectively handled simultaneously. */
1592 void
1593 region_model::handle_phi (const gphi *phi,
1594 tree lhs, tree rhs,
1595 const region_model &old_state,
1596 region_model_context *ctxt)
1598 /* For now, don't bother tracking the .MEM SSA names. */
1599 if (tree var = SSA_NAME_VAR (lhs))
1600 if (TREE_CODE (var) == VAR_DECL)
1601 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
1602 return;
1604 const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
1605 const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
1607 set_value (dst_reg, src_sval, ctxt);
1609 if (ctxt)
1610 ctxt->on_phi (phi, rhs);
1613 /* Implementation of region_model::get_lvalue; the latter adds type-checking.
1615 Get the id of the region for PV within this region_model,
1616 emitting any diagnostics to CTXT. */
1618 const region *
1619 region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
1621 tree expr = pv.m_tree;
1623 gcc_assert (expr);
1625 switch (TREE_CODE (expr))
1627 default:
1628 return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
1629 dump_location_t ());
1631 case ARRAY_REF:
1633 tree array = TREE_OPERAND (expr, 0);
1634 tree index = TREE_OPERAND (expr, 1);
1636 const region *array_reg = get_lvalue (array, ctxt);
1637 const svalue *index_sval = get_rvalue (index, ctxt);
1638 return m_mgr->get_element_region (array_reg,
1639 TREE_TYPE (TREE_TYPE (array)),
1640 index_sval);
1642 break;
1644 case MEM_REF:
1646 tree ptr = TREE_OPERAND (expr, 0);
1647 tree offset = TREE_OPERAND (expr, 1);
1648 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
1649 const svalue *offset_sval = get_rvalue (offset, ctxt);
1650 const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
1651 return m_mgr->get_offset_region (star_ptr,
1652 TREE_TYPE (expr),
1653 offset_sval);
1655 break;
1657 case FUNCTION_DECL:
1658 return m_mgr->get_region_for_fndecl (expr);
1660 case LABEL_DECL:
1661 return m_mgr->get_region_for_label (expr);
1663 case VAR_DECL:
1664 /* Handle globals. */
1665 if (is_global_var (expr))
1666 return m_mgr->get_region_for_global (expr);
1668 /* Fall through. */
1670 case SSA_NAME:
1671 case PARM_DECL:
1672 case RESULT_DECL:
1674 gcc_assert (TREE_CODE (expr) == SSA_NAME
1675 || TREE_CODE (expr) == PARM_DECL
1676 || TREE_CODE (expr) == VAR_DECL
1677 || TREE_CODE (expr) == RESULT_DECL);
1679 int stack_index = pv.m_stack_depth;
1680 const frame_region *frame = get_frame_at_index (stack_index);
1681 gcc_assert (frame);
1682 return frame->get_region_for_local (m_mgr, expr);
1685 case COMPONENT_REF:
1687 /* obj.field */
1688 tree obj = TREE_OPERAND (expr, 0);
1689 tree field = TREE_OPERAND (expr, 1);
1690 const region *obj_reg = get_lvalue (obj, ctxt);
1691 return m_mgr->get_field_region (obj_reg, field);
1693 break;
1695 case STRING_CST:
1696 return m_mgr->get_region_for_string (expr);
1700 /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
1702 static void
1703 assert_compat_types (tree src_type, tree dst_type)
1705 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
1707 #if CHECKING_P
1708 if (!(useless_type_conversion_p (src_type, dst_type)))
1709 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
1710 #endif
1714 /* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
1716 bool
1717 compat_types_p (tree src_type, tree dst_type)
1719 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
1720 if (!(useless_type_conversion_p (src_type, dst_type)))
1721 return false;
1722 return true;
1725 /* Get the region for PV within this region_model,
1726 emitting any diagnostics to CTXT. */
1728 const region *
1729 region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
1731 if (pv.m_tree == NULL_TREE)
1732 return NULL;
1734 const region *result_reg = get_lvalue_1 (pv, ctxt);
1735 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
1736 return result_reg;
1739 /* Get the region for EXPR within this region_model (assuming the most
1740 recent stack frame if it's a local). */
1742 const region *
1743 region_model::get_lvalue (tree expr, region_model_context *ctxt) const
1745 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
1748 /* Implementation of region_model::get_rvalue; the latter adds type-checking.
1750 Get the value of PV within this region_model,
1751 emitting any diagnostics to CTXT. */
1753 const svalue *
1754 region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
1756 gcc_assert (pv.m_tree);
1758 switch (TREE_CODE (pv.m_tree))
1760 default:
1761 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
1763 case ADDR_EXPR:
1765 /* "&EXPR". */
1766 tree expr = pv.m_tree;
1767 tree op0 = TREE_OPERAND (expr, 0);
1768 const region *expr_reg = get_lvalue (op0, ctxt);
1769 return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
1771 break;
1773 case BIT_FIELD_REF:
1775 tree expr = pv.m_tree;
1776 tree op0 = TREE_OPERAND (expr, 0);
1777 const region *reg = get_lvalue (op0, ctxt);
1778 tree num_bits = TREE_OPERAND (expr, 1);
1779 tree first_bit_offset = TREE_OPERAND (expr, 2);
1780 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
1781 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
1782 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
1783 TREE_INT_CST_LOW (num_bits));
1784 return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
1787 case SSA_NAME:
1788 case VAR_DECL:
1789 case PARM_DECL:
1790 case RESULT_DECL:
1791 case ARRAY_REF:
1793 const region *reg = get_lvalue (pv, ctxt);
1794 return get_store_value (reg, ctxt);
1797 case REALPART_EXPR:
1798 case IMAGPART_EXPR:
1799 case VIEW_CONVERT_EXPR:
1801 tree expr = pv.m_tree;
1802 tree arg = TREE_OPERAND (expr, 0);
1803 const svalue *arg_sval = get_rvalue (arg, ctxt);
1804 const svalue *sval_unaryop
1805 = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
1806 arg_sval);
1807 return sval_unaryop;
1810 case INTEGER_CST:
1811 case REAL_CST:
1812 case COMPLEX_CST:
1813 case VECTOR_CST:
1814 case STRING_CST:
1815 return m_mgr->get_or_create_constant_svalue (pv.m_tree);
1817 case POINTER_PLUS_EXPR:
1819 tree expr = pv.m_tree;
1820 tree ptr = TREE_OPERAND (expr, 0);
1821 tree offset = TREE_OPERAND (expr, 1);
1822 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
1823 const svalue *offset_sval = get_rvalue (offset, ctxt);
1824 const svalue *sval_binop
1825 = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
1826 ptr_sval, offset_sval);
1827 return sval_binop;
1830 /* Binary ops. */
1831 case PLUS_EXPR:
1832 case MULT_EXPR:
1834 tree expr = pv.m_tree;
1835 tree arg0 = TREE_OPERAND (expr, 0);
1836 tree arg1 = TREE_OPERAND (expr, 1);
1837 const svalue *arg0_sval = get_rvalue (arg0, ctxt);
1838 const svalue *arg1_sval = get_rvalue (arg1, ctxt);
1839 const svalue *sval_binop
1840 = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
1841 arg0_sval, arg1_sval);
1842 return sval_binop;
1845 case COMPONENT_REF:
1846 case MEM_REF:
1848 const region *ref_reg = get_lvalue (pv, ctxt);
1849 return get_store_value (ref_reg, ctxt);
1851 case OBJ_TYPE_REF:
1853 tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
1854 return get_rvalue (expr, ctxt);
1859 /* Get the value of PV within this region_model,
1860 emitting any diagnostics to CTXT. */
1862 const svalue *
1863 region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
1865 if (pv.m_tree == NULL_TREE)
1866 return NULL;
1868 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
1870 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
1872 result_sval = check_for_poison (result_sval, pv.m_tree, ctxt);
1874 return result_sval;
1877 /* Get the value of EXPR within this region_model (assuming the most
1878 recent stack frame if it's a local). */
1880 const svalue *
1881 region_model::get_rvalue (tree expr, region_model_context *ctxt) const
1883 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
1886 /* Return true if this model is on a path with "main" as the entrypoint
1887 (as opposed to one in which we're merely analyzing a subset of the
1888 path through the code). */
1890 bool
1891 region_model::called_from_main_p () const
1893 if (!m_current_frame)
1894 return false;
1895 /* Determine if the oldest stack frame in this model is for "main". */
1896 const frame_region *frame0 = get_frame_at_index (0);
1897 gcc_assert (frame0);
1898 return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
1901 /* Subroutine of region_model::get_store_value for when REG is (or is within)
1902 a global variable that hasn't been touched since the start of this path
1903 (or was implicitly touched due to a call to an unknown function). */
1905 const svalue *
1906 region_model::get_initial_value_for_global (const region *reg) const
1908 /* Get the decl that REG is for (or is within). */
1909 const decl_region *base_reg
1910 = reg->get_base_region ()->dyn_cast_decl_region ();
1911 gcc_assert (base_reg);
1912 tree decl = base_reg->get_decl ();
1914 /* Special-case: to avoid having to explicitly update all previously
1915 untracked globals when calling an unknown fn, they implicitly have
1916 an unknown value if an unknown call has occurred, unless this is
1917 static to-this-TU and hasn't escaped. Globals that have escaped
1918 are explicitly tracked, so we shouldn't hit this case for them. */
1919 if (m_store.called_unknown_fn_p ()
1920 && TREE_PUBLIC (decl)
1921 && !TREE_READONLY (decl))
1922 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
1924 /* If we are on a path from the entrypoint from "main" and we have a
1925 global decl defined in this TU that hasn't been touched yet, then
1926 the initial value of REG can be taken from the initialization value
1927 of the decl. */
1928 if (called_from_main_p () || TREE_READONLY (decl))
1930 /* Attempt to get the initializer value for base_reg. */
1931 if (const svalue *base_reg_init
1932 = base_reg->get_svalue_for_initializer (m_mgr))
1934 if (reg == base_reg)
1935 return base_reg_init;
1936 else
1938 /* Get the value for REG within base_reg_init. */
1939 binding_cluster c (base_reg);
1940 c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
1941 const svalue *sval
1942 = c.get_any_binding (m_mgr->get_store_manager (), reg);
1943 if (sval)
1945 if (reg->get_type ())
1946 sval = m_mgr->get_or_create_cast (reg->get_type (),
1947 sval);
1948 return sval;
1954 /* Otherwise, return INIT_VAL(REG). */
1955 return m_mgr->get_or_create_initial_value (reg);
1958 /* Get a value for REG, looking it up in the store, or otherwise falling
1959 back to "initial" or "unknown" values.
1960 Use CTXT to report any warnings associated with reading from REG. */
1962 const svalue *
1963 region_model::get_store_value (const region *reg,
1964 region_model_context *ctxt) const
1966 check_region_for_read (reg, ctxt);
1968 /* Special-case: handle var_decls in the constant pool. */
1969 if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
1970 if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
1971 return sval;
1973 const svalue *sval
1974 = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
1975 if (sval)
1977 if (reg->get_type ())
1978 sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
1979 return sval;
1982 /* Special-case: read at a constant index within a STRING_CST. */
1983 if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
1984 if (tree byte_offset_cst
1985 = offset_reg->get_byte_offset ()->maybe_get_constant ())
1986 if (const string_region *str_reg
1987 = reg->get_parent_region ()->dyn_cast_string_region ())
1989 tree string_cst = str_reg->get_string_cst ();
1990 if (const svalue *char_sval
1991 = m_mgr->maybe_get_char_from_string_cst (string_cst,
1992 byte_offset_cst))
1993 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
1996 /* Special-case: read the initial char of a STRING_CST. */
1997 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
1998 if (const string_region *str_reg
1999 = cast_reg->get_original_region ()->dyn_cast_string_region ())
2001 tree string_cst = str_reg->get_string_cst ();
2002 tree byte_offset_cst = build_int_cst (integer_type_node, 0);
2003 if (const svalue *char_sval
2004 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2005 byte_offset_cst))
2006 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
2009 /* Otherwise we implicitly have the initial value of the region
2010 (if the cluster had been touched, binding_cluster::get_any_binding,
2011 would have returned UNKNOWN, and we would already have returned
2012 that above). */
2014 /* Handle globals. */
2015 if (reg->get_base_region ()->get_parent_region ()->get_kind ()
2016 == RK_GLOBALS)
2017 return get_initial_value_for_global (reg);
2019 return m_mgr->get_or_create_initial_value (reg);
2022 /* Return false if REG does not exist, true if it may do.
2023 This is for detecting regions within the stack that don't exist anymore
2024 after frames are popped. */
2026 bool
2027 region_model::region_exists_p (const region *reg) const
2029 /* If within a stack frame, check that the stack frame is live. */
2030 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
2032 /* Check that the current frame is the enclosing frame, or is called
2033 by it. */
2034 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
2035 iter_frame = iter_frame->get_calling_frame ())
2036 if (iter_frame == enclosing_frame)
2037 return true;
2038 return false;
2041 return true;
2044 /* Get a region for referencing PTR_SVAL, creating a region if need be, and
2045 potentially generating warnings via CTXT.
2046 PTR_SVAL must be of pointer type.
2047 PTR_TREE if non-NULL can be used when emitting diagnostics. */
2049 const region *
2050 region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
2051 region_model_context *ctxt) const
2053 gcc_assert (ptr_sval);
2054 gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));
2056 /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
2057 as a constraint. This suppresses false positives from
2058 -Wanalyzer-null-dereference for the case where we later have an
2059 if (PTR_SVAL) that would occur if we considered the false branch
2060 and transitioned the malloc state machine from start->null. */
2061 tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
2062 const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
2063 m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
2065 switch (ptr_sval->get_kind ())
2067 default:
2068 break;
2070 case SK_REGION:
2072 const region_svalue *region_sval
2073 = as_a <const region_svalue *> (ptr_sval);
2074 return region_sval->get_pointee ();
2077 case SK_BINOP:
2079 const binop_svalue *binop_sval
2080 = as_a <const binop_svalue *> (ptr_sval);
2081 switch (binop_sval->get_op ())
2083 case POINTER_PLUS_EXPR:
2085 /* If we have a symbolic value expressing pointer arithmentic,
2086 try to convert it to a suitable region. */
2087 const region *parent_region
2088 = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
2089 const svalue *offset = binop_sval->get_arg1 ();
2090 tree type= TREE_TYPE (ptr_sval->get_type ());
2091 return m_mgr->get_offset_region (parent_region, type, offset);
2093 default:
2094 break;
2097 break;
2099 case SK_POISONED:
2101 if (ctxt)
2103 tree ptr = get_representative_tree (ptr_sval);
2104 /* If we can't get a representative tree for PTR_SVAL
2105 (e.g. if it hasn't been bound into the store), then
2106 fall back on PTR_TREE, if non-NULL. */
2107 if (!ptr)
2108 ptr = ptr_tree;
2109 if (ptr)
2111 const poisoned_svalue *poisoned_sval
2112 = as_a <const poisoned_svalue *> (ptr_sval);
2113 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
2114 ctxt->warn (new poisoned_value_diagnostic (ptr, pkind));
2118 break;
2121 return m_mgr->get_symbolic_region (ptr_sval);
2124 /* Attempt to get BITS within any value of REG, as TYPE.
2125 In particular, extract values from compound_svalues for the case
2126 where there's a concrete binding at BITS.
2127 Return an unknown svalue if we can't handle the given case.
2128 Use CTXT to report any warnings associated with reading from REG. */
2130 const svalue *
2131 region_model::get_rvalue_for_bits (tree type,
2132 const region *reg,
2133 const bit_range &bits,
2134 region_model_context *ctxt) const
2136 const svalue *sval = get_store_value (reg, ctxt);
2137 return m_mgr->get_or_create_bits_within (type, bits, sval);
2140 /* A subclass of pending_diagnostic for complaining about writes to
2141 constant regions of memory. */
2143 class write_to_const_diagnostic
2144 : public pending_diagnostic_subclass<write_to_const_diagnostic>
2146 public:
2147 write_to_const_diagnostic (const region *reg, tree decl)
2148 : m_reg (reg), m_decl (decl)
2151 const char *get_kind () const FINAL OVERRIDE
2153 return "write_to_const_diagnostic";
2156 bool operator== (const write_to_const_diagnostic &other) const
2158 return (m_reg == other.m_reg
2159 && m_decl == other.m_decl);
2162 bool emit (rich_location *rich_loc) FINAL OVERRIDE
2164 bool warned = warning_at (rich_loc, OPT_Wanalyzer_write_to_const,
2165 "write to %<const%> object %qE", m_decl);
2166 if (warned)
2167 inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
2168 return warned;
2171 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
2173 return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
2176 private:
2177 const region *m_reg;
2178 tree m_decl;
2181 /* A subclass of pending_diagnostic for complaining about writes to
2182 string literals. */
2184 class write_to_string_literal_diagnostic
2185 : public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
2187 public:
2188 write_to_string_literal_diagnostic (const region *reg)
2189 : m_reg (reg)
2192 const char *get_kind () const FINAL OVERRIDE
2194 return "write_to_string_literal_diagnostic";
2197 bool operator== (const write_to_string_literal_diagnostic &other) const
2199 return m_reg == other.m_reg;
2202 bool emit (rich_location *rich_loc) FINAL OVERRIDE
2204 return warning_at (rich_loc, OPT_Wanalyzer_write_to_string_literal,
2205 "write to string literal");
2206 /* Ideally we would show the location of the STRING_CST as well,
2207 but it is not available at this point. */
2210 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
2212 return ev.formatted_print ("write to string literal here");
2215 private:
2216 const region *m_reg;
/* Use CTXT to warn if DEST_REG is a region that shouldn't be written to.  */

void
region_model::check_for_writable_region (const region* dest_reg,
					 region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return;

  /* Classify based on the outermost region enclosing DEST_REG, since
     e.g. a write to a field of a const object is still a const write.  */
  const region *base_reg = dest_reg->get_base_region ();
  switch (base_reg->get_kind ())
    {
    default:
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
	tree decl = decl_reg->get_decl ();
	/* Warn about writes to const globals.
	   Don't warn for writes to const locals, and params in particular,
	   since we would warn in push_frame when setting them up (e.g the
	   "this" param is "T* const").  */
	if (TREE_READONLY (decl)
	    && is_global_var (decl))
	  ctxt->warn (new write_to_const_diagnostic (dest_reg, decl));
      }
      break;
    case RK_STRING:
      ctxt->warn (new write_to_string_literal_diagnostic (dest_reg));
      break;
    }
}
/* Get the capacity of REG in bytes.  */

const svalue *
region_model::get_capacity (const region *reg) const
{
  switch (reg->get_kind ())
    {
    default:
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	tree decl = decl_reg->get_decl ();
	if (TREE_CODE (decl) == SSA_NAME)
	  {
	    tree type = TREE_TYPE (decl);
	    tree size = TYPE_SIZE (type);
	    /* NOTE(review): TYPE_SIZE is the size in *bits*, whereas this
	       function is documented as returning bytes, and the
	       decl_init_size path below yields bytes — confirm which unit
	       callers expect here.  */
	    return get_rvalue (size, NULL);
	  }
	else
	  {
	    tree size = decl_init_size (decl, false);
	    if (size)
	      return get_rvalue (size, NULL);
	    /* Otherwise fall through to the dynamic-extents lookup.  */
	  }
      }
      break;
    case RK_SIZED:
      /* Look through sized regions to get at the capacity
	 of the underlying regions.  */
      return get_capacity (reg->get_parent_region ());
    }

  /* If an extent was recorded dynamically for REG, use it.  */
  if (const svalue *recorded = get_dynamic_extents (reg))
    return recorded;

  return m_mgr->get_or_create_unknown_svalue (sizetype);
}
/* If CTXT is non-NULL, use it to warn about any problems accessing REG,
   using DIR to determine if this access is a read or write.  */

void
region_model::check_region_access (const region *reg,
				   enum access_direction dir,
				   region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return;

  switch (dir)
    {
    default:
      gcc_unreachable ();
    case DIR_READ:
      /* Currently a no-op.  */
      break;
    case DIR_WRITE:
      check_for_writable_region (reg, ctxt);
      break;
    }
}
/* If CTXT is non-NULL, use it to warn about any problems writing to REG.
   Convenience wrapper around check_region_access.  */

void
region_model::check_region_for_write (const region *dest_reg,
				      region_model_context *ctxt) const
{
  check_region_access (dest_reg, DIR_WRITE, ctxt);
}
/* If CTXT is non-NULL, use it to warn about any problems reading from REG.
   Convenience wrapper around check_region_access.  */

void
region_model::check_region_for_read (const region *src_reg,
				     region_model_context *ctxt) const
{
  check_region_access (src_reg, DIR_READ, ctxt);
}
2335 /* Set the value of the region given by LHS_REG to the value given
2336 by RHS_SVAL.
2337 Use CTXT to report any warnings associated with writing to LHS_REG. */
2339 void
2340 region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
2341 region_model_context *ctxt)
2343 gcc_assert (lhs_reg);
2344 gcc_assert (rhs_sval);
2346 check_region_for_write (lhs_reg, ctxt);
2348 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
2349 ctxt ? ctxt->get_uncertainty () : NULL);
/* Set the value of the region given by LHS to the value given by RHS.
   Tree-level convenience overload: resolves LHS to a region and RHS to
   an svalue, then delegates to the region/svalue overload above.  */

void
region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
{
  const region *lhs_reg = get_lvalue (lhs, ctxt);
  const svalue *rhs_sval = get_rvalue (rhs, ctxt);
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);
  set_value (lhs_reg, rhs_sval, ctxt);
}
2364 /* Remove all bindings overlapping REG within the store. */
2366 void
2367 region_model::clobber_region (const region *reg)
2369 m_store.clobber_region (m_mgr->get_store_manager(), reg);
2372 /* Remove any bindings for REG within the store. */
2374 void
2375 region_model::purge_region (const region *reg)
2377 m_store.purge_region (m_mgr->get_store_manager(), reg);
2380 /* Fill REG with SVAL. */
2382 void
2383 region_model::fill_region (const region *reg, const svalue *sval)
2385 m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
2388 /* Zero-fill REG. */
2390 void
2391 region_model::zero_fill_region (const region *reg)
2393 m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
2396 /* Mark REG as having unknown content. */
2398 void
2399 region_model::mark_region_as_unknown (const region *reg,
2400 uncertainty_t *uncertainty)
2402 m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
2403 uncertainty);
/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
   this model.  */

tristate
region_model::eval_condition (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs) const
{
  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
      || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
    return tristate::unknown ();

  /* First try reasoning directly about the svalues themselves.  */
  tristate ts = eval_condition_without_cm (lhs, op, rhs);
  if (ts.is_known ())
    return ts;

  /* Otherwise, try constraints.  */
  return m_constraints->eval_condition (lhs, op, rhs);
}
/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
   this model, without resorting to the constraint_manager.

   This is exposed so that impl_region_model_context::on_state_leak can
   check for equality part-way through region_model::purge_unused_svalues
   without risking creating new ECs.  */

tristate
region_model::eval_condition_without_cm (const svalue *lhs,
					 enum tree_code op,
					 const svalue *rhs) const
{
  gcc_assert (lhs);
  gcc_assert (rhs);

  /* See what we know based on the values.  */

  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
      || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
    return tristate::unknown ();

  /* Unwrap any unmergeable values.  */
  lhs = lhs->unwrap_any_unmergeable ();
  rhs = rhs->unwrap_any_unmergeable ();

  if (lhs == rhs)
    {
      /* If we have the same svalue, then we have equality
	 (apart from NaN-handling).
	 TODO: should this definitely be the case for poisoned values?  */
      /* Poisoned and unknown values are "unknowable".  */
      if (lhs->get_kind () == SK_POISONED
	  || lhs->get_kind () == SK_UNKNOWN)
	return tristate::TS_UNKNOWN;

      switch (op)
	{
	case EQ_EXPR:
	case GE_EXPR:
	case LE_EXPR:
	  return tristate::TS_TRUE;

	case NE_EXPR:
	case GT_EXPR:
	case LT_EXPR:
	  return tristate::TS_FALSE;

	default:
	  /* For other ops, use the logic below.  */
	  break;
	}
    }

  /* If we have a pair of region_svalues, compare them.  */
  if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
	if (res.is_known ())
	  return res;
	/* Otherwise, only known through constraints.  */
      }

  /* If we have a pair of constants, compare them.  */
  if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
    if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
      return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);

  /* Handle comparison against zero.  */
  if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
    if (zerop (cst_rhs->get_constant ()))
      {
	if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
	  {
	    /* A region_svalue is a non-NULL pointer, except in certain
	       special cases (see the comment for region::non_null_p).  */
	    const region *pointee = ptr->get_pointee ();
	    if (pointee->non_null_p ())
	      {
		switch (op)
		  {
		  default:
		    gcc_unreachable ();

		  case EQ_EXPR:
		  case GE_EXPR:
		  case LE_EXPR:
		    return tristate::TS_FALSE;

		  case NE_EXPR:
		  case GT_EXPR:
		  case LT_EXPR:
		    return tristate::TS_TRUE;
		  }
	      }
	  }
	else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
	  {
	    /* Treat offsets from a non-NULL pointer as being non-NULL.  This
	       isn't strictly true, in that eventually ptr++ will wrap
	       around and be NULL, but it won't occur in practice and thus
	       can be used to suppress effectively false positives that we
	       shouldn't warn for.  */
	    if (binop->get_op () == POINTER_PLUS_EXPR)
	      {
		tristate lhs_ts
		  = eval_condition_without_cm (binop->get_arg0 (),
					       op, rhs);
		if (lhs_ts.is_known ())
		  return lhs_ts;
	      }
	  }
      }

  /* Handle rejection of equality for comparisons of the initial values of
     "external" values (such as params) with the address of locals.  */
  if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
	if (res.is_known ())
	  return res;
      }
  if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
    if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
	if (res.is_known ())
	  return res;
      }

  /* Widening svalues (from loop iteration) vs constants: delegate to the
     widening svalue's own logic.  */
  if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
    if (tree rhs_cst = rhs->maybe_get_constant ())
      {
	tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
	if (res.is_known ())
	  return res;
      }

  return tristate::TS_UNKNOWN;
}
/* Subroutine of region_model::eval_condition_without_cm, for rejecting
   equality of INIT_VAL(PARM) with &LOCAL.  */

tristate
region_model::compare_initial_and_pointer (const initial_svalue *init,
					   const region_svalue *ptr) const
{
  const region *pointee = ptr->get_pointee ();

  /* If we have a pointer to something within a stack frame, it can't be the
     initial value of a param.  */
  if (pointee->maybe_get_frame_region ())
    if (init->initial_value_of_param_p ())
      return tristate::TS_FALSE;

  return tristate::TS_UNKNOWN;
}
2590 /* Handle various constraints of the form:
2591 LHS: ((bool)INNER_LHS INNER_OP INNER_RHS))
2592 OP : == or !=
2593 RHS: zero
2594 and (with a cast):
2595 LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
2596 OP : == or !=
2597 RHS: zero
2598 by adding constraints for INNER_LHS INNEROP INNER_RHS.
2600 Return true if this function can fully handle the constraint; if
2601 so, add the implied constraint(s) and write true to *OUT if they
2602 are consistent with existing constraints, or write false to *OUT
2603 if they contradicts existing constraints.
2605 Return false for cases that this function doeesn't know how to handle.
2607 For example, if we're checking a stored conditional, we'll have
2608 something like:
2609 LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
2610 OP : NE_EXPR
2611 RHS: zero
2612 which this function can turn into an add_constraint of:
2613 (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
2615 Similarly, optimized && and || conditionals lead to e.g.
2616 if (p && q)
2617 becoming gimple like this:
2618 _1 = p_6 == 0B;
2619 _2 = q_8 == 0B
2620 _3 = _1 | _2
2621 On the "_3 is false" branch we can have constraints of the form:
2622 ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
2623 | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
2624 == 0
2625 which implies that both _1 and _2 are false,
2626 which this function can turn into a pair of add_constraints of
2627 (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
2628 and:
2629 (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */
2631 bool
2632 region_model::add_constraints_from_binop (const svalue *outer_lhs,
2633 enum tree_code outer_op,
2634 const svalue *outer_rhs,
2635 bool *out,
2636 region_model_context *ctxt)
2638 while (const svalue *cast = outer_lhs->maybe_undo_cast ())
2639 outer_lhs = cast;
2640 const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
2641 if (!binop_sval)
2642 return false;
2643 if (!outer_rhs->all_zeroes_p ())
2644 return false;
2646 const svalue *inner_lhs = binop_sval->get_arg0 ();
2647 enum tree_code inner_op = binop_sval->get_op ();
2648 const svalue *inner_rhs = binop_sval->get_arg1 ();
2650 if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
2651 return false;
2653 /* We have either
2654 - "OUTER_LHS != false" (i.e. OUTER is true), or
2655 - "OUTER_LHS == false" (i.e. OUTER is false). */
2656 bool is_true = outer_op == NE_EXPR;
2658 switch (inner_op)
2660 default:
2661 return false;
2663 case EQ_EXPR:
2664 case NE_EXPR:
2666 /* ...and "(inner_lhs OP inner_rhs) == 0"
2667 then (inner_lhs OP inner_rhs) must have the same
2668 logical value as LHS. */
2669 if (!is_true)
2670 inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
2671 *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
2672 return true;
2674 break;
2676 case BIT_AND_EXPR:
2677 if (is_true)
2679 /* ...and "(inner_lhs & inner_rhs) != 0"
2680 then both inner_lhs and inner_rhs must be true. */
2681 const svalue *false_sval
2682 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
2683 bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
2684 bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
2685 *out = sat1 && sat2;
2686 return true;
2688 return false;
2690 case BIT_IOR_EXPR:
2691 if (!is_true)
2693 /* ...and "(inner_lhs | inner_rhs) == 0"
2694 i.e. "(inner_lhs | inner_rhs)" is false
2695 then both inner_lhs and inner_rhs must be false. */
2696 const svalue *false_sval
2697 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
2698 bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
2699 bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
2700 *out = sat1 && sat2;
2701 return true;
2703 return false;
/* Attempt to add the constraint "LHS OP RHS" to this region_model.
   If it is consistent with existing constraints, add it, and return true.
   Return false if it contradicts existing constraints.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

bool
region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
			      region_model_context *ctxt)
{
  /* For now, make no attempt to capture constraints on floating-point
     values.  Treating the constraint as trivially satisfiable.  */
  if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
    return true;

  const svalue *lhs_sval = get_rvalue (lhs, ctxt);
  const svalue *rhs_sval = get_rvalue (rhs, ctxt);

  return add_constraint (lhs_sval, op, rhs_sval, ctxt);
}
/* Attempt to add the constraint "LHS OP RHS" to this region_model.
   If it is consistent with existing constraints, add it, and return true.
   Return false if it contradicts existing constraints.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

bool
region_model::add_constraint (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs,
			      region_model_context *ctxt)
{
  tristate t_cond = eval_condition (lhs, op, rhs);

  /* If we already have the condition, do nothing.  */
  if (t_cond.is_true ())
    return true;

  /* Reject a constraint that would contradict existing knowledge, as
     unsatisfiable.  */
  if (t_cond.is_false ())
    return false;

  /* See if the constraint can be decomposed into constraints on the
     operands of a binop (e.g. stored conditionals, optimized &&/||).  */
  bool out;
  if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
    return out;

  /* Store the constraint.  */
  m_constraints->add_constraint (lhs, op, rhs);

  /* Notify the context, if any.  This exists so that the state machines
     in a program_state can be notified about the condition, and so can
     set sm-state for e.g. unchecked->checked, both for cfg-edges, and
     when synthesizing constraints as above.  */
  if (ctxt)
    ctxt->on_condition (lhs, op, rhs);

  /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
     the case where REGION is heap-allocated and thus could be NULL).  */
  if (tree rhs_cst = rhs->maybe_get_constant ())
    if (op == EQ_EXPR && zerop (rhs_cst))
      if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
	unset_dynamic_extents (region_sval->get_pointee ());

  return true;
}
/* As above, but when returning false, if OUT is non-NULL, write a
   new rejected_constraint to *OUT.
   NOTE(review): *OUT is heap-allocated; presumably the caller takes
   ownership — confirm against callers.  */

bool
region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
			      region_model_context *ctxt,
			      rejected_constraint **out)
{
  bool sat = add_constraint (lhs, op, rhs, ctxt);
  if (!sat && out)
    *out = new rejected_op_constraint (*this, lhs, op, rhs);
  return sat;
}
/* Determine what is known about the condition "LHS OP RHS" within
   this model.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

tristate
region_model::eval_condition (tree lhs,
			      enum tree_code op,
			      tree rhs,
			      region_model_context *ctxt)
{
  /* For now, make no attempt to model constraints on floating-point
     values.  */
  if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
    return tristate::unknown ();

  return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
}
/* Implementation of region_model::get_representative_path_var.
   Attempt to return a path_var that represents SVAL, or return NULL_TREE.
   Use VISITED to prevent infinite mutual recursion with the overload for
   regions.  */

path_var
region_model::get_representative_path_var_1 (const svalue *sval,
					     svalue_set *visited) const
{
  gcc_assert (sval);

  /* Prevent infinite recursion.  */
  if (visited->contains (sval))
    return path_var (NULL_TREE, 0);
  visited->add (sval);

  /* Handle casts by recursion into get_representative_path_var.  */
  if (const svalue *cast_sval = sval->maybe_undo_cast ())
    {
      path_var result = get_representative_path_var (cast_sval, visited);
      tree orig_type = sval->get_type ();
      /* If necessary, wrap the result in a cast.  */
      if (result.m_tree && orig_type)
	result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
      return result;
    }

  /* Gather candidate path_vars from the store's bindings.  */
  auto_vec<path_var> pvs;
  m_store.get_representative_path_vars (this, visited, sval, &pvs);

  if (tree cst = sval->maybe_get_constant ())
    pvs.safe_push (path_var (cst, 0));

  /* Handle string literals and various other pointers.  */
  if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
    {
      const region *reg = ptr_sval->get_pointee ();
      if (path_var pv = get_representative_path_var (reg, visited))
	return path_var (build1 (ADDR_EXPR,
				 sval->get_type (),
				 pv.m_tree),
			 pv.m_stack_depth);
    }

  /* If we have a sub_svalue, look for ways to represent the parent.  */
  if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
    {
      const svalue *parent_sval = sub_sval->get_parent ();
      const region *subreg = sub_sval->get_subregion ();
      if (path_var parent_pv
	    = get_representative_path_var (parent_sval, visited))
	if (const field_region *field_reg = subreg->dyn_cast_field_region ())
	  return path_var (build3 (COMPONENT_REF,
				   sval->get_type (),
				   parent_pv.m_tree,
				   field_reg->get_field (),
				   NULL_TREE),
			   parent_pv.m_stack_depth);
    }

  if (pvs.length () < 1)
    return path_var (NULL_TREE, 0);

  /* Prefer the most human-readable candidate.  */
  pvs.qsort (readability_comparator);
  return pvs[0];
}
/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
   Use VISITED to prevent infinite mutual recursion with the overload for
   regions

   This function defers to get_representative_path_var_1 to do the work;
   it adds verification that get_representative_path_var_1 returned a tree
   of the correct type.  */

path_var
region_model::get_representative_path_var (const svalue *sval,
					   svalue_set *visited) const
{
  if (sval == NULL)
    return path_var (NULL_TREE, 0);

  tree orig_type = sval->get_type ();

  path_var result = get_representative_path_var_1 (sval, visited);

  /* Verify that the result has the same type as SVAL, if any.  */
  if (result.m_tree && orig_type)
    gcc_assert (TREE_TYPE (result.m_tree) == orig_type);

  return result;
}
/* Attempt to return a tree that represents SVAL, or return NULL_TREE.

   Strip off any top-level cast, to avoid messages like
     double-free of '(void *)ptr'
   from analyzer diagnostics.  */

tree
region_model::get_representative_tree (const svalue *sval) const
{
  svalue_set visited;
  tree expr = get_representative_path_var (sval, &visited).m_tree;

  /* Strip off any top-level cast.  */
  if (expr && TREE_CODE (expr) == NOP_EXPR)
    expr = TREE_OPERAND (expr, 0);

  return fixup_tree_for_diagnostic (expr);
}
/* Implementation of region_model::get_representative_path_var.

   Attempt to return a path_var that represents REG, or return
   the NULL path_var.
   For example, a region for a field of a local would be a path_var
   wrapping a COMPONENT_REF.
   Use VISITED to prevent infinite mutual recursion with the overload for
   svalues.  */

path_var
region_model::get_representative_path_var_1 (const region *reg,
					     svalue_set *visited) const
{
  switch (reg->get_kind ())
    {
    default:
      gcc_unreachable ();

    case RK_FRAME:
    case RK_GLOBALS:
    case RK_CODE:
    case RK_HEAP:
    case RK_STACK:
    case RK_ROOT:
      /* Regions that represent memory spaces are not expressible as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_FUNCTION:
      {
	const function_region *function_reg
	  = as_a <const function_region *> (reg);
	return path_var (function_reg->get_fndecl (), 0);
      }
    case RK_LABEL:
      {
	const label_region *label_reg = as_a <const label_region *> (reg);
	return path_var (label_reg->get_label (), 0);
      }

    case RK_SYMBOLIC:
      {
	/* Express a symbolic region as "*PTR" via a MEM_REF with a
	   zero offset.  */
	const symbolic_region *symbolic_reg
	  = as_a <const symbolic_region *> (reg);
	const svalue *pointer = symbolic_reg->get_pointer ();
	path_var pointer_pv = get_representative_path_var (pointer, visited);
	if (!pointer_pv)
	  return path_var (NULL_TREE, 0);
	tree offset = build_int_cst (pointer->get_type (), 0);
	return path_var (build2 (MEM_REF,
				 reg->get_type (),
				 pointer_pv.m_tree,
				 offset),
			 pointer_pv.m_stack_depth);
      }
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
      }
    case RK_FIELD:
      {
	/* Field of some parent region: PARENT.FIELD.  */
	const field_region *field_reg = as_a <const field_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build3 (COMPONENT_REF,
				 reg->get_type (),
				 parent_pv.m_tree,
				 field_reg->get_field (),
				 NULL_TREE),
			 parent_pv.m_stack_depth);
      }

    case RK_ELEMENT:
      {
	/* Array element: PARENT[INDEX].  */
	const element_region *element_reg
	  = as_a <const element_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var index_pv
	  = get_representative_path_var (element_reg->get_index (), visited);
	if (!index_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build4 (ARRAY_REF,
				 reg->get_type (),
				 parent_pv.m_tree, index_pv.m_tree,
				 NULL_TREE, NULL_TREE),
			 parent_pv.m_stack_depth);
      }

    case RK_OFFSET:
      {
	/* Byte offset within parent: *(&PARENT + OFFSET), but only when
	   the offset is a compile-time constant.  */
	const offset_region *offset_reg
	  = as_a <const offset_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var offset_pv
	  = get_representative_path_var (offset_reg->get_byte_offset (),
					 visited);
	if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
	  return path_var (NULL_TREE, 0);
	tree addr_parent = build1 (ADDR_EXPR,
				   build_pointer_type (reg->get_type ()),
				   parent_pv.m_tree);
	return path_var (build2 (MEM_REF,
				 reg->get_type (),
				 addr_parent, offset_pv.m_tree),
			 parent_pv.m_stack_depth);
      }

    case RK_SIZED:
      return path_var (NULL_TREE, 0);

    case RK_CAST:
      {
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build1 (NOP_EXPR,
				 reg->get_type (),
				 parent_pv.m_tree),
			 parent_pv.m_stack_depth);
      }

    case RK_HEAP_ALLOCATED:
    case RK_ALLOCA:
      /* No good way to express heap-allocated/alloca regions as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_STRING:
      {
	const string_region *string_reg = as_a <const string_region *> (reg);
	return path_var (string_reg->get_string_cst (), 0);
      }

    case RK_UNKNOWN:
      return path_var (NULL_TREE, 0);
    }
}
/* Attempt to return a path_var that represents REG, or return
   the NULL path_var.
   For example, a region for a field of a local would be a path_var
   wrapping a COMPONENT_REF.
   Use VISITED to prevent infinite mutual recursion with the overload for
   svalues.

   This function defers to get_representative_path_var_1 to do the work;
   it adds verification that get_representative_path_var_1 returned a tree
   of the correct type.  */

path_var
region_model::get_representative_path_var (const region *reg,
					   svalue_set *visited) const
{
  path_var result = get_representative_path_var_1 (reg, visited);

  /* Verify that the result has the same type as REG, if any.  */
  if (result.m_tree && reg->get_type ())
    gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());

  return result;
}
/* Update this model for any phis in SNODE, assuming we came from
   LAST_CFG_SUPEREDGE.  */

void
region_model::update_for_phis (const supernode *snode,
			       const cfg_superedge *last_cfg_superedge,
			       region_model_context *ctxt)
{
  gcc_assert (last_cfg_superedge);

  /* Copy this state and pass it to handle_phi so that all of the phi stmts
     are effectively handled simultaneously.  */
  const region_model old_state (*this);

  for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
       !gsi_end_p (gpi); gsi_next (&gpi))
    {
      gphi *phi = gpi.phi ();

      /* Select the phi argument corresponding to the incoming edge.  */
      tree src = last_cfg_superedge->get_phi_arg (phi);
      tree lhs = gimple_phi_result (phi);

      /* Update next_state based on phi and old_state.  */
      handle_phi (phi, lhs, src, old_state, ctxt);
    }
}
/* Attempt to update this model for taking EDGE (where the last statement
   was LAST_STMT), returning true if the edge can be taken, false
   otherwise.
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to it.

   For CFG superedges where LAST_STMT is a conditional or a switch
   statement, attempt to add the relevant conditions for EDGE to this
   model, returning true if they are feasible, or false if they are
   impossible.

   For call superedges, push frame information and store arguments
   into parameters.

   For return superedges, pop frame information and store return
   values into any lhs.

   Rejection of call/return superedges happens elsewhere, in
   program_point::on_edge (i.e. based on program point, rather
   than program state).  */

bool
region_model::maybe_update_for_edge (const superedge &edge,
				     const gimple *last_stmt,
				     region_model_context *ctxt,
				     rejected_constraint **out)
{
  /* Handle frame updates for interprocedural edges.  */
  switch (edge.m_kind)
    {
    default:
      break;

    case SUPEREDGE_CALL:
      {
	const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
	update_for_call_superedge (*call_edge, ctxt);
      }
      break;

    case SUPEREDGE_RETURN:
      {
	const return_superedge *return_edge
	  = as_a <const return_superedge *> (&edge);
	update_for_return_superedge (*return_edge, ctxt);
      }
      break;

    case SUPEREDGE_INTRAPROCEDURAL_CALL:
      {
	const callgraph_superedge *cg_sedge
	  = as_a <const callgraph_superedge *> (&edge);
	update_for_call_summary (*cg_sedge, ctxt);
      }
      break;
    }

  if (last_stmt == NULL)
    return true;

  /* Apply any constraints for conditionals/switch statements.  */

  if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
    {
      const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
      return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
    }

  if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
    {
      const switch_cfg_superedge *switch_sedge
	= as_a <const switch_cfg_superedge *> (&edge);
      return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
					    ctxt, out);
    }

  /* Apply any constraints due to an exception being thrown.  */
  if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
    if (cfg_sedge->get_flags () & EDGE_EH)
      return apply_constraints_for_exception (last_stmt, ctxt, out);

  return true;
}
3198 /* Push a new frame_region on to the stack region.
3199 Populate the frame_region with child regions for the function call's
3200 parameters, using values from the arguments at the callsite in the
3201 caller's frame. */
3203 void
3204 region_model::update_for_gcall (const gcall *call_stmt,
3205 region_model_context *ctxt,
3206 function *callee)
3208 /* Build a vec of argument svalues, using the current top
3209 frame for resolving tree expressions. */
3210 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
3212 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
3214 tree arg = gimple_call_arg (call_stmt, i);
3215 arg_svals.quick_push (get_rvalue (arg, ctxt));
3218 if(!callee)
3220 /* Get the function * from the gcall. */
3221 tree fn_decl = get_fndecl_for_call (call_stmt,ctxt);
3222 callee = DECL_STRUCT_FUNCTION (fn_decl);
3225 push_frame (callee, &arg_svals, ctxt);
/* Pop the top-most frame_region from the stack, and copy the return
   region's values (if any) into the region for the lvalue of the LHS of
   the call (if any).  */

void
region_model::update_for_return_gcall (const gcall *call_stmt,
				       region_model_context *ctxt)
{
  /* Get the region for the result of the call, within the caller frame.  */
  const region *result_dst_reg = NULL;
  tree lhs = gimple_call_lhs (call_stmt);
  if (lhs)
    {
      /* Normally we access the top-level frame, which is:
	 path_var (expr, get_stack_depth () - 1)
	 whereas here we need the caller frame, hence "- 2" here.  */
      gcc_assert (get_stack_depth () >= 2);
      result_dst_reg = get_lvalue (path_var (lhs, get_stack_depth () - 2),
				   ctxt);
    }

  pop_frame (result_dst_reg, NULL, ctxt);
}
/* Extract calling information from the superedge and update the model for the
   call.  */

void
region_model::update_for_call_superedge (const call_superedge &call_edge,
					 region_model_context *ctxt)
{
  const gcall *call_stmt = call_edge.get_call_stmt ();
  update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
}
/* Extract calling information from the return superedge and update the model
   for the returning call.  */

void
region_model::update_for_return_superedge (const return_superedge &return_edge,
					   region_model_context *ctxt)
{
  const gcall *call_stmt = return_edge.get_call_stmt ();
  update_for_return_gcall (call_stmt, ctxt);
}
/* Update this region_model with a summary of the effect of calling
   and returning from CG_SEDGE.

   TODO: Currently this is extremely simplistic: we merely set the
   return value to "unknown".  A proper implementation would e.g. update
   sm-state, and presumably be reworked to support multiple outcomes.  */

void
region_model::update_for_call_summary (const callgraph_superedge &cg_sedge,
				       region_model_context *ctxt)
{
  /* For now, set any return value to "unknown".  */
  const gcall *call_stmt = cg_sedge.get_call_stmt ();
  tree lhs = gimple_call_lhs (call_stmt);
  if (lhs)
    mark_region_as_unknown (get_lvalue (lhs, ctxt),
			    ctxt ? ctxt->get_uncertainty () : NULL);

  // TODO: actually implement some kind of summary here
}
3295 /* Given a true or false edge guarded by conditional statement COND_STMT,
3296 determine appropriate constraints for the edge to be taken.
3298 If they are feasible, add the constraints and return true.
3300 Return false if the constraints contradict existing knowledge
3301 (and so the edge should not be taken).
3302 When returning false, if OUT is non-NULL, write a new rejected_constraint
3303 to it. */
3305 bool
3306 region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
3307 const gcond *cond_stmt,
3308 region_model_context *ctxt,
3309 rejected_constraint **out)
3311 ::edge cfg_edge = sedge.get_cfg_edge ();
3312 gcc_assert (cfg_edge != NULL);
3313 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
3315 enum tree_code op = gimple_cond_code (cond_stmt);
3316 tree lhs = gimple_cond_lhs (cond_stmt);
3317 tree rhs = gimple_cond_rhs (cond_stmt);
3318 if (cfg_edge->flags & EDGE_FALSE_VALUE)
3319 op = invert_tree_comparison (op, false /* honor_nans */);
3320 return add_constraint (lhs, op, rhs, ctxt, out);
3323 /* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
3324 for the edge to be taken.
3326 If they are feasible, add the constraints and return true.
3328 Return false if the constraints contradict existing knowledge
3329 (and so the edge should not be taken).
3330 When returning false, if OUT is non-NULL, write a new rejected_constraint
3331 to it. */
3333 bool
3334 region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
3335 const gswitch *switch_stmt,
3336 region_model_context *ctxt,
3337 rejected_constraint **out)
3339 bounded_ranges_manager *ranges_mgr = get_range_manager ();
3340 const bounded_ranges *all_cases_ranges
3341 = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
3342 tree index = gimple_switch_index (switch_stmt);
3343 const svalue *index_sval = get_rvalue (index, ctxt);
3344 bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
3345 if (!sat && out)
3346 *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
3347 return sat;
3350 /* Apply any constraints due to an exception being thrown at LAST_STMT.
3352 If they are feasible, add the constraints and return true.
3354 Return false if the constraints contradict existing knowledge
3355 (and so the edge should not be taken).
3356 When returning false, if OUT is non-NULL, write a new rejected_constraint
3357 to it. */
3359 bool
3360 region_model::apply_constraints_for_exception (const gimple *last_stmt,
3361 region_model_context *ctxt,
3362 rejected_constraint **out)
3364 gcc_assert (last_stmt);
3365 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
3366 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
3367 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
3368 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
3370 /* We have an exception thrown from operator new.
3371 Add a constraint that the result was NULL, to avoid a false
3372 leak report due to the result being lost when following
3373 the EH edge. */
3374 if (tree lhs = gimple_call_lhs (call))
3375 return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
3376 return true;
3378 return true;
3381 /* For use with push_frame when handling a top-level call within the analysis.
3382 PARAM has a defined but unknown initial value.
3383 Anything it points to has escaped, since the calling context "knows"
3384 the pointer, and thus calls to unknown functions could read/write into
3385 the region. */
3387 void
3388 region_model::on_top_level_param (tree param,
3389 region_model_context *ctxt)
3391 if (POINTER_TYPE_P (TREE_TYPE (param)))
3393 const region *param_reg = get_lvalue (param, ctxt);
3394 const svalue *init_ptr_sval
3395 = m_mgr->get_or_create_initial_value (param_reg);
3396 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
3397 m_store.mark_as_escaped (pointee_reg);
3401 /* Update this region_model to reflect pushing a frame onto the stack
3402 for a call to FUN.
3404 If ARG_SVALS is non-NULL, use it to populate the parameters
3405 in the new frame.
3406 Otherwise, the params have their initial_svalues.
3408 Return the frame_region for the new frame. */
3410 const region *
3411 region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
3412 region_model_context *ctxt)
3414 m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
3415 if (arg_svals)
3417 /* Arguments supplied from a caller frame. */
3418 tree fndecl = fun->decl;
3419 unsigned idx = 0;
3420 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
3421 iter_parm = DECL_CHAIN (iter_parm), ++idx)
3423 /* If there's a mismatching declaration, the call stmt might
3424 not have enough args. Handle this case by leaving the
3425 rest of the params as uninitialized. */
3426 if (idx >= arg_svals->length ())
3427 break;
3428 tree parm_lval = iter_parm;
3429 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
3430 parm_lval = parm_default_ssa;
3431 const region *parm_reg = get_lvalue (parm_lval, ctxt);
3432 const svalue *arg_sval = (*arg_svals)[idx];
3433 set_value (parm_reg, arg_sval, ctxt);
3436 else
3438 /* Otherwise we have a top-level call within the analysis. The params
3439 have defined but unknown initial values.
3440 Anything they point to has escaped. */
3441 tree fndecl = fun->decl;
3442 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
3443 iter_parm = DECL_CHAIN (iter_parm))
3445 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
3446 on_top_level_param (parm_default_ssa, ctxt);
3447 else
3448 on_top_level_param (iter_parm, ctxt);
3452 return m_current_frame;
3455 /* Get the function of the top-most frame in this region_model's stack.
3456 There must be such a frame. */
3458 function *
3459 region_model::get_current_function () const
3461 const frame_region *frame = get_current_frame ();
3462 gcc_assert (frame);
3463 return frame->get_function ();
3466 /* Pop the topmost frame_region from this region_model's stack;
3468 If RESULT_DST_REG is non-null, copy any return value from the frame
3469 into RESULT_DST_REG's region.
3470 If OUT_RESULT is non-null, copy any return value from the frame
3471 into *OUT_RESULT.
3473 Purge the frame region and all its descendent regions.
3474 Convert any pointers that point into such regions into
3475 POISON_KIND_POPPED_STACK svalues. */
3477 void
3478 region_model::pop_frame (const region *result_dst_reg,
3479 const svalue **out_result,
3480 region_model_context *ctxt)
3482 gcc_assert (m_current_frame);
3484 /* Evaluate the result, within the callee frame. */
3485 const frame_region *frame_reg = m_current_frame;
3486 tree fndecl = m_current_frame->get_function ()->decl;
3487 tree result = DECL_RESULT (fndecl);
3488 if (result && TREE_TYPE (result) != void_type_node)
3490 if (result_dst_reg)
3492 /* Copy the result to RESULT_DST_REG. */
3493 copy_region (result_dst_reg,
3494 get_lvalue (result, ctxt),
3495 ctxt);
3497 if (out_result)
3498 *out_result = get_rvalue (result, ctxt);
3501 /* Pop the frame. */
3502 m_current_frame = m_current_frame->get_calling_frame ();
3504 unbind_region_and_descendents (frame_reg,POISON_KIND_POPPED_STACK);
3507 /* Get the number of frames in this region_model's stack. */
3510 region_model::get_stack_depth () const
3512 const frame_region *frame = get_current_frame ();
3513 if (frame)
3514 return frame->get_stack_depth ();
3515 else
3516 return 0;
3519 /* Get the frame_region with the given index within the stack.
3520 The frame_region must exist. */
3522 const frame_region *
3523 region_model::get_frame_at_index (int index) const
3525 const frame_region *frame = get_current_frame ();
3526 gcc_assert (frame);
3527 gcc_assert (index >= 0);
3528 gcc_assert (index <= frame->get_index ());
3529 while (index != frame->get_index ())
3531 frame = frame->get_calling_frame ();
3532 gcc_assert (frame);
3534 return frame;
3537 /* Unbind svalues for any regions in REG and below.
3538 Find any pointers to such regions; convert them to
3539 poisoned values of kind PKIND.
3540 Also purge any dynamic extents. */
3542 void
3543 region_model::unbind_region_and_descendents (const region *reg,
3544 enum poison_kind pkind)
3546 /* Gather a set of base regions to be unbound. */
3547 hash_set<const region *> base_regs;
3548 for (store::cluster_map_t::iterator iter = m_store.begin ();
3549 iter != m_store.end (); ++iter)
3551 const region *iter_base_reg = (*iter).first;
3552 if (iter_base_reg->descendent_of_p (reg))
3553 base_regs.add (iter_base_reg);
3555 for (hash_set<const region *>::iterator iter = base_regs.begin ();
3556 iter != base_regs.end (); ++iter)
3557 m_store.purge_cluster (*iter);
3559 /* Find any pointers to REG or its descendents; convert to poisoned. */
3560 poison_any_pointers_to_descendents (reg, pkind);
3562 /* Purge dynamic extents of any base regions in REG and below
3563 (e.g. VLAs and alloca stack regions). */
3564 for (auto iter : m_dynamic_extents)
3566 const region *iter_reg = iter.first;
3567 if (iter_reg->descendent_of_p (reg))
3568 unset_dynamic_extents (iter_reg);
3572 /* Implementation of BindingVisitor.
3573 Update the bound svalues for regions below REG to use poisoned
3574 values instead. */
3576 struct bad_pointer_finder
3578 bad_pointer_finder (const region *reg, enum poison_kind pkind,
3579 region_model_manager *mgr)
3580 : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
3583 void on_binding (const binding_key *, const svalue *&sval)
3585 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
3587 const region *ptr_dst = ptr_sval->get_pointee ();
3588 /* Poison ptrs to descendents of REG, but not to REG itself,
3589 otherwise double-free detection doesn't work (since sm-state
3590 for "free" is stored on the original ptr svalue). */
3591 if (ptr_dst->descendent_of_p (m_reg)
3592 && ptr_dst != m_reg)
3594 sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
3595 sval->get_type ());
3596 ++m_count;
3601 const region *m_reg;
3602 enum poison_kind m_pkind;
3603 region_model_manager *const m_mgr;
3604 int m_count;
3607 /* Find any pointers to REG or its descendents; convert them to
3608 poisoned values of kind PKIND.
3609 Return the number of pointers that were poisoned. */
3612 region_model::poison_any_pointers_to_descendents (const region *reg,
3613 enum poison_kind pkind)
3615 bad_pointer_finder bv (reg, pkind, m_mgr);
3616 m_store.for_each_binding (bv);
3617 return bv.m_count;
3620 /* Attempt to merge THIS with OTHER_MODEL, writing the result
3621 to OUT_MODEL. Use POINT to distinguish values created as a
3622 result of merging. */
3624 bool
3625 region_model::can_merge_with_p (const region_model &other_model,
3626 const program_point &point,
3627 region_model *out_model) const
3629 gcc_assert (out_model);
3630 gcc_assert (m_mgr == other_model.m_mgr);
3631 gcc_assert (m_mgr == out_model->m_mgr);
3633 if (m_current_frame != other_model.m_current_frame)
3634 return false;
3635 out_model->m_current_frame = m_current_frame;
3637 model_merger m (this, &other_model, point, out_model);
3639 if (!store::can_merge_p (&m_store, &other_model.m_store,
3640 &out_model->m_store, m_mgr->get_store_manager (),
3641 &m))
3642 return false;
3644 if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
3645 &out_model->m_dynamic_extents))
3646 return false;
3648 /* Merge constraints. */
3649 constraint_manager::merge (*m_constraints,
3650 *other_model.m_constraints,
3651 out_model->m_constraints);
3653 return true;
3656 /* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
3657 otherwise. */
3659 tree
3660 region_model::get_fndecl_for_call (const gcall *call,
3661 region_model_context *ctxt)
3663 tree fn_ptr = gimple_call_fn (call);
3664 if (fn_ptr == NULL_TREE)
3665 return NULL_TREE;
3666 const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
3667 if (const region_svalue *fn_ptr_ptr
3668 = fn_ptr_sval->dyn_cast_region_svalue ())
3670 const region *reg = fn_ptr_ptr->get_pointee ();
3671 if (const function_region *fn_reg = reg->dyn_cast_function_region ())
3673 tree fn_decl = fn_reg->get_fndecl ();
3674 cgraph_node *node = cgraph_node::get (fn_decl);
3675 if (!node)
3676 return NULL_TREE;
3677 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
3678 if (ultimate_node)
3679 return ultimate_node->decl;
3683 return NULL_TREE;
3686 /* Would be much simpler to use a lambda here, if it were supported. */
3688 struct append_ssa_names_cb_data
3690 const region_model *model;
3691 auto_vec<const decl_region *> *out;
3694 /* Populate *OUT with all decl_regions for SSA names in the current
3695 frame that have clusters within the store. */
3697 void
3698 region_model::
3699 get_ssa_name_regions_for_current_frame (auto_vec<const decl_region *> *out)
3700 const
3702 append_ssa_names_cb_data data;
3703 data.model = this;
3704 data.out = out;
3705 m_store.for_each_cluster (append_ssa_names_cb, &data);
3708 /* Implementation detail of get_ssa_name_regions_for_current_frame. */
3710 void
3711 region_model::append_ssa_names_cb (const region *base_reg,
3712 append_ssa_names_cb_data *cb_data)
3714 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
3715 return;
3716 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
3718 if (TREE_CODE (decl_reg->get_decl ()) == SSA_NAME)
3719 cb_data->out->safe_push (decl_reg);
3723 /* Return a new region describing a heap-allocated block of memory. */
3725 const region *
3726 region_model::create_region_for_heap_alloc (const svalue *size_in_bytes)
3728 const region *reg = m_mgr->create_region_for_heap_alloc ();
3729 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
3730 set_dynamic_extents (reg, size_in_bytes);
3731 return reg;
3734 /* Return a new region describing a block of memory allocated within the
3735 current frame. */
3737 const region *
3738 region_model::create_region_for_alloca (const svalue *size_in_bytes)
3740 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
3741 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
3742 set_dynamic_extents (reg, size_in_bytes);
3743 return reg;
3746 /* Record that the size of REG is SIZE_IN_BYTES. */
3748 void
3749 region_model::set_dynamic_extents (const region *reg,
3750 const svalue *size_in_bytes)
3752 assert_compat_types (size_in_bytes->get_type (), size_type_node);
3753 m_dynamic_extents.put (reg, size_in_bytes);
3756 /* Get the recording of REG in bytes, or NULL if no dynamic size was
3757 recorded. */
3759 const svalue *
3760 region_model::get_dynamic_extents (const region *reg) const
3762 if (const svalue * const *slot = m_dynamic_extents.get (reg))
3763 return *slot;
3764 return NULL;
3767 /* Unset any recorded dynamic size of REG. */
3769 void
3770 region_model::unset_dynamic_extents (const region *reg)
3772 m_dynamic_extents.remove (reg);
3775 /* class noop_region_model_context : public region_model_context. */
3777 void
3778 noop_region_model_context::bifurcate (custom_edge_info *info)
3780 delete info;
3783 void
3784 noop_region_model_context::terminate_path ()
3788 /* struct model_merger. */
3790 /* Dump a multiline representation of this merger to PP. */
3792 void
3793 model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
3795 pp_string (pp, "model A:");
3796 pp_newline (pp);
3797 m_model_a->dump_to_pp (pp, simple, true);
3798 pp_newline (pp);
3800 pp_string (pp, "model B:");
3801 pp_newline (pp);
3802 m_model_b->dump_to_pp (pp, simple, true);
3803 pp_newline (pp);
3805 pp_string (pp, "merged model:");
3806 pp_newline (pp);
3807 m_merged_model->dump_to_pp (pp, simple, true);
3808 pp_newline (pp);
3811 /* Dump a multiline representation of this merger to FILE. */
3813 void
3814 model_merger::dump (FILE *fp, bool simple) const
3816 pretty_printer pp;
3817 pp_format_decoder (&pp) = default_tree_printer;
3818 pp_show_color (&pp) = pp_show_color (global_dc->printer);
3819 pp.buffer->stream = fp;
3820 dump_to_pp (&pp, simple);
3821 pp_flush (&pp);
3824 /* Dump a multiline representation of this merger to stderr. */
3826 DEBUG_FUNCTION void
3827 model_merger::dump (bool simple) const
3829 dump (stderr, simple);
3832 } // namespace ana
3834 /* Dump RMODEL fully to stderr (i.e. without summarization). */
3836 DEBUG_FUNCTION void
3837 debug (const region_model &rmodel)
3839 rmodel.dump (false);
3842 /* class rejected_op_constraint : public rejected_constraint. */
3844 void
3845 rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
3847 region_model m (m_model);
3848 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
3849 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
3850 lhs_sval->dump_to_pp (pp, true);
3851 pp_printf (pp, " %s ", op_symbol_code (m_op));
3852 rhs_sval->dump_to_pp (pp, true);
3855 /* class rejected_ranges_constraint : public rejected_constraint. */
3857 void
3858 rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
3860 region_model m (m_model);
3861 const svalue *sval = m.get_rvalue (m_expr, NULL);
3862 sval->dump_to_pp (pp, true);
3863 pp_string (pp, " in ");
3864 m_ranges->dump_to_pp (pp, true);
3867 /* class engine. */
3869 /* Dump the managed objects by class to LOGGER, and the per-class totals. */
3871 void
3872 engine::log_stats (logger *logger) const
3874 m_mgr.log_stats (logger, true);
3877 namespace ana {
3879 #if CHECKING_P
3881 namespace selftest {
3883 /* Build a constant tree of the given type from STR. */
3885 static tree
3886 build_real_cst_from_string (tree type, const char *str)
3888 REAL_VALUE_TYPE real;
3889 real_from_string (&real, str);
3890 return build_real (type, real);
3893 /* Append various "interesting" constants to OUT (e.g. NaN). */
3895 static void
3896 append_interesting_constants (auto_vec<tree> *out)
3898 out->safe_push (build_int_cst (integer_type_node, 0));
3899 out->safe_push (build_int_cst (integer_type_node, 42));
3900 out->safe_push (build_int_cst (unsigned_type_node, 0));
3901 out->safe_push (build_int_cst (unsigned_type_node, 42));
3902 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
3903 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
3904 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
3905 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
3906 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
3907 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
3908 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
3909 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
3912 /* Verify that tree_cmp is a well-behaved comparator for qsort, even
3913 if the underlying constants aren't comparable. */
3915 static void
3916 test_tree_cmp_on_constants ()
3918 auto_vec<tree> csts;
3919 append_interesting_constants (&csts);
3921 /* Try sorting every triple. */
3922 const unsigned num = csts.length ();
3923 for (unsigned i = 0; i < num; i++)
3924 for (unsigned j = 0; j < num; j++)
3925 for (unsigned k = 0; k < num; k++)
3927 auto_vec<tree> v (3);
3928 v.quick_push (csts[i]);
3929 v.quick_push (csts[j]);
3930 v.quick_push (csts[k]);
3931 v.qsort (tree_cmp);
3935 /* Implementation detail of the ASSERT_CONDITION_* macros. */
3937 void
3938 assert_condition (const location &loc,
3939 region_model &model,
3940 const svalue *lhs, tree_code op, const svalue *rhs,
3941 tristate expected)
3943 tristate actual = model.eval_condition (lhs, op, rhs);
3944 ASSERT_EQ_AT (loc, actual, expected);
3947 /* Implementation detail of the ASSERT_CONDITION_* macros. */
3949 void
3950 assert_condition (const location &loc,
3951 region_model &model,
3952 tree lhs, tree_code op, tree rhs,
3953 tristate expected)
3955 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
3956 ASSERT_EQ_AT (loc, actual, expected);
3959 /* Implementation detail of ASSERT_DUMP_TREE_EQ. */
3961 static void
3962 assert_dump_tree_eq (const location &loc, tree t, const char *expected)
3964 auto_fix_quotes sentinel;
3965 pretty_printer pp;
3966 pp_format_decoder (&pp) = default_tree_printer;
3967 dump_tree (&pp, t);
3968 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
3971 /* Assert that dump_tree (T) is EXPECTED. */
3973 #define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
3974 SELFTEST_BEGIN_STMT \
3975 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
3976 SELFTEST_END_STMT
3978 /* Implementation detail of ASSERT_DUMP_EQ. */
3980 static void
3981 assert_dump_eq (const location &loc,
3982 const region_model &model,
3983 bool summarize,
3984 const char *expected)
3986 auto_fix_quotes sentinel;
3987 pretty_printer pp;
3988 pp_format_decoder (&pp) = default_tree_printer;
3990 model.dump_to_pp (&pp, summarize, true);
3991 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
3994 /* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
3996 #define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
3997 SELFTEST_BEGIN_STMT \
3998 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
3999 SELFTEST_END_STMT
4001 /* Smoketest for region_model::dump_to_pp. */
4003 static void
4004 test_dump ()
4006 region_model_manager mgr;
4007 region_model model (&mgr);
4009 ASSERT_DUMP_EQ (model, false,
4010 "stack depth: 0\n"
4011 "m_called_unknown_fn: FALSE\n"
4012 "constraint_manager:\n"
4013 " equiv classes:\n"
4014 " constraints:\n");
4015 ASSERT_DUMP_EQ (model, true,
4016 "stack depth: 0\n"
4017 "m_called_unknown_fn: FALSE\n"
4018 "constraint_manager:\n"
4019 " equiv classes:\n"
4020 " constraints:\n");
4023 /* Helper function for selftests. Create a struct or union type named NAME,
4024 with the fields given by the FIELD_DECLS in FIELDS.
4025 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
4026 create a UNION_TYPE. */
4028 static tree
4029 make_test_compound_type (const char *name, bool is_struct,
4030 const auto_vec<tree> *fields)
4032 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
4033 TYPE_NAME (t) = get_identifier (name);
4034 TYPE_SIZE (t) = 0;
4036 tree fieldlist = NULL;
4037 int i;
4038 tree field;
4039 FOR_EACH_VEC_ELT (*fields, i, field)
4041 gcc_assert (TREE_CODE (field) == FIELD_DECL);
4042 DECL_CONTEXT (field) = t;
4043 fieldlist = chainon (field, fieldlist);
4045 fieldlist = nreverse (fieldlist);
4046 TYPE_FIELDS (t) = fieldlist;
4048 layout_type (t);
4049 return t;
4052 /* Selftest fixture for creating the type "struct coord {int x; int y; };". */
4054 struct coord_test
4056 coord_test ()
4058 auto_vec<tree> fields;
4059 m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
4060 get_identifier ("x"), integer_type_node);
4061 fields.safe_push (m_x_field);
4062 m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
4063 get_identifier ("y"), integer_type_node);
4064 fields.safe_push (m_y_field);
4065 m_coord_type = make_test_compound_type ("coord", true, &fields);
4068 tree m_x_field;
4069 tree m_y_field;
4070 tree m_coord_type;
4073 /* Verify usage of a struct. */
4075 static void
4076 test_struct ()
4078 coord_test ct;
4080 tree c = build_global_decl ("c", ct.m_coord_type);
4081 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4082 c, ct.m_x_field, NULL_TREE);
4083 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4084 c, ct.m_y_field, NULL_TREE);
4086 tree int_17 = build_int_cst (integer_type_node, 17);
4087 tree int_m3 = build_int_cst (integer_type_node, -3);
4089 region_model_manager mgr;
4090 region_model model (&mgr);
4091 model.set_value (c_x, int_17, NULL);
4092 model.set_value (c_y, int_m3, NULL);
4094 /* Verify get_offset for "c.x". */
4096 const region *c_x_reg = model.get_lvalue (c_x, NULL);
4097 region_offset offset = c_x_reg->get_offset ();
4098 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
4099 ASSERT_EQ (offset.get_bit_offset (), 0);
4102 /* Verify get_offset for "c.y". */
4104 const region *c_y_reg = model.get_lvalue (c_y, NULL);
4105 region_offset offset = c_y_reg->get_offset ();
4106 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
4107 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
4111 /* Verify usage of an array element. */
4113 static void
4114 test_array_1 ()
4116 tree tlen = size_int (10);
4117 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
4119 tree a = build_global_decl ("a", arr_type);
4121 region_model_manager mgr;
4122 region_model model (&mgr);
4123 tree int_0 = build_int_cst (integer_type_node, 0);
4124 tree a_0 = build4 (ARRAY_REF, char_type_node,
4125 a, int_0, NULL_TREE, NULL_TREE);
4126 tree char_A = build_int_cst (char_type_node, 'A');
4127 model.set_value (a_0, char_A, NULL);
4130 /* Verify that region_model::get_representative_tree works as expected. */
4132 static void
4133 test_get_representative_tree ()
4135 region_model_manager mgr;
4137 /* STRING_CST. */
4139 tree string_cst = build_string (4, "foo");
4140 region_model m (&mgr);
4141 const svalue *str_sval = m.get_rvalue (string_cst, NULL);
4142 tree rep = m.get_representative_tree (str_sval);
4143 ASSERT_EQ (rep, string_cst);
4146 /* String literal. */
4148 tree string_cst_ptr = build_string_literal (4, "foo");
4149 region_model m (&mgr);
4150 const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
4151 tree rep = m.get_representative_tree (str_sval);
4152 ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
4155 /* Value of an element within an array. */
4157 tree tlen = size_int (10);
4158 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
4159 tree a = build_global_decl ("a", arr_type);
4160 placeholder_svalue test_sval (char_type_node, "test value");
4162 /* Value of a[3]. */
4164 test_region_model_context ctxt;
4165 region_model model (&mgr);
4166 tree int_3 = build_int_cst (integer_type_node, 3);
4167 tree a_3 = build4 (ARRAY_REF, char_type_node,
4168 a, int_3, NULL_TREE, NULL_TREE);
4169 const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
4170 model.set_value (a_3_reg, &test_sval, &ctxt);
4171 tree rep = model.get_representative_tree (&test_sval);
4172 ASSERT_DUMP_TREE_EQ (rep, "a[3]");
4175 /* Value of a[0]. */
4177 test_region_model_context ctxt;
4178 region_model model (&mgr);
4179 tree idx = build_int_cst (integer_type_node, 0);
4180 tree a_0 = build4 (ARRAY_REF, char_type_node,
4181 a, idx, NULL_TREE, NULL_TREE);
4182 const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
4183 model.set_value (a_0_reg, &test_sval, &ctxt);
4184 tree rep = model.get_representative_tree (&test_sval);
4185 ASSERT_DUMP_TREE_EQ (rep, "a[0]");
4189 /* Value of a field within a struct. */
4191 coord_test ct;
4193 tree c = build_global_decl ("c", ct.m_coord_type);
4194 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4195 c, ct.m_x_field, NULL_TREE);
4196 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4197 c, ct.m_y_field, NULL_TREE);
4199 test_region_model_context ctxt;
4201 /* Value of initial field. */
4203 region_model m (&mgr);
4204 const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
4205 placeholder_svalue test_sval_x (integer_type_node, "test x val");
4206 m.set_value (c_x_reg, &test_sval_x, &ctxt);
4207 tree rep = m.get_representative_tree (&test_sval_x);
4208 ASSERT_DUMP_TREE_EQ (rep, "c.x");
4211 /* Value of non-initial field. */
4213 region_model m (&mgr);
4214 const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
4215 placeholder_svalue test_sval_y (integer_type_node, "test y val");
4216 m.set_value (c_y_reg, &test_sval_y, &ctxt);
4217 tree rep = m.get_representative_tree (&test_sval_y);
4218 ASSERT_DUMP_TREE_EQ (rep, "c.y");
4223 /* Verify that calling region_model::get_rvalue repeatedly on the same
4224 tree constant retrieves the same svalue *. */
4226 static void
4227 test_unique_constants ()
4229 tree int_0 = build_int_cst (integer_type_node, 0);
4230 tree int_42 = build_int_cst (integer_type_node, 42);
4232 test_region_model_context ctxt;
4233 region_model_manager mgr;
4234 region_model model (&mgr);
4235 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
4236 ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
4237 model.get_rvalue (int_42, &ctxt));
4238 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
4239 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
4241 /* A "(const int)42" will be a different tree from "(int)42)"... */
4242 tree const_int_type_node
4243 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
4244 tree const_int_42 = build_int_cst (const_int_type_node, 42);
4245 ASSERT_NE (int_42, const_int_42);
4246 /* It should have a different const_svalue. */
4247 const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
4248 const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
4249 ASSERT_NE (int_42_sval, const_int_42_sval);
4250 /* But they should compare as equal. */
4251 ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
4252 ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
4255 /* Verify that each type gets its own singleton unknown_svalue within a
4256 region_model_manager, and that NULL_TREE gets its own singleton. */
4258 static void
4259 test_unique_unknowns ()
4261 region_model_manager mgr;
4262 const svalue *unknown_int
4263 = mgr.get_or_create_unknown_svalue (integer_type_node);
4264 /* Repeated calls with the same type should get the same "unknown"
4265 svalue. */
4266 const svalue *unknown_int_2
4267 = mgr.get_or_create_unknown_svalue (integer_type_node);
4268 ASSERT_EQ (unknown_int, unknown_int_2);
4270 /* Different types (or the NULL type) should have different
4271 unknown_svalues. */
4272 const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
4273 ASSERT_NE (unknown_NULL_type, unknown_int);
4275 /* Repeated calls with NULL for the type should get the same "unknown"
4276 svalue. */
4277 const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
4278 ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
4281 /* Verify that initial_svalue are handled as expected. */
4283 static void
4284 test_initial_svalue_folding ()
4286 region_model_manager mgr;
4287 tree x = build_global_decl ("x", integer_type_node);
4288 tree y = build_global_decl ("y", integer_type_node);
4290 test_region_model_context ctxt;
4291 region_model model (&mgr);
4292 const svalue *x_init = model.get_rvalue (x, &ctxt);
4293 const svalue *y_init = model.get_rvalue (y, &ctxt);
4294 ASSERT_NE (x_init, y_init);
4295 const region *x_reg = model.get_lvalue (x, &ctxt);
4296 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
4300 /* Verify that unary ops are folded as expected. */
4302 static void
4303 test_unaryop_svalue_folding ()
4305 region_model_manager mgr;
4306 tree x = build_global_decl ("x", integer_type_node);
4307 tree y = build_global_decl ("y", integer_type_node);
4309 test_region_model_context ctxt;
4310 region_model model (&mgr);
4311 const svalue *x_init = model.get_rvalue (x, &ctxt);
4312 const svalue *y_init = model.get_rvalue (y, &ctxt);
4313 const region *x_reg = model.get_lvalue (x, &ctxt);
4314 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
4316 /* "(int)x" -> "x". */
4317 ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));
4319 /* "(void *)x" -> something other than "x". */
4320 ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));
4322 /* "!(x == y)" -> "x != y". */
4323 ASSERT_EQ (mgr.get_or_create_unaryop
4324 (boolean_type_node, TRUTH_NOT_EXPR,
4325 mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
4326 x_init, y_init)),
4327 mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
4328 x_init, y_init));
4329 /* "!(x > y)" -> "x <= y". */
4330 ASSERT_EQ (mgr.get_or_create_unaryop
4331 (boolean_type_node, TRUTH_NOT_EXPR,
4332 mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
4333 x_init, y_init)),
4334 mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
4335 x_init, y_init));
4338 /* Verify that binops on constant svalues are folded. */
4340 static void
4341 test_binop_svalue_folding ()
4343 #define NUM_CSTS 10
4344 tree cst_int[NUM_CSTS];
4345 region_model_manager mgr;
4346 const svalue *cst_sval[NUM_CSTS];
4347 for (int i = 0; i < NUM_CSTS; i++)
4349 cst_int[i] = build_int_cst (integer_type_node, i);
4350 cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
4351 ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
4352 ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
4355 for (int i = 0; i < NUM_CSTS; i++)
4356 for (int j = 0; j < NUM_CSTS; j++)
4358 if (i != j)
4359 ASSERT_NE (cst_sval[i], cst_sval[j]);
4360 if (i + j < NUM_CSTS)
4362 const svalue *sum
4363 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4364 cst_sval[i], cst_sval[j]);
4365 ASSERT_EQ (sum, cst_sval[i + j]);
4367 if (i - j >= 0)
4369 const svalue *difference
4370 = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
4371 cst_sval[i], cst_sval[j]);
4372 ASSERT_EQ (difference, cst_sval[i - j]);
4374 if (i * j < NUM_CSTS)
4376 const svalue *product
4377 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4378 cst_sval[i], cst_sval[j]);
4379 ASSERT_EQ (product, cst_sval[i * j]);
4381 const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
4382 cst_sval[i], cst_sval[j]);
4383 ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
4384 const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
4385 cst_sval[i], cst_sval[j]);
4386 ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
4387 // etc
4390 tree x = build_global_decl ("x", integer_type_node);
4392 test_region_model_context ctxt;
4393 region_model model (&mgr);
4394 const svalue *x_init = model.get_rvalue (x, &ctxt);
4396 /* PLUS_EXPR folding. */
4397 const svalue *x_init_plus_zero
4398 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4399 x_init, cst_sval[0]);
4400 ASSERT_EQ (x_init_plus_zero, x_init);
4401 const svalue *zero_plus_x_init
4402 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4403 cst_sval[0], x_init);
4404 ASSERT_EQ (zero_plus_x_init, x_init);
4406 /* MULT_EXPR folding. */
4407 const svalue *x_init_times_zero
4408 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4409 x_init, cst_sval[0]);
4410 ASSERT_EQ (x_init_times_zero, cst_sval[0]);
4411 const svalue *zero_times_x_init
4412 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4413 cst_sval[0], x_init);
4414 ASSERT_EQ (zero_times_x_init, cst_sval[0]);
4416 const svalue *x_init_times_one
4417 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4418 x_init, cst_sval[1]);
4419 ASSERT_EQ (x_init_times_one, x_init);
4420 const svalue *one_times_x_init
4421 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4422 cst_sval[1], x_init);
4423 ASSERT_EQ (one_times_x_init, x_init);
4425 // etc
4426 // TODO: do we want to use the match-and-simplify DSL for this?
4428 /* Verify that binops put any constants on the RHS. */
4429 const svalue *four_times_x_init
4430 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4431 cst_sval[4], x_init);
4432 const svalue *x_init_times_four
4433 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4434 x_init, cst_sval[4]);
4435 ASSERT_EQ (four_times_x_init, x_init_times_four);
4436 const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
4437 ASSERT_EQ (binop->get_op (), MULT_EXPR);
4438 ASSERT_EQ (binop->get_arg0 (), x_init);
4439 ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
4441 /* Verify that ((x + 1) + 1) == (x + 2). */
4442 const svalue *x_init_plus_one
4443 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4444 x_init, cst_sval[1]);
4445 const svalue *x_init_plus_two
4446 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4447 x_init, cst_sval[2]);
4448 const svalue *x_init_plus_one_plus_one
4449 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4450 x_init_plus_one, cst_sval[1]);
4451 ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
4454 /* Verify that sub_svalues are folded as expected. */
4456 static void
4457 test_sub_svalue_folding ()
4459 coord_test ct;
4460 tree c = build_global_decl ("c", ct.m_coord_type);
4461 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4462 c, ct.m_x_field, NULL_TREE);
4464 region_model_manager mgr;
4465 region_model model (&mgr);
4466 test_region_model_context ctxt;
4467 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
4469 /* Verify that sub_svalue of "unknown" simply
4470 yields an unknown. */
4472 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
4473 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
4474 unknown, c_x_reg);
4475 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
4476 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
4479 /* Test that region::descendent_of_p works as expected. */
4481 static void
4482 test_descendent_of_p ()
4484 region_model_manager mgr;
4485 const region *stack = mgr.get_stack_region ();
4486 const region *heap = mgr.get_heap_region ();
4487 const region *code = mgr.get_code_region ();
4488 const region *globals = mgr.get_globals_region ();
4490 /* descendent_of_p should return true when used on the region itself. */
4491 ASSERT_TRUE (stack->descendent_of_p (stack));
4492 ASSERT_FALSE (stack->descendent_of_p (heap));
4493 ASSERT_FALSE (stack->descendent_of_p (code));
4494 ASSERT_FALSE (stack->descendent_of_p (globals));
4496 tree x = build_global_decl ("x", integer_type_node);
4497 const region *x_reg = mgr.get_region_for_global (x);
4498 ASSERT_TRUE (x_reg->descendent_of_p (globals));
4500 /* A cast_region should be a descendent of the original region. */
4501 const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
4502 ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
4505 /* Verify that simple assignments work as expected. */
4507 static void
4508 test_assignment ()
4510 tree int_0 = build_int_cst (integer_type_node, 0);
4511 tree x = build_global_decl ("x", integer_type_node);
4512 tree y = build_global_decl ("y", integer_type_node);
4514 /* "x == 0", then use of y, then "y = 0;". */
4515 region_model_manager mgr;
4516 region_model model (&mgr);
4517 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
4518 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
4519 model.set_value (model.get_lvalue (y, NULL),
4520 model.get_rvalue (int_0, NULL),
4521 NULL);
4522 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
4523 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
4526 /* Verify that compound assignments work as expected. */
4528 static void
4529 test_compound_assignment ()
4531 coord_test ct;
4533 tree c = build_global_decl ("c", ct.m_coord_type);
4534 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4535 c, ct.m_x_field, NULL_TREE);
4536 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4537 c, ct.m_y_field, NULL_TREE);
4538 tree d = build_global_decl ("d", ct.m_coord_type);
4539 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4540 d, ct.m_x_field, NULL_TREE);
4541 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4542 d, ct.m_y_field, NULL_TREE);
4544 tree int_17 = build_int_cst (integer_type_node, 17);
4545 tree int_m3 = build_int_cst (integer_type_node, -3);
4547 region_model_manager mgr;
4548 region_model model (&mgr);
4549 model.set_value (c_x, int_17, NULL);
4550 model.set_value (c_y, int_m3, NULL);
4552 /* Copy c to d. */
4553 model.copy_region (model.get_lvalue (d, NULL), model.get_lvalue (c, NULL),
4554 NULL);
4555 /* Check that the fields have the same svalues. */
4556 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
4557 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
4560 /* Verify the details of pushing and popping stack frames. */
4562 static void
4563 test_stack_frames ()
4565 tree int_42 = build_int_cst (integer_type_node, 42);
4566 tree int_10 = build_int_cst (integer_type_node, 10);
4567 tree int_5 = build_int_cst (integer_type_node, 5);
4568 tree int_0 = build_int_cst (integer_type_node, 0);
4570 auto_vec <tree> param_types;
4571 tree parent_fndecl = make_fndecl (integer_type_node,
4572 "parent_fn",
4573 param_types);
4574 allocate_struct_function (parent_fndecl, true);
4576 tree child_fndecl = make_fndecl (integer_type_node,
4577 "child_fn",
4578 param_types);
4579 allocate_struct_function (child_fndecl, true);
4581 /* "a" and "b" in the parent frame. */
4582 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4583 get_identifier ("a"),
4584 integer_type_node);
4585 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4586 get_identifier ("b"),
4587 integer_type_node);
4588 /* "x" and "y" in a child frame. */
4589 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4590 get_identifier ("x"),
4591 integer_type_node);
4592 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4593 get_identifier ("y"),
4594 integer_type_node);
4596 /* "p" global. */
4597 tree p = build_global_decl ("p", ptr_type_node);
4599 /* "q" global. */
4600 tree q = build_global_decl ("q", ptr_type_node);
4602 region_model_manager mgr;
4603 test_region_model_context ctxt;
4604 region_model model (&mgr);
4606 /* Push stack frame for "parent_fn". */
4607 const region *parent_frame_reg
4608 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
4609 NULL, &ctxt);
4610 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
4611 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
4612 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
4613 model.set_value (a_in_parent_reg,
4614 model.get_rvalue (int_42, &ctxt),
4615 &ctxt);
4616 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
4618 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
4619 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
4620 tristate (tristate::TS_TRUE));
4622 /* Push stack frame for "child_fn". */
4623 const region *child_frame_reg
4624 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
4625 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
4626 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
4627 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
4628 model.set_value (x_in_child_reg,
4629 model.get_rvalue (int_0, &ctxt),
4630 &ctxt);
4631 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
4633 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
4634 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
4635 tristate (tristate::TS_TRUE));
4637 /* Point a global pointer at a local in the child frame: p = &x. */
4638 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
4639 model.set_value (p_in_globals_reg,
4640 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
4641 &ctxt);
4642 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
4644 /* Point another global pointer at p: q = &p. */
4645 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
4646 model.set_value (q_in_globals_reg,
4647 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
4648 &ctxt);
4650 /* Test region::descendent_of_p. */
4651 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
4652 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
4653 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
4655 /* Pop the "child_fn" frame from the stack. */
4656 model.pop_frame (NULL, NULL, &ctxt);
4657 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
4658 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
4660 /* Verify that p (which was pointing at the local "x" in the popped
4661 frame) has been poisoned. */
4662 const svalue *new_p_sval = model.get_rvalue (p, NULL);
4663 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
4664 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
4665 POISON_KIND_POPPED_STACK);
4667 /* Verify that q still points to p, in spite of the region
4668 renumbering. */
4669 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
4670 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
4671 ASSERT_EQ (new_q_sval->maybe_get_region (),
4672 model.get_lvalue (p, &ctxt));
4674 /* Verify that top of stack has been updated. */
4675 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
4677 /* Verify locals in parent frame. */
4678 /* Verify "a" still has its value. */
4679 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
4680 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
4681 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
4682 int_42);
4683 /* Verify "b" still has its constraint. */
4684 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
4685 tristate (tristate::TS_TRUE));
4688 /* Verify that get_representative_path_var works as expected, that
4689 we can map from regions to parms and back within a recursive call
4690 stack. */
4692 static void
4693 test_get_representative_path_var ()
4695 auto_vec <tree> param_types;
4696 tree fndecl = make_fndecl (integer_type_node,
4697 "factorial",
4698 param_types);
4699 allocate_struct_function (fndecl, true);
4701 /* Parm "n". */
4702 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4703 get_identifier ("n"),
4704 integer_type_node);
4706 region_model_manager mgr;
4707 test_region_model_context ctxt;
4708 region_model model (&mgr);
4710 /* Push 5 stack frames for "factorial", each with a param */
4711 auto_vec<const region *> parm_regs;
4712 auto_vec<const svalue *> parm_svals;
4713 for (int depth = 0; depth < 5; depth++)
4715 const region *frame_n_reg
4716 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
4717 const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
4718 parm_regs.safe_push (parm_n_reg);
4720 ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
4721 const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
4722 parm_svals.safe_push (sval_n);
4725 /* Verify that we can recognize that the regions are the parms,
4726 at every depth. */
4727 for (int depth = 0; depth < 5; depth++)
4730 svalue_set visited;
4731 ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
4732 &visited),
4733 path_var (n, depth + 1));
4735 /* ...and that we can lookup lvalues for locals for all frames,
4736 not just the top. */
4737 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
4738 parm_regs[depth]);
4739 /* ...and that we can locate the svalues. */
4741 svalue_set visited;
4742 ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
4743 &visited),
4744 path_var (n, depth + 1));
4749 /* Ensure that region_model::operator== works as expected. */
4751 static void
4752 test_equality_1 ()
4754 tree int_42 = build_int_cst (integer_type_node, 42);
4755 tree int_17 = build_int_cst (integer_type_node, 17);
4757 /* Verify that "empty" region_model instances are equal to each other. */
4758 region_model_manager mgr;
4759 region_model model0 (&mgr);
4760 region_model model1 (&mgr);
4761 ASSERT_EQ (model0, model1);
4763 /* Verify that setting state in model1 makes the models non-equal. */
4764 tree x = build_global_decl ("x", integer_type_node);
4765 model0.set_value (x, int_42, NULL);
4766 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
4767 ASSERT_NE (model0, model1);
4769 /* Verify the copy-ctor. */
4770 region_model model2 (model0);
4771 ASSERT_EQ (model0, model2);
4772 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
4773 ASSERT_NE (model1, model2);
4775 /* Verify that models obtained from copy-ctor are independently editable
4776 w/o affecting the original model. */
4777 model2.set_value (x, int_17, NULL);
4778 ASSERT_NE (model0, model2);
4779 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
4780 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
4783 /* Verify that region models for
4784 x = 42; y = 113;
4786 y = 113; x = 42;
4787 are equal. */
4789 static void
4790 test_canonicalization_2 ()
4792 tree int_42 = build_int_cst (integer_type_node, 42);
4793 tree int_113 = build_int_cst (integer_type_node, 113);
4794 tree x = build_global_decl ("x", integer_type_node);
4795 tree y = build_global_decl ("y", integer_type_node);
4797 region_model_manager mgr;
4798 region_model model0 (&mgr);
4799 model0.set_value (model0.get_lvalue (x, NULL),
4800 model0.get_rvalue (int_42, NULL),
4801 NULL);
4802 model0.set_value (model0.get_lvalue (y, NULL),
4803 model0.get_rvalue (int_113, NULL),
4804 NULL);
4806 region_model model1 (&mgr);
4807 model1.set_value (model1.get_lvalue (y, NULL),
4808 model1.get_rvalue (int_113, NULL),
4809 NULL);
4810 model1.set_value (model1.get_lvalue (x, NULL),
4811 model1.get_rvalue (int_42, NULL),
4812 NULL);
4814 ASSERT_EQ (model0, model1);
4817 /* Verify that constraints for
4818 x > 3 && y > 42
4820 y > 42 && x > 3
4821 are equal after canonicalization. */
4823 static void
4824 test_canonicalization_3 ()
4826 tree int_3 = build_int_cst (integer_type_node, 3);
4827 tree int_42 = build_int_cst (integer_type_node, 42);
4828 tree x = build_global_decl ("x", integer_type_node);
4829 tree y = build_global_decl ("y", integer_type_node);
4831 region_model_manager mgr;
4832 region_model model0 (&mgr);
4833 model0.add_constraint (x, GT_EXPR, int_3, NULL);
4834 model0.add_constraint (y, GT_EXPR, int_42, NULL);
4836 region_model model1 (&mgr);
4837 model1.add_constraint (y, GT_EXPR, int_42, NULL);
4838 model1.add_constraint (x, GT_EXPR, int_3, NULL);
4840 model0.canonicalize ();
4841 model1.canonicalize ();
4842 ASSERT_EQ (model0, model1);
4845 /* Verify that we can canonicalize a model containing NaN and other real
4846 constants. */
4848 static void
4849 test_canonicalization_4 ()
4851 auto_vec<tree> csts;
4852 append_interesting_constants (&csts);
4854 region_model_manager mgr;
4855 region_model model (&mgr);
4857 for (tree cst : csts)
4858 model.get_rvalue (cst, NULL);
4860 model.canonicalize ();
4863 /* Assert that if we have two region_model instances
4864 with values VAL_A and VAL_B for EXPR that they are
4865 mergable. Write the merged model to *OUT_MERGED_MODEL,
4866 and the merged svalue ptr to *OUT_MERGED_SVALUE.
4867 If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
4868 for that region_model. */
4870 static void
4871 assert_region_models_merge (tree expr, tree val_a, tree val_b,
4872 region_model *out_merged_model,
4873 const svalue **out_merged_svalue)
4875 program_point point (program_point::origin ());
4876 test_region_model_context ctxt;
4877 region_model_manager *mgr = out_merged_model->get_manager ();
4878 region_model model0 (mgr);
4879 region_model model1 (mgr);
4880 if (val_a)
4881 model0.set_value (model0.get_lvalue (expr, &ctxt),
4882 model0.get_rvalue (val_a, &ctxt),
4883 &ctxt);
4884 if (val_b)
4885 model1.set_value (model1.get_lvalue (expr, &ctxt),
4886 model1.get_rvalue (val_b, &ctxt),
4887 &ctxt);
4889 /* They should be mergeable. */
4890 ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
4891 *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
4894 /* Verify that we can merge region_model instances. */
4896 static void
4897 test_state_merging ()
4899 tree int_42 = build_int_cst (integer_type_node, 42);
4900 tree int_113 = build_int_cst (integer_type_node, 113);
4901 tree x = build_global_decl ("x", integer_type_node);
4902 tree y = build_global_decl ("y", integer_type_node);
4903 tree z = build_global_decl ("z", integer_type_node);
4904 tree p = build_global_decl ("p", ptr_type_node);
4906 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
4907 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
4909 auto_vec <tree> param_types;
4910 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
4911 allocate_struct_function (test_fndecl, true);
4913 /* Param "a". */
4914 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4915 get_identifier ("a"),
4916 integer_type_node);
4917 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
4919 /* Param "q", a pointer. */
4920 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4921 get_identifier ("q"),
4922 ptr_type_node);
4924 program_point point (program_point::origin ());
4925 region_model_manager mgr;
4928 region_model model0 (&mgr);
4929 region_model model1 (&mgr);
4930 region_model merged (&mgr);
4931 /* Verify empty models can be merged. */
4932 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
4933 ASSERT_EQ (model0, merged);
4936 /* Verify that we can merge two contradictory constraints on the
4937 value for a global. */
4938 /* TODO: verify that the merged model doesn't have a value for
4939 the global */
4941 region_model model0 (&mgr);
4942 region_model model1 (&mgr);
4943 region_model merged (&mgr);
4944 test_region_model_context ctxt;
4945 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
4946 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
4947 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
4948 ASSERT_NE (model0, merged);
4949 ASSERT_NE (model1, merged);
4952 /* Verify handling of a PARM_DECL. */
4954 test_region_model_context ctxt;
4955 region_model model0 (&mgr);
4956 region_model model1 (&mgr);
4957 ASSERT_EQ (model0.get_stack_depth (), 0);
4958 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
4959 ASSERT_EQ (model0.get_stack_depth (), 1);
4960 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
4962 placeholder_svalue test_sval (integer_type_node, "test sval");
4963 model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
4964 model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
4965 ASSERT_EQ (model0, model1);
4967 /* They should be mergeable, and the result should be the same. */
4968 region_model merged (&mgr);
4969 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
4970 ASSERT_EQ (model0, merged);
4971 /* In particular, "a" should have the placeholder value. */
4972 ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
4975 /* Verify handling of a global. */
4977 test_region_model_context ctxt;
4978 region_model model0 (&mgr);
4979 region_model model1 (&mgr);
4981 placeholder_svalue test_sval (integer_type_node, "test sval");
4982 model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
4983 model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
4984 ASSERT_EQ (model0, model1);
4986 /* They should be mergeable, and the result should be the same. */
4987 region_model merged (&mgr);
4988 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
4989 ASSERT_EQ (model0, merged);
4990 /* In particular, "x" should have the placeholder value. */
4991 ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
4994 /* Use global-handling to verify various combinations of values. */
4996 /* Two equal constant values. */
4998 region_model merged (&mgr);
4999 const svalue *merged_x_sval;
5000 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
5002 /* In particular, there should be a constant value for "x". */
5003 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
5004 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
5005 int_42);
5008 /* Two non-equal constant values. */
5010 region_model merged (&mgr);
5011 const svalue *merged_x_sval;
5012 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
5014 /* In particular, there should be a "widening" value for "x". */
5015 ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
5018 /* Initial and constant. */
5020 region_model merged (&mgr);
5021 const svalue *merged_x_sval;
5022 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
5024 /* In particular, there should be an unknown value for "x". */
5025 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5028 /* Constant and initial. */
5030 region_model merged (&mgr);
5031 const svalue *merged_x_sval;
5032 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
5034 /* In particular, there should be an unknown value for "x". */
5035 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5038 /* Unknown and constant. */
5039 // TODO
5041 /* Pointers: NULL and NULL. */
5042 // TODO
5044 /* Pointers: NULL and non-NULL. */
5045 // TODO
5047 /* Pointers: non-NULL and non-NULL: ptr to a local. */
5049 region_model model0 (&mgr);
5050 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5051 model0.set_value (model0.get_lvalue (p, NULL),
5052 model0.get_rvalue (addr_of_a, NULL), NULL);
5054 region_model model1 (model0);
5055 ASSERT_EQ (model0, model1);
5057 /* They should be mergeable, and the result should be the same. */
5058 region_model merged (&mgr);
5059 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5060 ASSERT_EQ (model0, merged);
5063 /* Pointers: non-NULL and non-NULL: ptr to a global. */
5065 region_model merged (&mgr);
5066 /* p == &y in both input models. */
5067 const svalue *merged_p_sval;
5068 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
5069 &merged_p_sval);
5071 /* We should get p == &y in the merged model. */
5072 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
5073 const region_svalue *merged_p_ptr
5074 = merged_p_sval->dyn_cast_region_svalue ();
5075 const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
5076 ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
5079 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
5081 region_model merged (&mgr);
5082 /* x == &y vs x == &z in the input models; these are actually casts
5083 of the ptrs to "int". */
5084 const svalue *merged_x_sval;
5085 // TODO:
5086 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
5087 &merged_x_sval);
5089 /* We should get x == unknown in the merged model. */
5090 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5093 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
5095 test_region_model_context ctxt;
5096 region_model model0 (&mgr);
5097 tree size = build_int_cst (size_type_node, 1024);
5098 const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
5099 const region *new_reg = model0.create_region_for_heap_alloc (size_sval);
5100 const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
5101 model0.set_value (model0.get_lvalue (p, &ctxt),
5102 ptr_sval, &ctxt);
5104 region_model model1 (model0);
5106 ASSERT_EQ (model0, model1);
5108 region_model merged (&mgr);
5109 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5111 /* The merged model ought to be identical. */
5112 ASSERT_EQ (model0, merged);
5115 /* Two regions sharing the same placeholder svalue should continue sharing
5116 it after self-merger. */
5118 test_region_model_context ctxt;
5119 region_model model0 (&mgr);
5120 placeholder_svalue placeholder_sval (integer_type_node, "test");
5121 model0.set_value (model0.get_lvalue (x, &ctxt),
5122 &placeholder_sval, &ctxt);
5123 model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
5124 region_model model1 (model0);
5126 /* They should be mergeable, and the result should be the same. */
5127 region_model merged (&mgr);
5128 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5129 ASSERT_EQ (model0, merged);
5131 /* In particular, we should have x == y. */
5132 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
5133 tristate (tristate::TS_TRUE));
5137 region_model model0 (&mgr);
5138 region_model model1 (&mgr);
5139 test_region_model_context ctxt;
5140 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5141 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
5142 region_model merged (&mgr);
5143 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5147 region_model model0 (&mgr);
5148 region_model model1 (&mgr);
5149 test_region_model_context ctxt;
5150 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5151 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
5152 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
5153 region_model merged (&mgr);
5154 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5157 // TODO: what can't we merge? need at least one such test
5159 /* TODO: various things
5160 - heap regions
5161 - value merging:
5162 - every combination, but in particular
5163 - pairs of regions
5166 /* Views. */
5168 test_region_model_context ctxt;
5169 region_model model0 (&mgr);
5171 const region *x_reg = model0.get_lvalue (x, &ctxt);
5172 const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
5173 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
5175 region_model model1 (model0);
5176 ASSERT_EQ (model1, model0);
5178 /* They should be mergeable, and the result should be the same. */
5179 region_model merged (&mgr);
5180 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5183 /* Verify that we can merge a model in which a local in an older stack
5184 frame points to a local in a more recent stack frame. */
5186 region_model model0 (&mgr);
5187 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5188 const region *q_in_first_frame = model0.get_lvalue (q, NULL);
5190 /* Push a second frame. */
5191 const region *reg_2nd_frame
5192 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5194 /* Have a pointer in the older frame point to a local in the
5195 more recent frame. */
5196 const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
5197 model0.set_value (q_in_first_frame, sval_ptr, NULL);
5199 /* Verify that it's pointing at the newer frame. */
5200 const region *reg_pointee = sval_ptr->maybe_get_region ();
5201 ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
5203 model0.canonicalize ();
5205 region_model model1 (model0);
5206 ASSERT_EQ (model0, model1);
5208 /* They should be mergeable, and the result should be the same
5209 (after canonicalization, at least). */
5210 region_model merged (&mgr);
5211 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5212 merged.canonicalize ();
5213 ASSERT_EQ (model0, merged);
5216 /* Verify that we can merge a model in which a local points to a global. */
5218 region_model model0 (&mgr);
5219 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5220 model0.set_value (model0.get_lvalue (q, NULL),
5221 model0.get_rvalue (addr_of_y, NULL), NULL);
5223 region_model model1 (model0);
5224 ASSERT_EQ (model0, model1);
5226 /* They should be mergeable, and the result should be the same
5227 (after canonicalization, at least). */
5228 region_model merged (&mgr);
5229 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5230 ASSERT_EQ (model0, merged);
5234 /* Verify that constraints are correctly merged when merging region_model
5235 instances. */
5237 static void
5238 test_constraint_merging ()
5240 tree int_0 = build_int_cst (integer_type_node, 0);
5241 tree int_5 = build_int_cst (integer_type_node, 5);
5242 tree x = build_global_decl ("x", integer_type_node);
5243 tree y = build_global_decl ("y", integer_type_node);
5244 tree z = build_global_decl ("z", integer_type_node);
5245 tree n = build_global_decl ("n", integer_type_node);
5247 region_model_manager mgr;
5248 test_region_model_context ctxt;
5250 /* model0: 0 <= (x == y) < n. */
5251 region_model model0 (&mgr);
5252 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
5253 model0.add_constraint (x, GE_EXPR, int_0, NULL);
5254 model0.add_constraint (x, LT_EXPR, n, NULL);
5256 /* model1: z != 5 && (0 <= x < n). */
5257 region_model model1 (&mgr);
5258 model1.add_constraint (z, NE_EXPR, int_5, NULL);
5259 model1.add_constraint (x, GE_EXPR, int_0, NULL);
5260 model1.add_constraint (x, LT_EXPR, n, NULL);
5262 /* They should be mergeable; the merged constraints should
5263 be: (0 <= x < n). */
5264 program_point point (program_point::origin ());
5265 region_model merged (&mgr);
5266 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5268 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
5269 tristate (tristate::TS_TRUE));
5270 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
5271 tristate (tristate::TS_TRUE));
5273 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
5274 tristate (tristate::TS_UNKNOWN));
5275 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
5276 tristate (tristate::TS_UNKNOWN));
/* Verify that widening_svalue::eval_condition_without_cm works as
   expected.  */

static void
test_widening_constraints ()
{
  program_point point (program_point::origin ());
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_m1 = build_int_cst (integer_type_node, -1);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  region_model_manager mgr;
  test_region_model_context ctxt;
  const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
  const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
  /* Build a widening svalue whose base value is 0 and whose iterated
     value is 1, i.e. a value that starts at 0 and increases.  */
  const svalue *w_zero_then_one_sval
    = mgr.get_or_create_widening_svalue (integer_type_node, point,
					 int_0_sval, int_1_sval);
  const widening_svalue *w_zero_then_one
    = w_zero_then_one_sval->dyn_cast_widening_svalue ();
  ASSERT_EQ (w_zero_then_one->get_direction (),
	     widening_svalue::DIR_ASCENDING);

  /* LT_EXPR: an ascending value starting at 0 can never be below 0,
     but might or might not be below any larger bound.  */
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  /* LE_EXPR: definitely not <= -1; <= 0 holds only on the first
     iteration, so it's unknown.  */
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  /* GT_EXPR: definitely > -1 (value is at least 0); > 0 only after the
     first iteration, so unknown.  */
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  /* GE_EXPR: definitely >= -1 and >= 0; anything above the start value
     is unknown.  */
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  /* EQ_EXPR: can't equal -1 (below the start); any value >= 0 might be
     hit, so unknown.  */
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  /* NE_EXPR: mirror of EQ_EXPR: definitely != -1; otherwise unknown.  */
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
	     tristate::TS_UNKNOWN);
}
/* Verify merging constraints for states simulating successive iterations
   of a loop.
   Simulate:
     for (i = 0; i < 256; i++)
       [...body...]
   i.e. this gimple:
     i_15 = 0;
     goto <bb 4>;

    <bb 4> :
     i_11 = PHI <i_15(2), i_23(3)>
     if (i_11 <= 255)
       goto <bb 3>;
     else
       goto [AFTER LOOP]

    <bb 3> :
     [LOOP BODY]
     i_23 = i_11 + 1;

   and thus these ops (and resultant states):
     i_11 = PHI()
       {i_11: 0}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: 0}  [constraint was a no-op]
     i_23 = i_11 + 1;
       {i_23: 1}
     i_11 = PHI()
       {i_11: WIDENED (at phi, 0, 1)}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
     i_23 = i_11 + 1;
       {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
     i_11 = PHI(); merge with state at phi above
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
     [changing meaning of "WIDENED" here]
     if (i_11 <= 255)
        T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
        F: {i_11: 256}
 */

static void
test_iteration_1 ()
{
  program_point point (program_point::origin ());

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree int_257 = build_int_cst (integer_type_node, 257);
  tree i = build_global_decl ("i", integer_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;

  /* model0: i: 0.  */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1.  */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Should merge "i" to a widened value: 0 then 1 implies an
     ascending iteration.  */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256 (the loop guard on the true edge).  */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Try merging with the initial state.  */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges.  */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case.  */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged.  */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;"  */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state.  */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again.  */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);

  /* The widened value should still be known to be below 257 after the
     merge (the "WIDENED <= 256" state in the comment above).  */
  ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
}
5472 /* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
5473 all cast pointers to that region are also known to be non-NULL. */
5475 static void
5476 test_malloc_constraints ()
5478 region_model_manager mgr;
5479 region_model model (&mgr);
5480 tree p = build_global_decl ("p", ptr_type_node);
5481 tree char_star = build_pointer_type (char_type_node);
5482 tree q = build_global_decl ("q", char_star);
5483 tree null_ptr = build_int_cst (ptr_type_node, 0);
5485 const svalue *size_in_bytes
5486 = mgr.get_or_create_unknown_svalue (size_type_node);
5487 const region *reg = model.create_region_for_heap_alloc (size_in_bytes);
5488 const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
5489 model.set_value (model.get_lvalue (p, NULL), sval, NULL);
5490 model.set_value (q, p, NULL);
5492 ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
5493 ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
5494 ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
5495 ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);
5497 model.add_constraint (p, NE_EXPR, null_ptr, NULL);
5499 ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
5500 ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
5501 ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
5502 ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
5505 /* Smoketest of getting and setting the value of a variable. */
5507 static void
5508 test_var ()
5510 /* "int i;" */
5511 tree i = build_global_decl ("i", integer_type_node);
5513 tree int_17 = build_int_cst (integer_type_node, 17);
5514 tree int_m3 = build_int_cst (integer_type_node, -3);
5516 region_model_manager mgr;
5517 region_model model (&mgr);
5519 const region *i_reg = model.get_lvalue (i, NULL);
5520 ASSERT_EQ (i_reg->get_kind (), RK_DECL);
5522 /* Reading "i" should give a symbolic "initial value". */
5523 const svalue *sval_init = model.get_rvalue (i, NULL);
5524 ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
5525 ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
5526 /* ..and doing it again should give the same "initial value". */
5527 ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);
5529 /* "i = 17;". */
5530 model.set_value (i, int_17, NULL);
5531 ASSERT_EQ (model.get_rvalue (i, NULL),
5532 model.get_rvalue (int_17, NULL));
5534 /* "i = -3;". */
5535 model.set_value (i, int_m3, NULL);
5536 ASSERT_EQ (model.get_rvalue (i, NULL),
5537 model.get_rvalue (int_m3, NULL));
5539 /* Verify get_offset for "i". */
5541 region_offset offset = i_reg->get_offset ();
5542 ASSERT_EQ (offset.get_base_region (), i_reg);
5543 ASSERT_EQ (offset.get_bit_offset (), 0);
/* Smoketest of reads/writes to an array via constant and symbolic
   indices, including get_offset on the element regions.  */

static void
test_array_2 ()
{
  /* "int arr[10];"  */
  tree tlen = size_int (10);
  tree arr_type
    = build_array_type (integer_type_node, build_index_type (tlen));
  tree arr = build_global_decl ("arr", arr_type);

  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);

  /* ARRAY_REFs for arr[0] and arr[1] (constant indices) and arr[i]
     (symbolic index).  */
  tree arr_0 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_0, NULL_TREE, NULL_TREE);
  tree arr_1 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_1, NULL_TREE, NULL_TREE);
  tree arr_i = build4 (ARRAY_REF, integer_type_node,
		       arr, i, NULL_TREE, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* "arr[0] = 17;".  */
  model.set_value (arr_0, int_17, NULL);
  /* "arr[1] = -3;".  */
  model.set_value (arr_1, int_m3, NULL);

  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));

  /* Overwrite a pre-existing binding: "arr[1] = 42;".  */
  model.set_value (arr_1, int_42, NULL);
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));

  /* Verify get_offset for "arr[0]": based at "arr", at bit 0.  */
  {
    const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
    region_offset offset = arr_0_reg->get_offset ();
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "arr[1]": based at "arr", one element
     (INT_TYPE_SIZE bits) in.  */
  {
    const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
    region_offset offset = arr_1_reg->get_offset ();
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }

  /* "arr[i] = i;" - this should remove the earlier bindings.  */
  model.set_value (arr_i, i, NULL);
  ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
  ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);

  /* "arr[0] = 17;" - this should remove the arr[i] binding.  */
  model.set_value (arr_0, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
}
5614 /* Smoketest of dereferencing a pointer via MEM_REF. */
5616 static void
5617 test_mem_ref ()
5620 x = 17;
5621 p = &x;
5624 tree x = build_global_decl ("x", integer_type_node);
5625 tree int_star = build_pointer_type (integer_type_node);
5626 tree p = build_global_decl ("p", int_star);
5628 tree int_17 = build_int_cst (integer_type_node, 17);
5629 tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
5630 tree offset_0 = build_int_cst (integer_type_node, 0);
5631 tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);
5633 region_model_manager mgr;
5634 region_model model (&mgr);
5636 /* "x = 17;". */
5637 model.set_value (x, int_17, NULL);
5639 /* "p = &x;". */
5640 model.set_value (p, addr_of_x, NULL);
5642 const svalue *sval = model.get_rvalue (star_p, NULL);
5643 ASSERT_EQ (sval->maybe_get_constant (), int_17);
5646 /* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
5647 Analogous to this code:
5648 void test_6 (int a[10])
5650 __analyzer_eval (a[3] == 42); [should be UNKNOWN]
5651 a[3] = 42;
5652 __analyzer_eval (a[3] == 42); [should be TRUE]
5654 from data-model-1.c, which looks like this at the gimple level:
5655 # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
5656 int *_1 = a_10(D) + 12; # POINTER_PLUS_EXPR
5657 int _2 = *_1; # MEM_REF
5658 _Bool _3 = _2 == 42;
5659 int _4 = (int) _3;
5660 __analyzer_eval (_4);
5662 # a[3] = 42;
5663 int *_5 = a_10(D) + 12; # POINTER_PLUS_EXPR
5664 *_5 = 42; # MEM_REF
5666 # __analyzer_eval (a[3] == 42); [should be TRUE]
5667 int *_6 = a_10(D) + 12; # POINTER_PLUS_EXPR
5668 int _7 = *_6; # MEM_REF
5669 _Bool _8 = _7 == 42;
5670 int _9 = (int) _8;
5671 __analyzer_eval (_9); */
5673 static void
5674 test_POINTER_PLUS_EXPR_then_MEM_REF ()
5676 tree int_star = build_pointer_type (integer_type_node);
5677 tree a = build_global_decl ("a", int_star);
5678 tree offset_12 = build_int_cst (size_type_node, 12);
5679 tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
5680 tree offset_0 = build_int_cst (integer_type_node, 0);
5681 tree mem_ref = build2 (MEM_REF, integer_type_node,
5682 pointer_plus_expr, offset_0);
5683 region_model_manager mgr;
5684 region_model m (&mgr);
5686 tree int_42 = build_int_cst (integer_type_node, 42);
5687 m.set_value (mem_ref, int_42, NULL);
5688 ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
5691 /* Verify that malloc works. */
5693 static void
5694 test_malloc ()
5696 tree int_star = build_pointer_type (integer_type_node);
5697 tree p = build_global_decl ("p", int_star);
5698 tree n = build_global_decl ("n", integer_type_node);
5699 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
5700 n, build_int_cst (size_type_node, 4));
5702 region_model_manager mgr;
5703 test_region_model_context ctxt;
5704 region_model model (&mgr);
5706 /* "p = malloc (n * 4);". */
5707 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
5708 const region *reg = model.create_region_for_heap_alloc (size_sval);
5709 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
5710 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
5711 ASSERT_EQ (model.get_capacity (reg), size_sval);
5714 /* Verify that alloca works. */
5716 static void
5717 test_alloca ()
5719 auto_vec <tree> param_types;
5720 tree fndecl = make_fndecl (integer_type_node,
5721 "test_fn",
5722 param_types);
5723 allocate_struct_function (fndecl, true);
5726 tree int_star = build_pointer_type (integer_type_node);
5727 tree p = build_global_decl ("p", int_star);
5728 tree n = build_global_decl ("n", integer_type_node);
5729 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
5730 n, build_int_cst (size_type_node, 4));
5732 region_model_manager mgr;
5733 test_region_model_context ctxt;
5734 region_model model (&mgr);
5736 /* Push stack frame. */
5737 const region *frame_reg
5738 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
5739 NULL, &ctxt);
5740 /* "p = alloca (n * 4);". */
5741 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
5742 const region *reg = model.create_region_for_alloca (size_sval);
5743 ASSERT_EQ (reg->get_parent_region (), frame_reg);
5744 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
5745 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
5746 ASSERT_EQ (model.get_capacity (reg), size_sval);
5748 /* Verify that the pointers to the alloca region are replaced by
5749 poisoned values when the frame is popped. */
5750 model.pop_frame (NULL, NULL, &ctxt);
5751 ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
5754 /* Verify that svalue::involves_p works. */
5756 static void
5757 test_involves_p ()
5759 region_model_manager mgr;
5760 tree int_star = build_pointer_type (integer_type_node);
5761 tree p = build_global_decl ("p", int_star);
5762 tree q = build_global_decl ("q", int_star);
5764 test_region_model_context ctxt;
5765 region_model model (&mgr);
5766 const svalue *p_init = model.get_rvalue (p, &ctxt);
5767 const svalue *q_init = model.get_rvalue (q, &ctxt);
5769 ASSERT_TRUE (p_init->involves_p (p_init));
5770 ASSERT_FALSE (p_init->involves_p (q_init));
5772 const region *star_p_reg = mgr.get_symbolic_region (p_init);
5773 const region *star_q_reg = mgr.get_symbolic_region (q_init);
5775 const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
5776 const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);
5778 ASSERT_TRUE (init_star_p->involves_p (p_init));
5779 ASSERT_FALSE (p_init->involves_p (init_star_p));
5780 ASSERT_FALSE (init_star_p->involves_p (q_init));
5781 ASSERT_TRUE (init_star_q->involves_p (q_init));
5782 ASSERT_FALSE (init_star_q->involves_p (p_init));
/* Run all of the selftests within this file.  */

void
analyzer_region_model_cc_tests ()
{
  /* Basic building blocks: tree comparison, dumping, and compound
     lvalues.  */
  test_tree_cmp_on_constants ();
  test_dump ();
  test_struct ();
  test_array_1 ();
  test_get_representative_tree ();
  /* svalue uniqueness and folding.  */
  test_unique_constants ();
  test_unique_unknowns ();
  test_initial_svalue_folding ();
  test_unaryop_svalue_folding ();
  test_binop_svalue_folding ();
  test_sub_svalue_folding ();
  test_descendent_of_p ();
  /* Assignments and stack frames.  */
  test_assignment ();
  test_compound_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();
  /* Equality, canonicalization, and state/constraint merging.  */
  test_equality_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  test_state_merging ();
  test_constraint_merging ();
  test_widening_constraints ();
  test_iteration_1 ();
  test_malloc_constraints ();
  /* Smoketests of reads/writes, allocation, and involves_p.  */
  test_var ();
  test_array_2 ();
  test_mem_ref ();
  test_POINTER_PLUS_EXPR_then_MEM_REF ();
  test_malloc ();
  test_alloca ();
  test_involves_p ();
}
5824 } // namespace selftest
5826 #endif /* CHECKING_P */
5828 } // namespace ana
5830 #endif /* #if ENABLE_ANALYZER */