analyzer: add support for plugin-supplied known function behaviors
gcc/analyzer/region-model.cc
1 /* Classes for modeling the state of memory.
2 Copyright (C) 2019-2022 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "function.h"
26 #include "basic-block.h"
27 #include "gimple.h"
28 #include "gimple-iterator.h"
29 #include "diagnostic-core.h"
30 #include "graphviz.h"
31 #include "options.h"
32 #include "cgraph.h"
33 #include "tree-dfa.h"
34 #include "stringpool.h"
35 #include "convert.h"
36 #include "target.h"
37 #include "fold-const.h"
38 #include "tree-pretty-print.h"
39 #include "diagnostic-color.h"
40 #include "diagnostic-metadata.h"
41 #include "tristate.h"
42 #include "bitmap.h"
43 #include "selftest.h"
44 #include "function.h"
45 #include "json.h"
46 #include "analyzer/analyzer.h"
47 #include "analyzer/analyzer-logging.h"
48 #include "ordered-hash-map.h"
49 #include "options.h"
50 #include "cgraph.h"
51 #include "cfg.h"
52 #include "digraph.h"
53 #include "analyzer/supergraph.h"
54 #include "sbitmap.h"
55 #include "analyzer/call-string.h"
56 #include "analyzer/program-point.h"
57 #include "analyzer/store.h"
58 #include "analyzer/region-model.h"
59 #include "analyzer/constraint-manager.h"
60 #include "diagnostic-event-id.h"
61 #include "analyzer/sm.h"
62 #include "diagnostic-event-id.h"
63 #include "analyzer/sm.h"
64 #include "analyzer/pending-diagnostic.h"
65 #include "analyzer/region-model-reachability.h"
66 #include "analyzer/analyzer-selftests.h"
67 #include "analyzer/program-state.h"
68 #include "stor-layout.h"
69 #include "attribs.h"
70 #include "tree-object-size.h"
71 #include "gimple-ssa.h"
72 #include "tree-phinodes.h"
73 #include "tree-ssa-operands.h"
74 #include "ssa-iterators.h"
75 #include "calls.h"
76 #include "is-a.h"
78 #if ENABLE_ANALYZER
80 namespace ana {
82 /* Dump T to PP in language-independent form, for debugging/logging/dumping
83 purposes. */
85 void
86 dump_tree (pretty_printer *pp, tree t)
88 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
91 /* Dump T to PP in language-independent form in quotes, for
92 debugging/logging/dumping purposes. */
94 void
95 dump_quoted_tree (pretty_printer *pp, tree t)
97 pp_begin_quote (pp, pp_show_color (pp));
98 dump_tree (pp, t);
99 pp_end_quote (pp, pp_show_color (pp));
102 /* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
103 calls within other pp_printf calls.
105 default_tree_printer handles 'T' and some other codes by calling
106 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
107 dump_generic_node calls pp_printf in various places, leading to
108 garbled output.
110 Ideally pp_printf could be made to be reentrant, but in the meantime
111 this function provides a workaround. */
113 void
114 print_quoted_type (pretty_printer *pp, tree t)
116 pp_begin_quote (pp, pp_show_color (pp));
117 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
118 pp_end_quote (pp, pp_show_color (pp));
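/* Editor's sketch of the intended usage (names illustrative, not from
   this file): callers split the format string and call
   print_quoted_type for the type argument:

     pp_string (pp, "invalid cast to ");
     print_quoted_type (pp, type);

   rather than the reentrancy-prone:

     pp_printf (pp, "invalid cast to %qT", type);  */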
121 /* class region_to_value_map. */
123 /* Assignment operator for region_to_value_map. */
125 region_to_value_map &
126 region_to_value_map::operator= (const region_to_value_map &other)
128 m_hash_map.empty ();
129 for (auto iter : other.m_hash_map)
131 const region *reg = iter.first;
132 const svalue *sval = iter.second;
133 m_hash_map.put (reg, sval);
135 return *this;
138 /* Equality operator for region_to_value_map. */
140 bool
141 region_to_value_map::operator== (const region_to_value_map &other) const
143 if (m_hash_map.elements () != other.m_hash_map.elements ())
144 return false;
146 for (auto iter : *this)
148 const region *reg = iter.first;
149 const svalue *sval = iter.second;
150 const svalue * const *other_slot = other.get (reg);
151 if (other_slot == NULL)
152 return false;
153 if (sval != *other_slot)
154 return false;
157 return true;
160 /* Dump this object to PP. */
162 void
163 region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
164 bool multiline) const
166 auto_vec<const region *> regs;
167 for (iterator iter = begin (); iter != end (); ++iter)
168 regs.safe_push ((*iter).first);
169 regs.qsort (region::cmp_ptr_ptr);
170 if (multiline)
171 pp_newline (pp);
172 else
173 pp_string (pp, " {");
174 unsigned i;
175 const region *reg;
176 FOR_EACH_VEC_ELT (regs, i, reg)
178 if (multiline)
179 pp_string (pp, " ");
180 else if (i > 0)
181 pp_string (pp, ", ");
182 reg->dump_to_pp (pp, simple);
183 pp_string (pp, ": ");
184 const svalue *sval = *get (reg);
185 sval->dump_to_pp (pp, true);
186 if (multiline)
187 pp_newline (pp);
189 if (!multiline)
190 pp_string (pp, "}");
193 /* Dump this object to stderr. */
195 DEBUG_FUNCTION void
196 region_to_value_map::dump (bool simple) const
198 pretty_printer pp;
199 pp_format_decoder (&pp) = default_tree_printer;
200 pp_show_color (&pp) = pp_show_color (global_dc->printer);
201 pp.buffer->stream = stderr;
202 dump_to_pp (&pp, simple, true);
203 pp_newline (&pp);
204 pp_flush (&pp);
208 /* Attempt to merge THIS with OTHER, writing the result
209 to OUT.
211 For now, write (region, value) mappings that are in common between THIS
212 and OTHER to OUT, effectively taking the intersection, rather than
213 rejecting differences. */
215 bool
216 region_to_value_map::can_merge_with_p (const region_to_value_map &other,
217 region_to_value_map *out) const
219 for (auto iter : *this)
221 const region *iter_reg = iter.first;
222 const svalue *iter_sval = iter.second;
223 const svalue * const * other_slot = other.get (iter_reg);
224 if (other_slot)
225 if (iter_sval == *other_slot)
226 out->put (iter_reg, iter_sval);
228 return true;
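/* Editor's illustration of the intersection semantics above: if *this
   maps {r0: sval_a, r1: sval_b} and OTHER maps {r0: sval_a, r1: sval_c},
   then OUT receives just {r0: sval_a}; the agreeing binding survives and
   the conflicting one is dropped, rather than the merge failing.  */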
231 /* Purge any state involving SVAL. */
233 void
234 region_to_value_map::purge_state_involving (const svalue *sval)
236 auto_vec<const region *> to_purge;
237 for (auto iter : *this)
239 const region *iter_reg = iter.first;
240 const svalue *iter_sval = iter.second;
241 if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
242 to_purge.safe_push (iter_reg);
244 for (auto iter : to_purge)
245 m_hash_map.remove (iter);
248 /* class region_model. */
250 /* Ctor for region_model: construct an "empty" model. */
252 region_model::region_model (region_model_manager *mgr)
253 : m_mgr (mgr), m_store (), m_current_frame (NULL),
254 m_dynamic_extents ()
256 m_constraints = new constraint_manager (mgr);
259 /* region_model's copy ctor. */
261 region_model::region_model (const region_model &other)
262 : m_mgr (other.m_mgr), m_store (other.m_store),
263 m_constraints (new constraint_manager (*other.m_constraints)),
264 m_current_frame (other.m_current_frame),
265 m_dynamic_extents (other.m_dynamic_extents)
269 /* region_model's dtor. */
271 region_model::~region_model ()
273 delete m_constraints;
276 /* region_model's assignment operator. */
278 region_model &
279 region_model::operator= (const region_model &other)
281 /* m_mgr is const. */
282 gcc_assert (m_mgr == other.m_mgr);
284 m_store = other.m_store;
286 delete m_constraints;
287 m_constraints = new constraint_manager (*other.m_constraints);
289 m_current_frame = other.m_current_frame;
291 m_dynamic_extents = other.m_dynamic_extents;
293 return *this;
296 /* Equality operator for region_model.
298 Amongst other things this directly compares the stores and the constraint
299 managers, so for this to be meaningful both this and OTHER should
300 have been canonicalized. */
302 bool
303 region_model::operator== (const region_model &other) const
305 /* We can only compare instances that use the same manager. */
306 gcc_assert (m_mgr == other.m_mgr);
308 if (m_store != other.m_store)
309 return false;
311 if (*m_constraints != *other.m_constraints)
312 return false;
314 if (m_current_frame != other.m_current_frame)
315 return false;
317 if (m_dynamic_extents != other.m_dynamic_extents)
318 return false;
320 gcc_checking_assert (hash () == other.hash ());
322 return true;
325 /* Generate a hash value for this region_model. */
327 hashval_t
328 region_model::hash () const
330 hashval_t result = m_store.hash ();
331 result ^= m_constraints->hash ();
332 return result;
335 /* Dump a representation of this model to PP, showing the
336 stack, the store, and any constraints.
337 Use SIMPLE to control how svalues and regions are printed. */
339 void
340 region_model::dump_to_pp (pretty_printer *pp, bool simple,
341 bool multiline) const
343 /* Dump stack. */
344 pp_printf (pp, "stack depth: %i", get_stack_depth ());
345 if (multiline)
346 pp_newline (pp);
347 else
348 pp_string (pp, " {");
349 for (const frame_region *iter_frame = m_current_frame; iter_frame;
350 iter_frame = iter_frame->get_calling_frame ())
352 if (multiline)
353 pp_string (pp, " ");
354 else if (iter_frame != m_current_frame)
355 pp_string (pp, ", ");
356 pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
357 iter_frame->dump_to_pp (pp, simple);
358 if (multiline)
359 pp_newline (pp);
361 if (!multiline)
362 pp_string (pp, "}");
364 /* Dump store. */
365 if (!multiline)
366 pp_string (pp, ", {");
367 m_store.dump_to_pp (pp, simple, multiline,
368 m_mgr->get_store_manager ());
369 if (!multiline)
370 pp_string (pp, "}");
372 /* Dump constraints. */
373 pp_string (pp, "constraint_manager:");
374 if (multiline)
375 pp_newline (pp);
376 else
377 pp_string (pp, " {");
378 m_constraints->dump_to_pp (pp, multiline);
379 if (!multiline)
380 pp_string (pp, "}");
382 /* Dump sizes of dynamic regions, if any are known. */
383 if (!m_dynamic_extents.is_empty ())
385 pp_string (pp, "dynamic_extents:");
386 m_dynamic_extents.dump_to_pp (pp, simple, multiline);
390 /* Dump a representation of this model to FILE. */
392 void
393 region_model::dump (FILE *fp, bool simple, bool multiline) const
395 pretty_printer pp;
396 pp_format_decoder (&pp) = default_tree_printer;
397 pp_show_color (&pp) = pp_show_color (global_dc->printer);
398 pp.buffer->stream = fp;
399 dump_to_pp (&pp, simple, multiline);
400 pp_newline (&pp);
401 pp_flush (&pp);
404 /* Dump a multiline representation of this model to stderr. */
406 DEBUG_FUNCTION void
407 region_model::dump (bool simple) const
409 dump (stderr, simple, true);
412 /* Dump a multiline representation of this model to stderr. */
414 DEBUG_FUNCTION void
415 region_model::debug () const
417 dump (true);
420 /* Assert that this object is valid. */
422 void
423 region_model::validate () const
425 m_store.validate ();
428 /* Canonicalize the store and constraints, to maximize the chance of
429 equality between region_model instances. */
431 void
432 region_model::canonicalize ()
434 m_store.canonicalize (m_mgr->get_store_manager ());
435 m_constraints->canonicalize ();
438 /* Return true if this region_model is in canonical form. */
440 bool
441 region_model::canonicalized_p () const
443 region_model copy (*this);
444 copy.canonicalize ();
445 return *this == copy;
448 /* See the comment for store::loop_replay_fixup. */
450 void
451 region_model::loop_replay_fixup (const region_model *dst_state)
453 m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
456 /* A subclass of pending_diagnostic for complaining about uses of
457 poisoned values. */
459 class poisoned_value_diagnostic
460 : public pending_diagnostic_subclass<poisoned_value_diagnostic>
462 public:
463 poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
464 const region *src_region)
465 : m_expr (expr), m_pkind (pkind),
466 m_src_region (src_region)
469 const char *get_kind () const final override { return "poisoned_value_diagnostic"; }
471 bool use_of_uninit_p () const final override
473 return m_pkind == POISON_KIND_UNINIT;
476 bool operator== (const poisoned_value_diagnostic &other) const
478 return (m_expr == other.m_expr
479 && m_pkind == other.m_pkind
480 && m_src_region == other.m_src_region);
483 int get_controlling_option () const final override
485 switch (m_pkind)
487 default:
488 gcc_unreachable ();
489 case POISON_KIND_UNINIT:
490 return OPT_Wanalyzer_use_of_uninitialized_value;
491 case POISON_KIND_FREED:
492 return OPT_Wanalyzer_use_after_free;
493 case POISON_KIND_POPPED_STACK:
494 return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
498 bool emit (rich_location *rich_loc) final override
500 switch (m_pkind)
502 default:
503 gcc_unreachable ();
504 case POISON_KIND_UNINIT:
506 diagnostic_metadata m;
507 m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable". */
508 return warning_meta (rich_loc, m, get_controlling_option (),
509 "use of uninitialized value %qE",
510 m_expr);
512 break;
513 case POISON_KIND_FREED:
515 diagnostic_metadata m;
516 m.add_cwe (416); /* "CWE-416: Use After Free". */
517 return warning_meta (rich_loc, m, get_controlling_option (),
518 "use after %<free%> of %qE",
519 m_expr);
521 break;
522 case POISON_KIND_POPPED_STACK:
524 /* TODO: which CWE? */
525 return warning_at
526 (rich_loc, get_controlling_option (),
527 "dereferencing pointer %qE to within stale stack frame",
528 m_expr);
530 break;
534 label_text describe_final_event (const evdesc::final_event &ev) final override
536 switch (m_pkind)
538 default:
539 gcc_unreachable ();
540 case POISON_KIND_UNINIT:
541 return ev.formatted_print ("use of uninitialized value %qE here",
542 m_expr);
543 case POISON_KIND_FREED:
544 return ev.formatted_print ("use after %<free%> of %qE here",
545 m_expr);
546 case POISON_KIND_POPPED_STACK:
547 return ev.formatted_print
548 ("dereferencing pointer %qE to within stale stack frame",
549 m_expr);
553 void mark_interesting_stuff (interesting_t *interest) final override
555 if (m_src_region)
556 interest->add_region_creation (m_src_region);
559 private:
560 tree m_expr;
561 enum poison_kind m_pkind;
562 const region *m_src_region;
565 /* A subclass of pending_diagnostic for complaining about shifts
566 by negative counts. */
568 class shift_count_negative_diagnostic
569 : public pending_diagnostic_subclass<shift_count_negative_diagnostic>
571 public:
572 shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
573 : m_assign (assign), m_count_cst (count_cst)
576 const char *get_kind () const final override
578 return "shift_count_negative_diagnostic";
581 bool operator== (const shift_count_negative_diagnostic &other) const
583 return (m_assign == other.m_assign
584 && same_tree_p (m_count_cst, other.m_count_cst));
587 int get_controlling_option () const final override
589 return OPT_Wanalyzer_shift_count_negative;
592 bool emit (rich_location *rich_loc) final override
594 return warning_at (rich_loc, get_controlling_option (),
595 "shift by negative count (%qE)", m_count_cst);
598 label_text describe_final_event (const evdesc::final_event &ev) final override
600 return ev.formatted_print ("shift by negative amount here (%qE)", m_count_cst);
603 private:
604 const gassign *m_assign;
605 tree m_count_cst;
608 /* A subclass of pending_diagnostic for complaining about shifts
609 by counts >= the width of the operand type. */
611 class shift_count_overflow_diagnostic
612 : public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
614 public:
615 shift_count_overflow_diagnostic (const gassign *assign,
616 int operand_precision,
617 tree count_cst)
618 : m_assign (assign), m_operand_precision (operand_precision),
619 m_count_cst (count_cst)
622 const char *get_kind () const final override
624 return "shift_count_overflow_diagnostic";
627 bool operator== (const shift_count_overflow_diagnostic &other) const
629 return (m_assign == other.m_assign
630 && m_operand_precision == other.m_operand_precision
631 && same_tree_p (m_count_cst, other.m_count_cst));
634 int get_controlling_option () const final override
636 return OPT_Wanalyzer_shift_count_overflow;
639 bool emit (rich_location *rich_loc) final override
641 return warning_at (rich_loc, get_controlling_option (),
642 "shift by count (%qE) >= precision of type (%qi)",
643 m_count_cst, m_operand_precision);
646 label_text describe_final_event (const evdesc::final_event &ev) final override
648 return ev.formatted_print ("shift by count %qE here", m_count_cst);
651 private:
652 const gassign *m_assign;
653 int m_operand_precision;
654 tree m_count_cst;
657 /* If ASSIGN is a stmt that can be modelled via
658 set_value (lhs_reg, SVALUE, CTXT)
659 for some SVALUE, get the SVALUE.
660 Otherwise return NULL. */
662 const svalue *
663 region_model::get_gassign_result (const gassign *assign,
664 region_model_context *ctxt)
666 tree lhs = gimple_assign_lhs (assign);
667 tree rhs1 = gimple_assign_rhs1 (assign);
668 enum tree_code op = gimple_assign_rhs_code (assign);
669 switch (op)
671 default:
672 return NULL;
674 case POINTER_PLUS_EXPR:
676 /* e.g. "_1 = a_10(D) + 12;" */
677 tree ptr = rhs1;
678 tree offset = gimple_assign_rhs2 (assign);
680 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
681 const svalue *offset_sval = get_rvalue (offset, ctxt);
682 /* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
683 is an integer of type sizetype". */
684 offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);
686 const svalue *sval_binop
687 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
688 ptr_sval, offset_sval);
689 return sval_binop;
691 break;
693 case POINTER_DIFF_EXPR:
695 /* e.g. "_1 = p_2(D) - q_3(D);". */
696 tree rhs2 = gimple_assign_rhs2 (assign);
697 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
698 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
700 // TODO: perhaps fold to zero if they're known to be equal?
702 const svalue *sval_binop
703 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
704 rhs1_sval, rhs2_sval);
705 return sval_binop;
707 break;
709 /* Assignments of the form
710 set_value (lvalue (LHS), rvalue (EXPR))
711 for various EXPR.
712 We already have the lvalue for the LHS above, as "lhs_reg". */
713 case ADDR_EXPR: /* LHS = &RHS; */
714 case BIT_FIELD_REF:
715 case COMPONENT_REF: /* LHS = op0.op1; */
716 case MEM_REF:
717 case REAL_CST:
718 case COMPLEX_CST:
719 case VECTOR_CST:
720 case INTEGER_CST:
721 case ARRAY_REF:
722 case SSA_NAME: /* LHS = VAR; */
723 case VAR_DECL: /* LHS = VAR; */
724 case PARM_DECL:/* LHS = VAR; */
725 case REALPART_EXPR:
726 case IMAGPART_EXPR:
727 return get_rvalue (rhs1, ctxt);
729 case ABS_EXPR:
730 case ABSU_EXPR:
731 case CONJ_EXPR:
732 case BIT_NOT_EXPR:
733 case FIX_TRUNC_EXPR:
734 case FLOAT_EXPR:
735 case NEGATE_EXPR:
736 case NOP_EXPR:
737 case VIEW_CONVERT_EXPR:
739 /* Unary ops. */
740 const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
741 const svalue *sval_unaryop
742 = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
743 return sval_unaryop;
746 case EQ_EXPR:
747 case GE_EXPR:
748 case LE_EXPR:
749 case NE_EXPR:
750 case GT_EXPR:
751 case LT_EXPR:
752 case UNORDERED_EXPR:
753 case ORDERED_EXPR:
755 tree rhs2 = gimple_assign_rhs2 (assign);
757 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
758 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
760 if (TREE_TYPE (lhs) == boolean_type_node)
762 /* Consider constraints between svalues. */
763 tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
764 if (t.is_known ())
765 return m_mgr->get_or_create_constant_svalue
766 (t.is_true () ? boolean_true_node : boolean_false_node);
769 /* Otherwise, generate a symbolic binary op. */
770 const svalue *sval_binop
771 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
772 rhs1_sval, rhs2_sval);
773 return sval_binop;
775 break;
777 case PLUS_EXPR:
778 case MINUS_EXPR:
779 case MULT_EXPR:
780 case MULT_HIGHPART_EXPR:
781 case TRUNC_DIV_EXPR:
782 case CEIL_DIV_EXPR:
783 case FLOOR_DIV_EXPR:
784 case ROUND_DIV_EXPR:
785 case TRUNC_MOD_EXPR:
786 case CEIL_MOD_EXPR:
787 case FLOOR_MOD_EXPR:
788 case ROUND_MOD_EXPR:
789 case RDIV_EXPR:
790 case EXACT_DIV_EXPR:
791 case LSHIFT_EXPR:
792 case RSHIFT_EXPR:
793 case LROTATE_EXPR:
794 case RROTATE_EXPR:
795 case BIT_IOR_EXPR:
796 case BIT_XOR_EXPR:
797 case BIT_AND_EXPR:
798 case MIN_EXPR:
799 case MAX_EXPR:
800 case COMPLEX_EXPR:
802 /* Binary ops. */
803 tree rhs2 = gimple_assign_rhs2 (assign);
805 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
806 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
808 if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
810 /* "INT34-C. Do not shift an expression by a negative number of bits
811 or by greater than or equal to the number of bits that exist in
812 the operand." */
813 if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
814 if (TREE_CODE (rhs2_cst) == INTEGER_CST)
816 if (tree_int_cst_sgn (rhs2_cst) < 0)
817 ctxt->warn (new shift_count_negative_diagnostic
818 (assign, rhs2_cst));
819 else if (compare_tree_int (rhs2_cst,
820 TYPE_PRECISION (TREE_TYPE (rhs1)))
821 >= 0)
822 ctxt->warn (new shift_count_overflow_diagnostic
823 (assign, TYPE_PRECISION (TREE_TYPE (rhs1)),
824 rhs2_cst));
828 const svalue *sval_binop
829 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
830 rhs1_sval, rhs2_sval);
831 return sval_binop;
834 /* Vector expressions. In theory we could implement these elementwise,
835 but for now, simply return unknown values. */
836 case VEC_DUPLICATE_EXPR:
837 case VEC_SERIES_EXPR:
838 case VEC_COND_EXPR:
839 case VEC_PERM_EXPR:
840 case VEC_WIDEN_MULT_HI_EXPR:
841 case VEC_WIDEN_MULT_LO_EXPR:
842 case VEC_WIDEN_MULT_EVEN_EXPR:
843 case VEC_WIDEN_MULT_ODD_EXPR:
844 case VEC_UNPACK_HI_EXPR:
845 case VEC_UNPACK_LO_EXPR:
846 case VEC_UNPACK_FLOAT_HI_EXPR:
847 case VEC_UNPACK_FLOAT_LO_EXPR:
848 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
849 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
850 case VEC_PACK_TRUNC_EXPR:
851 case VEC_PACK_SAT_EXPR:
852 case VEC_PACK_FIX_TRUNC_EXPR:
853 case VEC_PACK_FLOAT_EXPR:
854 case VEC_WIDEN_LSHIFT_HI_EXPR:
855 case VEC_WIDEN_LSHIFT_LO_EXPR:
856 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
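/* Editor's note on the shift checks in the LSHIFT_EXPR/RSHIFT_EXPR case
   above: e.g. for a 32-bit "int", "x << -1" is flagged by
   shift_count_negative_diagnostic and "x << 32" by
   shift_count_overflow_diagnostic, per INT34-C.  */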
860 /* Workaround for discarding certain false positives from
861 -Wanalyzer-use-of-uninitialized-value
862 of the form:
863 ((A OR-IF B) OR-IF C)
864 and:
865 ((A AND-IF B) AND-IF C)
866 where evaluating B is redundant, but could involve simple accesses of
867 uninitialized locals.
869 When optimization is turned on the FE can immediately fold compound
870 conditionals. Specifically, c_parser_condition parses this condition:
871 ((A OR-IF B) OR-IF C)
872 and calls c_fully_fold on the condition.
873 Within c_fully_fold, fold_truth_andor is called, which bails when
874 optimization is off, but if any optimization is turned on can convert the
875 ((A OR-IF B) OR-IF C)
876 into:
877 ((A OR B) OR-IF C)
878 for sufficiently simple B
879 i.e. the inner OR-IF becomes an OR.
880 At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
881 giving this for the inner condition:
882 tmp = A | B;
883 if (tmp)
884 thus effectively synthesizing a redundant access of B when optimization
885 is turned on, when compared to:
886 if (A) goto L1; else goto L4;
887 L1: if (B) goto L2; else goto L4;
888 L2: if (C) goto L3; else goto L4;
889 for the unoptimized case.
891 Return true if CTXT appears to be handling such a short-circuitable stmt,
892 such as the def-stmt for B for the:
893 tmp = A | B;
894 case above, for the case where A is true and thus B would have been
895 short-circuited without optimization, using MODEL for the value of A. */
897 static bool
898 within_short_circuited_stmt_p (const region_model *model,
899 const gassign *assign_stmt)
901 /* We must have an assignment to a temporary of _Bool type. */
902 tree lhs = gimple_assign_lhs (assign_stmt);
903 if (TREE_TYPE (lhs) != boolean_type_node)
904 return false;
905 if (TREE_CODE (lhs) != SSA_NAME)
906 return false;
907 if (SSA_NAME_VAR (lhs) != NULL_TREE)
908 return false;
910 /* The temporary bool must be used exactly once: as the second arg of
911 a BIT_IOR_EXPR or BIT_AND_EXPR. */
912 use_operand_p use_op;
913 gimple *use_stmt;
914 if (!single_imm_use (lhs, &use_op, &use_stmt))
915 return false;
916 const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
917 if (!use_assign)
918 return false;
919 enum tree_code op = gimple_assign_rhs_code (use_assign);
920 if (!(op == BIT_IOR_EXPR || op == BIT_AND_EXPR))
921 return false;
922 if (!(gimple_assign_rhs1 (use_assign) != lhs
923 && gimple_assign_rhs2 (use_assign) == lhs))
924 return false;
926 /* The first arg of the bitwise stmt must have a known value in MODEL
927 that implies that the value of the second arg doesn't matter, i.e.
928 1 for bitwise or, 0 for bitwise and. */
929 tree other_arg = gimple_assign_rhs1 (use_assign);
930 /* Use a NULL ctxt here to avoid generating warnings. */
931 const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
932 tree other_arg_cst = other_arg_sval->maybe_get_constant ();
933 if (!other_arg_cst)
934 return false;
935 switch (op)
937 default:
938 gcc_unreachable ();
939 case BIT_IOR_EXPR:
940 if (zerop (other_arg_cst))
941 return false;
942 break;
943 case BIT_AND_EXPR:
944 if (!zerop (other_arg_cst))
945 return false;
946 break;
949 /* All tests passed. We appear to be in a stmt that generates a boolean
950 temporary with a value that won't matter. */
951 return true;
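/* Editor's sketch of the pattern the function above guards against
   (hypothetical reduced test case, not from the testsuite):

     int a = get_flag ();  // suppose the analyzer knows a is nonzero here
     int uninit;
     if (a || uninit)      // with -O1+, folded to: tmp = a | uninit; if (tmp)
       do_something ();

   In the gimple, "uninit" is read even though the source would have
   short-circuited it, so the use-of-uninitialized-value warning for the
   def-stmt of the boolean temporary is suppressed.  */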
954 /* Workaround for discarding certain false positives from
955 -Wanalyzer-use-of-uninitialized-value
956 seen with -ftrivial-auto-var-init=.
958 -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.
960 If the address of the var is taken, gimplification will give us
961 something like:
963 _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
964 len = _1;
966 The result of DEFERRED_INIT will be an uninit value; we don't
967 want to emit a false positive for "len = _1;"
969 Return true if ASSIGN_STMT is such a stmt. */
971 static bool
972 due_to_ifn_deferred_init_p (const gassign *assign_stmt)
975 /* We must have an assignment to a decl from an SSA name that's the
976 result of a IFN_DEFERRED_INIT call. */
977 if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
978 return false;
979 tree lhs = gimple_assign_lhs (assign_stmt);
980 if (TREE_CODE (lhs) != VAR_DECL)
981 return false;
982 tree rhs = gimple_assign_rhs1 (assign_stmt);
983 if (TREE_CODE (rhs) != SSA_NAME)
984 return false;
985 const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
986 const gcall *call = dyn_cast <const gcall *> (def_stmt);
987 if (!call)
988 return false;
989 if (gimple_call_internal_p (call)
990 && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
991 return true;
992 return false;
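/* Editor's illustration of the pattern above: with
   -ftrivial-auto-var-init= enabled,

     int len;
     int *p = &len;   // address taken, so "len" cannot stay in SSA form

   gimplifies to the pair shown in the comment
   ("_1 = .DEFERRED_INIT (...); len = _1;"), and the copy into "len"
   must not be reported as a use of _1's uninitialized value.  */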
995 /* Check for SVAL being poisoned, adding a warning to CTXT.
996 Return SVAL, or, if a warning is added, another value, to avoid
997 repeatedly complaining about the same poisoned value in followup code. */
999 const svalue *
1000 region_model::check_for_poison (const svalue *sval,
1001 tree expr,
1002 region_model_context *ctxt) const
1004 if (!ctxt)
1005 return sval;
1007 if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
1009 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
1011 /* Ignore uninitialized uses of empty types; there's nothing
1012 to initialize. */
1013 if (pkind == POISON_KIND_UNINIT
1014 && sval->get_type ()
1015 && is_empty_type (sval->get_type ()))
1016 return sval;
1018 if (pkind == POISON_KIND_UNINIT)
1019 if (const gimple *curr_stmt = ctxt->get_stmt ())
1020 if (const gassign *assign_stmt
1021 = dyn_cast <const gassign *> (curr_stmt))
1023 /* Special case to avoid certain false positives. */
1024 if (within_short_circuited_stmt_p (this, assign_stmt))
1025 return sval;
1027 /* Special case to avoid false positive on
1028 -ftrivial-auto-var-init=. */
1029 if (due_to_ifn_deferred_init_p (assign_stmt))
1030 return sval;
1033 /* If we have an SSA name for a temporary, we don't want to print
1034 '<unknown>'.
1035 Poisoned values are shared by type, and so we can't reconstruct
1036 the tree other than via the def stmts, using
1037 fixup_tree_for_diagnostic. */
1038 tree diag_arg = fixup_tree_for_diagnostic (expr);
1039 const region *src_region = NULL;
1040 if (pkind == POISON_KIND_UNINIT)
1041 src_region = get_region_for_poisoned_expr (expr);
1042 if (ctxt->warn (new poisoned_value_diagnostic (diag_arg, pkind,
1043 src_region)))
1045 /* We only want to report use of a poisoned value at the first
1046 place it gets used; return an unknown value to avoid generating
1047 a chain of followup warnings. */
1048 sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
1051 return sval;
1054 return sval;
1057 /* Attempt to get a region for describing EXPR, the source region of
1058 a poisoned_svalue for use in a poisoned_value_diagnostic.
1059 Return NULL if there is no good region to use. */
1061 const region *
1062 region_model::get_region_for_poisoned_expr (tree expr) const
1064 if (TREE_CODE (expr) == SSA_NAME)
1066 tree decl = SSA_NAME_VAR (expr);
1067 if (decl && DECL_P (decl))
1068 expr = decl;
1069 else
1070 return NULL;
1072 return get_lvalue (expr, NULL);
1075 /* Update this model for the ASSIGN stmt, using CTXT to report any
1076 diagnostics. */
1078 void
1079 region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
1081 tree lhs = gimple_assign_lhs (assign);
1082 tree rhs1 = gimple_assign_rhs1 (assign);
1084 const region *lhs_reg = get_lvalue (lhs, ctxt);
1086 /* Most assignments are handled by:
1087 set_value (lhs_reg, SVALUE, CTXT)
1088 for some SVALUE. */
1089 if (const svalue *sval = get_gassign_result (assign, ctxt))
1091 tree expr = get_diagnostic_tree_for_gassign (assign);
1092 check_for_poison (sval, expr, ctxt);
1093 set_value (lhs_reg, sval, ctxt);
1094 return;
1097 enum tree_code op = gimple_assign_rhs_code (assign);
1098 switch (op)
1100 default:
1102 if (0)
1103 sorry_at (assign->location, "unhandled assignment op: %qs",
1104 get_tree_code_name (op));
1105 const svalue *unknown_sval
1106 = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
1107 set_value (lhs_reg, unknown_sval, ctxt);
1109 break;
1111 case CONSTRUCTOR:
1113 if (TREE_CLOBBER_P (rhs1))
1115 /* e.g. "x ={v} {CLOBBER};" */
1116 clobber_region (lhs_reg);
1118 else
1120 /* Any CONSTRUCTOR that survives to this point is either
1121 just a zero-init of everything, or a vector. */
1122 if (!CONSTRUCTOR_NO_CLEARING (rhs1))
1123 zero_fill_region (lhs_reg);
1124 unsigned ix;
1125 tree index;
1126 tree val;
1127 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
1129 gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
1130 if (!index)
1131 index = build_int_cst (integer_type_node, ix);
1132 gcc_assert (TREE_CODE (index) == INTEGER_CST);
1133 const svalue *index_sval
1134 = m_mgr->get_or_create_constant_svalue (index);
1135 gcc_assert (index_sval);
1136 const region *sub_reg
1137 = m_mgr->get_element_region (lhs_reg,
1138 TREE_TYPE (val),
1139 index_sval);
1140 const svalue *val_sval = get_rvalue (val, ctxt);
1141 set_value (sub_reg, val_sval, ctxt);
1145 break;
1147 case STRING_CST:
1149 /* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};". */
1150 const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
1151 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
1152 ctxt ? ctxt->get_uncertainty () : NULL);
1154 break;
1158 /* A pending_diagnostic subclass for implementing "__analyzer_dump_path". */
1160 class dump_path_diagnostic
1161 : public pending_diagnostic_subclass<dump_path_diagnostic>
1163 public:
1164 int get_controlling_option () const final override
1166 return 0;
1169 bool emit (rich_location *richloc) final override
1171 inform (richloc, "path");
1172 return true;
1175 const char *get_kind () const final override { return "dump_path_diagnostic"; }
1177 bool operator== (const dump_path_diagnostic &) const
1179 return true;
1183 /* Handle the pre-sm-state part of STMT, modifying this object in-place.
1184 Write true to *OUT_TERMINATE_PATH if the path should be terminated.
1185 Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
1186 side effects. */
1188 void
1189 region_model::on_stmt_pre (const gimple *stmt,
1190 bool *out_terminate_path,
1191 bool *out_unknown_side_effects,
1192 region_model_context *ctxt)
1194 switch (gimple_code (stmt))
1196 default:
1197 /* No-op for now. */
1198 break;
1200 case GIMPLE_ASSIGN:
1202 const gassign *assign = as_a <const gassign *> (stmt);
1203 on_assignment (assign, ctxt);
1205 break;
1207 case GIMPLE_ASM:
1209 const gasm *asm_stmt = as_a <const gasm *> (stmt);
1210 on_asm_stmt (asm_stmt, ctxt);
1212 break;
1214 case GIMPLE_CALL:
1216 /* Track whether we have a gcall to a function that's not recognized by
1217 anything, for which we don't have a function body, or for which we
1218 don't know the fndecl. */
1219 const gcall *call = as_a <const gcall *> (stmt);
1221 /* Debugging/test support. */
1222 if (is_special_named_call_p (call, "__analyzer_describe", 2))
1223 impl_call_analyzer_describe (call, ctxt);
1224 else if (is_special_named_call_p (call, "__analyzer_dump_capacity", 1))
1225 impl_call_analyzer_dump_capacity (call, ctxt);
1226 else if (is_special_named_call_p (call, "__analyzer_dump_escaped", 0))
1227 impl_call_analyzer_dump_escaped (call);
1228 else if (is_special_named_call_p (call, "__analyzer_dump_path", 0))
1230 /* Handle the builtin "__analyzer_dump_path" by queuing a
1231 diagnostic at this exploded_node. */
1232 ctxt->warn (new dump_path_diagnostic ());
1234 else if (is_special_named_call_p (call, "__analyzer_dump_region_model",
1237 /* Handle the builtin "__analyzer_dump_region_model" by dumping
1238 the region model's state to stderr. */
1239 dump (false);
1241 else if (is_special_named_call_p (call, "__analyzer_eval", 1))
1242 impl_call_analyzer_eval (call, ctxt);
1243 else if (is_special_named_call_p (call, "__analyzer_break", 0))
1245 /* Handle the builtin "__analyzer_break" by triggering a
1246 breakpoint. */
1247 /* TODO: is there a good cross-platform way to do this? */
1248 raise (SIGINT);
1250 else if (is_special_named_call_p (call,
1251 "__analyzer_dump_exploded_nodes",
1254 /* This is handled elsewhere. */
1256 else
1257 *out_unknown_side_effects = on_call_pre (call, ctxt,
1258 out_terminate_path);
1260 break;
1262 case GIMPLE_RETURN:
1264 const greturn *return_ = as_a <const greturn *> (stmt);
1265 on_return (return_, ctxt);
1267 break;
1271 /* Abstract base class for all out-of-bounds warnings with concrete values. */
1273 class out_of_bounds : public pending_diagnostic_subclass<out_of_bounds>
1275 public:
1276 out_of_bounds (const region *reg, tree diag_arg,
1277 byte_range out_of_bounds_range)
1278 : m_reg (reg), m_diag_arg (diag_arg),
1279 m_out_of_bounds_range (out_of_bounds_range)
1282 const char *get_kind () const final override
1284 return "out_of_bounds_diagnostic";
1287 bool operator== (const out_of_bounds &other) const
1289 return m_reg == other.m_reg
1290 && m_out_of_bounds_range == other.m_out_of_bounds_range
1291 && pending_diagnostic::same_tree_p (m_diag_arg, other.m_diag_arg);
1294 int get_controlling_option () const final override
1296 return OPT_Wanalyzer_out_of_bounds;
1299 void mark_interesting_stuff (interesting_t *interest) final override
1301 interest->add_region_creation (m_reg);
1304 protected:
1305 const region *m_reg;
1306 tree m_diag_arg;
1307 byte_range m_out_of_bounds_range;
1310 /* Abstract subclass for complaining about out-of-bounds accesses
1311 past the end of the buffer. */
1313 class past_the_end : public out_of_bounds
1315 public:
1316 past_the_end (const region *reg, tree diag_arg, byte_range range,
1317 tree byte_bound)
1318 : out_of_bounds (reg, diag_arg, range), m_byte_bound (byte_bound)
1321 bool operator== (const past_the_end &other) const
1323 return out_of_bounds::operator== (other)
1324 && pending_diagnostic::same_tree_p (m_byte_bound,
1325 other.m_byte_bound);
1328 label_text
1329 describe_region_creation_event (const evdesc::region_creation &ev) final
1330 override
1332 if (m_byte_bound && TREE_CODE (m_byte_bound) == INTEGER_CST)
1333 return ev.formatted_print ("capacity is %E bytes", m_byte_bound);
1335 return label_text ();
1338 protected:
1339 tree m_byte_bound;
1342 /* Concrete subclass to complain about buffer overflows. */
1344 class buffer_overflow : public past_the_end
1346 public:
1347 buffer_overflow (const region *reg, tree diag_arg,
1348 byte_range range, tree byte_bound)
1349 : past_the_end (reg, diag_arg, range, byte_bound)
1352 bool emit (rich_location *rich_loc) final override
1354 diagnostic_metadata m;
1355 bool warned;
1356 switch (m_reg->get_memory_space ())
1358 default:
1359 m.add_cwe (787);
1360 warned = warning_meta (rich_loc, m, get_controlling_option (),
1361 "buffer overflow");
1362 break;
1363 case MEMSPACE_STACK:
1364 m.add_cwe (121);
1365 warned = warning_meta (rich_loc, m, get_controlling_option (),
1366 "stack-based buffer overflow");
1367 break;
1368 case MEMSPACE_HEAP:
1369 m.add_cwe (122);
1370 warned = warning_meta (rich_loc, m, get_controlling_option (),
1371 "heap-based buffer overflow");
1372 break;
1375 if (warned)
1377 char num_bytes_past_buf[WIDE_INT_PRINT_BUFFER_SIZE];
1378 print_dec (m_out_of_bounds_range.m_size_in_bytes,
1379 num_bytes_past_buf, UNSIGNED);
1380 if (m_diag_arg)
1381 inform (rich_loc->get_loc (), "write is %s bytes past the end"
1382 " of %qE", num_bytes_past_buf,
1383 m_diag_arg);
1384 else
1385 inform (rich_loc->get_loc (), "write is %s bytes past the end"
1386 "of the region",
1387 num_bytes_past_buf);
1390 return warned;
1393 label_text describe_final_event (const evdesc::final_event &ev)
1394 final override
1396 byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
1397 byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
1398 char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
1399 print_dec (start, start_buf, SIGNED);
1400 char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
1401 print_dec (end, end_buf, SIGNED);
1403 if (start == end)
1405 if (m_diag_arg)
1406 return ev.formatted_print ("out-of-bounds write at byte %s but %qE"
1407 " ends at byte %E", start_buf, m_diag_arg,
1408 m_byte_bound);
1409 return ev.formatted_print ("out-of-bounds write at byte %s but region"
1410 " ends at byte %E", start_buf,
1411 m_byte_bound);
1413 else
1415 if (m_diag_arg)
1416 return ev.formatted_print ("out-of-bounds write from byte %s till"
1417 " byte %s but %qE ends at byte %E",
1418 start_buf, end_buf, m_diag_arg,
1419 m_byte_bound);
1420 return ev.formatted_print ("out-of-bounds write from byte %s till"
1421 " byte %s but region ends at byte %E",
1422 start_buf, end_buf, m_byte_bound);
1427 /* Concrete subclass to complain about buffer overreads. */
1429 class buffer_overread : public past_the_end
1431 public:
1432 buffer_overread (const region *reg, tree diag_arg,
1433 byte_range range, tree byte_bound)
1434 : past_the_end (reg, diag_arg, range, byte_bound)
1437 bool emit (rich_location *rich_loc) final override
1439 diagnostic_metadata m;
1440 m.add_cwe (126);
1441 bool warned = warning_meta (rich_loc, m, get_controlling_option (),
1442 "buffer overread");
1444 if (warned)
1446 char num_bytes_past_buf[WIDE_INT_PRINT_BUFFER_SIZE];
1447 print_dec (m_out_of_bounds_range.m_size_in_bytes,
1448 num_bytes_past_buf, UNSIGNED);
1449 if (m_diag_arg)
1450 inform (rich_loc->get_loc (), "read is %s bytes past the end"
1451 " of %qE", num_bytes_past_buf,
1452 m_diag_arg);
1453 else
1454 inform (rich_loc->get_loc (), "read is %s bytes past the end"
1455 "of the region",
1456 num_bytes_past_buf);
1459 return warned;
1462 label_text describe_final_event (const evdesc::final_event &ev)
1463 final override
1465 byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
1466 byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
1467 char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
1468 print_dec (start, start_buf, SIGNED);
1469 char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
1470 print_dec (end, end_buf, SIGNED);
1472 if (start == end)
1474 if (m_diag_arg)
1475 return ev.formatted_print ("out-of-bounds read at byte %s but %qE"
1476 " ends at byte %E", start_buf, m_diag_arg,
1477 m_byte_bound);
1478 return ev.formatted_print ("out-of-bounds read at byte %s but region"
1479 " ends at byte %E", start_buf,
1480 m_byte_bound);
1482 else
1484 if (m_diag_arg)
1485 return ev.formatted_print ("out-of-bounds read from byte %s till"
1486 " byte %s but %qE ends at byte %E",
1487 start_buf, end_buf, m_diag_arg,
1488 m_byte_bound);
1489 return ev.formatted_print ("out-of-bounds read from byte %s till"
1490 " byte %s but region ends at byte %E",
1491 start_buf, end_buf, m_byte_bound);
1496 /* Concrete subclass to complain about buffer underflows. */
1498 class buffer_underflow : public out_of_bounds
1500 public:
1501 buffer_underflow (const region *reg, tree diag_arg, byte_range range)
1502 : out_of_bounds (reg, diag_arg, range)
1505 bool emit (rich_location *rich_loc) final override
1507 diagnostic_metadata m;
1508 m.add_cwe (124);
1509 return warning_meta (rich_loc, m, get_controlling_option (),
1510 "buffer underflow");
1513 label_text describe_final_event (const evdesc::final_event &ev)
1514 final override
1516 byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
1517 byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
1518 char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
1519 print_dec (start, start_buf, SIGNED);
1520 char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
1521 print_dec (end, end_buf, SIGNED);
1523 if (start == end)
1525 if (m_diag_arg)
1526 return ev.formatted_print ("out-of-bounds write at byte %s but %qE"
1527 " starts at byte 0", start_buf,
1528 m_diag_arg);
1529 return ev.formatted_print ("out-of-bounds write at byte %s but region"
1530 " starts at byte 0", start_buf);
1532 else
1534 if (m_diag_arg)
1535 return ev.formatted_print ("out-of-bounds write from byte %s till"
1536 " byte %s but %qE starts at byte 0",
1537 start_buf, end_buf, m_diag_arg);
1538 return ev.formatted_print ("out-of-bounds write from byte %s till"
1539 " byte %s but region starts at byte 0",
1540 start_buf, end_buf);
1545 /* Concrete subclass to complain about buffer underreads. */
1547 class buffer_underread : public out_of_bounds
1549 public:
1550 buffer_underread (const region *reg, tree diag_arg, byte_range range)
1551 : out_of_bounds (reg, diag_arg, range)
1554 bool emit (rich_location *rich_loc) final override
1556 diagnostic_metadata m;
1557 m.add_cwe (127);
1558 return warning_meta (rich_loc, m, get_controlling_option (),
1559 "buffer underread");
1562 label_text describe_final_event (const evdesc::final_event &ev)
1563 final override
1565 byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
1566 byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
1567 char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
1568 print_dec (start, start_buf, SIGNED);
1569 char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
1570 print_dec (end, end_buf, SIGNED);
1572 if (start == end)
1574 if (m_diag_arg)
1575 return ev.formatted_print ("out-of-bounds read at byte %s but %qE"
1576 " starts at byte 0", start_buf,
1577 m_diag_arg);
1578 return ev.formatted_print ("out-of-bounds read at byte %s but region"
1579 " starts at byte 0", start_buf);
1581 else
1583 if (m_diag_arg)
1584 return ev.formatted_print ("out-of-bounds read from byte %s till"
1585 " byte %s but %qE starts at byte 0",
1586 start_buf, end_buf, m_diag_arg);
1587 return ev.formatted_print ("out-of-bounds read from byte %s till"
1588 " byte %s but region starts at byte 0",
1589 start_buf, end_buf);
1594 /* Abstract class to complain about out-of-bounds read/writes where
1595 the values are symbolic. */
1597 class symbolic_past_the_end
1598 : public pending_diagnostic_subclass<symbolic_past_the_end>
1600 public:
1601 symbolic_past_the_end (const region *reg, tree diag_arg, tree offset,
1602 tree num_bytes, tree capacity)
1603 : m_reg (reg), m_diag_arg (diag_arg), m_offset (offset),
1604 m_num_bytes (num_bytes), m_capacity (capacity)
1607 const char *get_kind () const final override
1609 return "symbolic_past_the_end";
1612 bool operator== (const symbolic_past_the_end &other) const
1614 return m_reg == other.m_reg
1615 && pending_diagnostic::same_tree_p (m_diag_arg, other.m_diag_arg)
1616 && pending_diagnostic::same_tree_p (m_offset, other.m_offset)
1617 && pending_diagnostic::same_tree_p (m_num_bytes, other.m_num_bytes)
1618 && pending_diagnostic::same_tree_p (m_capacity, other.m_capacity);
1621 int get_controlling_option () const final override
1623 return OPT_Wanalyzer_out_of_bounds;
1626 void mark_interesting_stuff (interesting_t *interest) final override
1628 interest->add_region_creation (m_reg);
1631 label_text
1632 describe_region_creation_event (const evdesc::region_creation &ev) final
1633 override
1635 if (m_capacity)
1636 return ev.formatted_print ("capacity is %qE bytes", m_capacity);
1638 return label_text ();
1641 label_text
1642 describe_final_event (const evdesc::final_event &ev) final override
1644 const char *byte_str;
1645 if (pending_diagnostic::same_tree_p (m_num_bytes, integer_one_node))
1646 byte_str = "byte";
1647 else
1648 byte_str = "bytes";
1650 if (m_offset)
1652 if (m_num_bytes && TREE_CODE (m_num_bytes) == INTEGER_CST)
1654 if (m_diag_arg)
1655 return ev.formatted_print ("%s of %E %s at offset %qE"
1656 " exceeds %qE", m_dir_str,
1657 m_num_bytes, byte_str,
1658 m_offset, m_diag_arg);
1659 else
1660 return ev.formatted_print ("%s of %E %s at offset %qE"
1661 " exceeds the buffer", m_dir_str,
1662 m_num_bytes, byte_str, m_offset);
1664 else if (m_num_bytes)
1666 if (m_diag_arg)
1667 return ev.formatted_print ("%s of %qE %s at offset %qE"
1668 " exceeds %qE", m_dir_str,
1669 m_num_bytes, byte_str,
1670 m_offset, m_diag_arg);
1671 else
1672 return ev.formatted_print ("%s of %qE %s at offset %qE"
1673 " exceeds the buffer", m_dir_str,
1674 m_num_bytes, byte_str, m_offset);
1676 else
1678 if (m_diag_arg)
1679 return ev.formatted_print ("%s at offset %qE exceeds %qE",
1680 m_dir_str, m_offset, m_diag_arg);
1681 else
1682 return ev.formatted_print ("%s at offset %qE exceeds the"
1683 " buffer", m_dir_str, m_offset);
1686 if (m_diag_arg)
1687 return ev.formatted_print ("out-of-bounds %s on %qE",
1688 m_dir_str, m_diag_arg);
1689 return ev.formatted_print ("out-of-bounds %s", m_dir_str);
1692 protected:
1693 const region *m_reg;
1694 tree m_diag_arg;
1695 tree m_offset;
1696 tree m_num_bytes;
1697 tree m_capacity;
1698 const char *m_dir_str;
1701 /* Concrete subclass to complain about overflows with symbolic values. */
1703 class symbolic_buffer_overflow : public symbolic_past_the_end
1705 public:
1706 symbolic_buffer_overflow (const region *reg, tree diag_arg, tree offset,
1707 tree num_bytes, tree capacity)
1708 : symbolic_past_the_end (reg, diag_arg, offset, num_bytes, capacity)
1710 m_dir_str = "write";
1713 bool emit (rich_location *rich_loc) final override
1715 diagnostic_metadata m;
1716 switch (m_reg->get_memory_space ())
1718 default:
1719 m.add_cwe (787);
1720 return warning_meta (rich_loc, m, get_controlling_option (),
1721 "buffer overflow");
1722 case MEMSPACE_STACK:
1723 m.add_cwe (121);
1724 return warning_meta (rich_loc, m, get_controlling_option (),
1725 "stack-based buffer overflow");
1726 case MEMSPACE_HEAP:
1727 m.add_cwe (122);
1728 return warning_meta (rich_loc, m, get_controlling_option (),
1729 "heap-based buffer overflow");
1734 /* Concrete subclass to complain about overreads with symbolic values. */
1736 class symbolic_buffer_overread : public symbolic_past_the_end
1738 public:
1739 symbolic_buffer_overread (const region *reg, tree diag_arg, tree offset,
1740 tree num_bytes, tree capacity)
1741 : symbolic_past_the_end (reg, diag_arg, offset, num_bytes, capacity)
1743 m_dir_str = "read";
1746 bool emit (rich_location *rich_loc) final override
1748 diagnostic_metadata m;
1749 m.add_cwe (126);
1750 return warning_meta (rich_loc, m, get_controlling_option (),
1751 "buffer overread");
1755 /* Check whether an access is past the end of the BASE_REG. */
1757 void region_model::check_symbolic_bounds (const region *base_reg,
1758 const svalue *sym_byte_offset,
1759 const svalue *num_bytes_sval,
1760 const svalue *capacity,
1761 enum access_direction dir,
1762 region_model_context *ctxt) const
1764 gcc_assert (ctxt);
1766 const svalue *next_byte
1767 = m_mgr->get_or_create_binop (num_bytes_sval->get_type (), PLUS_EXPR,
1768 sym_byte_offset, num_bytes_sval);
1770 if (eval_condition_without_cm (next_byte, GT_EXPR, capacity).is_true ())
1772 tree diag_arg = get_representative_tree (base_reg);
1773 tree offset_tree = get_representative_tree (sym_byte_offset);
1774 tree num_bytes_tree = get_representative_tree (num_bytes_sval);
1775 tree capacity_tree = get_representative_tree (capacity);
1776 switch (dir)
1778 default:
1779 gcc_unreachable ();
1780 break;
1781 case DIR_READ:
1782 ctxt->warn (new symbolic_buffer_overread (base_reg, diag_arg,
1783 offset_tree,
1784 num_bytes_tree,
1785 capacity_tree));
1786 break;
1787 case DIR_WRITE:
1788 ctxt->warn (new symbolic_buffer_overflow (base_reg, diag_arg,
1789 offset_tree,
1790 num_bytes_tree,
1791 capacity_tree));
1792 break;
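/* Return the INTEGER_CST tree for SVAL if it is an integer constant,
   or NULL_TREE otherwise.  */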
1797 static tree
1798 maybe_get_integer_cst_tree (const svalue *sval)
1800 tree cst_tree = sval->maybe_get_constant ();
1801 if (cst_tree && TREE_CODE (cst_tree) == INTEGER_CST)
1802 return cst_tree;
1804 return NULL_TREE;
1807 /* May complain if the access to REG is out-of-bounds.  */
1809 void
1810 region_model::check_region_bounds (const region *reg,
1811 enum access_direction dir,
1812 region_model_context *ctxt) const
1814 gcc_assert (ctxt);
1816 /* Get the offset. */
1817 region_offset reg_offset = reg->get_offset (m_mgr);
1818 const region *base_reg = reg_offset.get_base_region ();
1820 /* Bail out on symbolic regions.
1821 (e.g. because the analyzer did not see previous offsets on the latter,
1822 it might think that a negative access is before the buffer). */
1823 if (base_reg->symbolic_p ())
1824 return;
1826 /* Find out how many bytes were accessed. */
1827 const svalue *num_bytes_sval = reg->get_byte_size_sval (m_mgr);
1828 tree num_bytes_tree = maybe_get_integer_cst_tree (num_bytes_sval);
1830 /* Get the capacity of the buffer. */
1831 const svalue *capacity = get_capacity (base_reg);
1832 tree cst_capacity_tree = maybe_get_integer_cst_tree (capacity);
1834 /* The constant offset from a pointer is represented internally as a sizetype
1835 but should be interpreted as a signed value here. The statement below
1836 converts the offset from bits to bytes and then to a signed integer with
1837 the same precision as sizetype on the target system.
1839 For example, this is needed for out-of-bounds-3.c test1 to pass when
1840 compiled with a 64-bit gcc build targeting 32-bit systems. */
1841 byte_offset_t offset;
1842 if (!reg_offset.symbolic_p ())
1843 offset = wi::sext (reg_offset.get_bit_offset () >> LOG2_BITS_PER_UNIT,
1844 TYPE_PRECISION (size_type_node));
1846 /* If either the offset or the number of bytes accessed are symbolic,
1847 we have to reason about symbolic values. */
1848 if (reg_offset.symbolic_p () || !num_bytes_tree)
1850 const svalue *byte_offset_sval;
1851 if (!reg_offset.symbolic_p ())
1853 tree offset_tree = wide_int_to_tree (integer_type_node, offset);
1854 byte_offset_sval
1855 = m_mgr->get_or_create_constant_svalue (offset_tree);
1857 else
1858 byte_offset_sval = reg_offset.get_symbolic_byte_offset ();
1859 check_symbolic_bounds (base_reg, byte_offset_sval, num_bytes_sval,
1860 capacity, dir, ctxt);
1861 return;
1864 /* Otherwise continue to check with concrete values. */
1865 byte_range out (0, 0);
1866 /* NUM_BYTES_TREE should always be interpreted as unsigned. */
1867 byte_offset_t num_bytes_unsigned = wi::to_offset (num_bytes_tree);
1868 byte_range read_bytes (offset, num_bytes_unsigned);
1869 /* If read_bytes has a subset < 0, we do have an underflow. */
1870 if (read_bytes.falls_short_of_p (0, &out))
1872 tree diag_arg = get_representative_tree (base_reg);
1873 switch (dir)
1875 default:
1876 gcc_unreachable ();
1877 break;
1878 case DIR_READ:
1879 ctxt->warn (new buffer_underread (reg, diag_arg, out));
1880 break;
1881 case DIR_WRITE:
1882 ctxt->warn (new buffer_underflow (reg, diag_arg, out));
1883 break;
1887 /* For accesses past the end, we do need a concrete capacity. No need to
1888 do a symbolic check here because the inequality check cannot reason
1889 about whether a constant is greater than a symbolic value. */
1890 if (!cst_capacity_tree)
1891 return;
1893 byte_range buffer (0, wi::to_offset (cst_capacity_tree));
1894 /* If READ_BYTES exceeds BUFFER, we do have an overflow. */
1895 if (read_bytes.exceeds_p (buffer, &out))
1897 tree byte_bound = wide_int_to_tree (size_type_node,
1898 buffer.get_next_byte_offset ());
1899 tree diag_arg = get_representative_tree (base_reg);
1901 switch (dir)
1903 default:
1904 gcc_unreachable ();
1905 break;
1906 case DIR_READ:
1907 ctxt->warn (new buffer_overread (reg, diag_arg, out, byte_bound));
1908 break;
1909 case DIR_WRITE:
1910 ctxt->warn (new buffer_overflow (reg, diag_arg, out, byte_bound));
1911 break;
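/* Editor's worked example of the concrete path above (hypothetical):

     char buf[4];
     buf[4] = 'x';

   The offset is 4 and num_bytes is 1, so read_bytes is the byte range
   [4, 4] while buffer is [0, 3]; read_bytes.exceeds_p (buffer, &out)
   holds, and a buffer_overflow (here CWE-121, stack-based) is reported
   with byte_bound 4.  */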
1916 /* Ensure that all arguments at the call described by CD are checked
1917 for poisoned values, by calling get_rvalue on each argument. */
1919 void
1920 region_model::check_call_args (const call_details &cd) const
1922 for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
1923 cd.get_arg_svalue (arg_idx);
1926 /* Return true if CD is known to be a call to a function with
1927 __attribute__((const)). */
1929 static bool
1930 const_fn_p (const call_details &cd)
1932 tree fndecl = cd.get_fndecl_for_call ();
1933 if (!fndecl)
1934 return false;
1935 gcc_assert (DECL_P (fndecl));
1936 return TREE_READONLY (fndecl);
1939 /* If this CD is known to be a call to a function with
1940 __attribute__((const)), attempt to get a const_fn_result_svalue
1941 based on the arguments, or return NULL otherwise. */
1943 static const svalue *
1944 maybe_get_const_fn_result (const call_details &cd)
1946 if (!const_fn_p (cd))
1947 return NULL;
1949 unsigned num_args = cd.num_args ();
1950 if (num_args > const_fn_result_svalue::MAX_INPUTS)
1951 /* Too many arguments. */
1952 return NULL;
1954 auto_vec<const svalue *> inputs (num_args);
1955 for (unsigned arg_idx = 0; arg_idx < num_args; arg_idx++)
1957 const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
1958 if (!arg_sval->can_have_associated_state_p ())
1959 return NULL;
1960 inputs.quick_push (arg_sval);
1963 region_model_manager *mgr = cd.get_manager ();
1964 const svalue *sval
1965 = mgr->get_or_create_const_fn_result_svalue (cd.get_lhs_type (),
1966 cd.get_fndecl_for_call (),
1967 inputs);
1968 return sval;
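/* Editor's illustration: given

     extern int popcount (int) __attribute__((const));

   two calls "popcount (x)" with the same svalue for "x" map to the same
   const_fn_result_svalue, so the analyzer can treat their results as
   equal without having a body for the function.  */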
1971 /* Update this model for an outcome of a call that returns zero.
1972 If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
1973 the state-merger code from merging success and failure outcomes. */
1975 void
1976 region_model::update_for_zero_return (const call_details &cd,
1977 bool unmergeable)
1979 if (!cd.get_lhs_type ())
1980 return;
1981 const svalue *result
1982 = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
1983 if (unmergeable)
1984 result = m_mgr->get_or_create_unmergeable (result);
1985 set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
1988 /* Update this model for an outcome of a call that returns non-zero. */
1990 void
1991 region_model::update_for_nonzero_return (const call_details &cd)
1993 if (!cd.get_lhs_type ())
1994 return;
1995 const svalue *zero
1996 = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
1997 const svalue *result
1998 = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
1999 add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
2002 /* Subroutine of region_model::maybe_get_copy_bounds.
2003 The Linux kernel commonly uses
2004 min_t([unsigned] long, VAR, sizeof(T));
2005 to set an upper bound on the size of a copy_to_user.
2006 Attempt to simplify such sizes by trying to get the upper bound as a
2007 constant.
2008 Return the simplified svalue if possible, or NULL otherwise. */
2010 static const svalue *
2011 maybe_simplify_upper_bound (const svalue *num_bytes_sval,
2012 region_model_manager *mgr)
2014 tree type = num_bytes_sval->get_type ();
2015 while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
2016 num_bytes_sval = raw;
2017 if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
2018 if (binop_sval->get_op () == MIN_EXPR)
2019 if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
2021 return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
2022 /* TODO: we might want to also capture the constraint
2023 when recording the diagnostic, or note that we're using
2024 the upper bound. */
2026 return NULL;
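/* For example (paraphrasing the kernel idiom mentioned above):
     n = min_t (unsigned long, len, sizeof (buf));
     ... copy_to_user (dst, src, n) ...
   yields a MIN_EXPR binop_svalue whose second operand is the constant
   sizeof (buf); the function above simplifies the bound to that
   constant, cast back to the original type.  */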
2029 /* Attempt to get an upper bound for the size of a copy when simulating a
2030 copy function.
2032 NUM_BYTES_SVAL is the symbolic value for the size of the copy.
2033 Use it if it's constant, otherwise try to simplify it. Failing
2034 that, use the size of SRC_REG if constant.
2036 Return a symbolic value for an upper limit on the number of bytes
2037 copied, or NULL if no such value could be determined. */
2039 const svalue *
2040 region_model::maybe_get_copy_bounds (const region *src_reg,
2041 const svalue *num_bytes_sval)
2043 if (num_bytes_sval->maybe_get_constant ())
2044 return num_bytes_sval;
2046 if (const svalue *simplified
2047 = maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
2048 num_bytes_sval = simplified;
2050 if (num_bytes_sval->maybe_get_constant ())
2051 return num_bytes_sval;
2053 /* For now, try just guessing the size as the capacity of the
2054 base region of the src.
2055 This is a hack; we might get too large a value. */
2056 const region *src_base_reg = src_reg->get_base_region ();
2057 num_bytes_sval = get_capacity (src_base_reg);
2059 if (num_bytes_sval->maybe_get_constant ())
2060 return num_bytes_sval;
2062 /* Non-constant: give up. */
2063 return NULL;
2066 /* Get any known_function for FNDECL, or NULL if there is none. */
2068 const known_function *
2069 region_model::get_known_function (tree fndecl) const
2071 known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
2072 return known_fn_mgr->get_by_fndecl (fndecl);
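/* A minimal sketch of a known_function subclass such as a plugin
   might supply (the class is hypothetical, and maybe_set_lhs is
   assumed from the call_details API):
     class returns_42 : public known_function
     {
     public:
       void impl_call_pre (const call_details &cd) const
       {
         if (cd.get_lhs_type ())
           {
             const svalue *result
               = cd.get_manager ()->get_or_create_int_cst
                   (cd.get_lhs_type (), 42);
             cd.maybe_set_lhs (result);
           }
       }
     };
   on_call_pre (below) consults get_known_function and, if a handler
   is registered for the fndecl, calls its impl_call_pre rather than
   treating the call as having unknown side effects.  */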
2075 /* Update this model for the CALL stmt, using CTXT to report any
2076 diagnostics - the first half.
2078 Updates to the region_model that should be made *before* sm-states
2079 are updated are done here; other updates to the region_model are done
2080 in region_model::on_call_post.
2082 Return true if the function call has unknown side effects (it wasn't
2083 recognized and we don't have a body for it, or are unable to tell which
2084 fndecl it is).
2086 Write true to *OUT_TERMINATE_PATH if this execution path should be
2087 terminated (e.g. the function call terminates the process). */
2089 bool
2090 region_model::on_call_pre (const gcall *call, region_model_context *ctxt,
2091 bool *out_terminate_path)
2093 call_details cd (call, this, ctxt);
2095 bool unknown_side_effects = false;
2097 /* Special-case for IFN_DEFERRED_INIT.
2098 We want to report uninitialized variables with -fanalyzer (treating
2099 -ftrivial-auto-var-init= as purely a mitigation feature).
2100 Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
2101 lhs of the call, so that it is still uninitialized from the point of
2102 view of the analyzer. */
2103 if (gimple_call_internal_p (call)
2104 && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
2105 return false;
2107 /* Get svalues for all of the arguments at the callsite, to ensure that we
2108 complain about any uninitialized arguments. This might lead to
2109 duplicates if any of the handling below also looks up the svalues,
2110 but the deduplication code should deal with that. */
2111 if (ctxt)
2112 check_call_args (cd);
2114 /* Some of the cases below update the lhs of the call based on the
2115 return value, but not all. Provide a default value, which may
2116 get overwritten below. */
2117 if (tree lhs = gimple_call_lhs (call))
2119 const region *lhs_region = get_lvalue (lhs, ctxt);
2120 const svalue *sval = maybe_get_const_fn_result (cd);
2121 if (!sval)
2123 /* For the common case of functions without __attribute__((const)),
2124 use a conjured value, and purge any prior state involving that
2125 value (in case this is in a loop). */
2126 sval = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
2127 lhs_region,
2128 conjured_purge (this,
2129 ctxt));
2131 set_value (lhs_region, sval, ctxt);
2134 if (gimple_call_internal_p (call))
2136 switch (gimple_call_internal_fn (call))
2138 default:
2139 break;
2140 case IFN_BUILTIN_EXPECT:
2141 impl_call_builtin_expect (cd);
2142 return false;
2143 case IFN_UBSAN_BOUNDS:
2144 return false;
2145 case IFN_VA_ARG:
2146 impl_call_va_arg (cd);
2147 return false;
2151 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
2153 /* The various impl_call_* member functions are implemented
2154 in region-model-impl-calls.cc.
2155 Having them split out into separate functions makes it easier
2156 to put breakpoints on the handling of specific functions. */
2157 int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);
2159 if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
2160 && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
2161 switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
2163 default:
2164 if (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
2165 unknown_side_effects = true;
2166 break;
2167 case BUILT_IN_ALLOCA:
2168 case BUILT_IN_ALLOCA_WITH_ALIGN:
2169 impl_call_alloca (cd);
2170 return false;
2171 case BUILT_IN_CALLOC:
2172 impl_call_calloc (cd);
2173 return false;
2174 case BUILT_IN_EXPECT:
2175 case BUILT_IN_EXPECT_WITH_PROBABILITY:
2176 impl_call_builtin_expect (cd);
2177 return false;
2178 case BUILT_IN_FREE:
2179 /* Handle in "on_call_post". */
2180 break;
2181 case BUILT_IN_MALLOC:
2182 impl_call_malloc (cd);
2183 return false;
2184 case BUILT_IN_MEMCPY:
2185 case BUILT_IN_MEMCPY_CHK:
2186 impl_call_memcpy (cd);
2187 return false;
2188 case BUILT_IN_MEMSET:
2189 case BUILT_IN_MEMSET_CHK:
2190 impl_call_memset (cd);
2191 return false;
2193 case BUILT_IN_REALLOC:
2194 return false;
2195 case BUILT_IN_STRCHR:
2196 impl_call_strchr (cd);
2197 return false;
2198 case BUILT_IN_STRCPY:
2199 case BUILT_IN_STRCPY_CHK:
2200 impl_call_strcpy (cd);
2201 return false;
2202 case BUILT_IN_STRLEN:
2203 impl_call_strlen (cd);
2204 return false;
2206 case BUILT_IN_STACK_SAVE:
2207 case BUILT_IN_STACK_RESTORE:
2208 return false;
2210 /* Stdio builtins. */
2211 case BUILT_IN_FPRINTF:
2212 case BUILT_IN_FPRINTF_UNLOCKED:
2213 case BUILT_IN_PUTC:
2214 case BUILT_IN_PUTC_UNLOCKED:
2215 case BUILT_IN_FPUTC:
2216 case BUILT_IN_FPUTC_UNLOCKED:
2217 case BUILT_IN_FPUTS:
2218 case BUILT_IN_FPUTS_UNLOCKED:
2219 case BUILT_IN_FWRITE:
2220 case BUILT_IN_FWRITE_UNLOCKED:
2221 case BUILT_IN_PRINTF:
2222 case BUILT_IN_PRINTF_UNLOCKED:
2223 case BUILT_IN_PUTCHAR:
2224 case BUILT_IN_PUTCHAR_UNLOCKED:
2225 case BUILT_IN_PUTS:
2226 case BUILT_IN_PUTS_UNLOCKED:
2227 case BUILT_IN_VFPRINTF:
2228 case BUILT_IN_VPRINTF:
2229 /* These stdio builtins have external effects that are out
2230 of scope for the analyzer: we only want to model the effects
2231 on the return value. */
2232 break;
2234 case BUILT_IN_VA_START:
2235 impl_call_va_start (cd);
2236 return false;
2237 case BUILT_IN_VA_COPY:
2238 impl_call_va_copy (cd);
2239 return false;
2241 else if (is_named_call_p (callee_fndecl, "malloc", call, 1))
2243 impl_call_malloc (cd);
2244 return false;
2246 else if (is_named_call_p (callee_fndecl, "calloc", call, 2))
2248 impl_call_calloc (cd);
2249 return false;
2251 else if (is_named_call_p (callee_fndecl, "alloca", call, 1))
2253 impl_call_alloca (cd);
2254 return false;
2256 else if (is_named_call_p (callee_fndecl, "realloc", call, 2))
2258 impl_call_realloc (cd);
2259 return false;
2261 else if (is_named_call_p (callee_fndecl, "error"))
2263 if (impl_call_error (cd, 3, out_terminate_path))
2264 return false;
2265 else
2266 unknown_side_effects = true;
2268 else if (is_named_call_p (callee_fndecl, "error_at_line"))
2270 if (impl_call_error (cd, 5, out_terminate_path))
2271 return false;
2272 else
2273 unknown_side_effects = true;
2275 else if (is_named_call_p (callee_fndecl, "fgets", call, 3)
2276 || is_named_call_p (callee_fndecl, "fgets_unlocked", call, 3))
2278 impl_call_fgets (cd);
2279 return false;
2281 else if (is_named_call_p (callee_fndecl, "fread", call, 4))
2283 impl_call_fread (cd);
2284 return false;
2286 else if (is_named_call_p (callee_fndecl, "getchar", call, 0))
2288 /* No side-effects (tracking stream state is out-of-scope
2289 for the analyzer). */
2291 else if (is_named_call_p (callee_fndecl, "memset", call, 3)
2292 && POINTER_TYPE_P (cd.get_arg_type (0)))
2294 impl_call_memset (cd);
2295 return false;
2297 else if (is_named_call_p (callee_fndecl, "putenv", call, 1)
2298 && POINTER_TYPE_P (cd.get_arg_type (0)))
2300 impl_call_putenv (cd);
2301 return false;
2303 else if (is_named_call_p (callee_fndecl, "strchr", call, 2)
2304 && POINTER_TYPE_P (cd.get_arg_type (0)))
2306 impl_call_strchr (cd);
2307 return false;
2309 else if (is_named_call_p (callee_fndecl, "strlen", call, 1)
2310 && POINTER_TYPE_P (cd.get_arg_type (0)))
2312 impl_call_strlen (cd);
2313 return false;
2315 else if (is_named_call_p (callee_fndecl, "operator new", call, 1))
2317 impl_call_operator_new (cd);
2318 return false;
2320 else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
2322 impl_call_operator_new (cd);
2323 return false;
2325 else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
2326 || is_named_call_p (callee_fndecl, "operator delete", call, 2)
2327 || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
2329 /* Handle in "on_call_post". */
2331 else if (const known_function *kf = get_known_function (callee_fndecl))
2333 kf->impl_call_pre (cd);
2334 return false;
2336 else if (!fndecl_has_gimple_body_p (callee_fndecl)
2337 && (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
2338 && !fndecl_built_in_p (callee_fndecl))
2339 unknown_side_effects = true;
2341 else
2342 unknown_side_effects = true;
2344 return unknown_side_effects;
2347 /* Update this model for the CALL stmt, using CTXT to report any
2348 diagnostics - the second half.
2350 Updates to the region_model that should be made *after* sm-states
2351 are updated are done here; other updates to the region_model are done
2352 in region_model::on_call_pre.
2354 If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
2355 to purge state. */
2357 void
2358 region_model::on_call_post (const gcall *call,
2359 bool unknown_side_effects,
2360 region_model_context *ctxt)
2362 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
2364 call_details cd (call, this, ctxt);
2365 if (is_named_call_p (callee_fndecl, "free", call, 1))
2367 impl_call_free (cd);
2368 return;
2370 if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
2371 || is_named_call_p (callee_fndecl, "operator delete", call, 2)
2372 || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
2374 impl_call_operator_delete (cd);
2375 return;
2377 /* Was this fndecl referenced by
2378 __attribute__((malloc(FOO)))? */
2379 if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
2381 impl_deallocation_call (cd);
2382 return;
2384 if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
2385 && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
2386 switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
2388 default:
2389 break;
2390 case BUILT_IN_REALLOC:
2391 impl_call_realloc (cd);
2392 return;
2394 case BUILT_IN_VA_END:
2395 impl_call_va_end (cd);
2396 return;
2400 if (unknown_side_effects)
2401 handle_unrecognized_call (call, ctxt);
2404 /* Purge state involving SVAL from this region_model, using CTXT
2405 (if non-NULL) to purge other state in a program_state.
2407 For example, if we're at the def-stmt of an SSA name, then we need to
2408 purge any state for svalues that involve that SSA name. This avoids
2409 false positives in loops, since a symbolic value referring to the
2410 SSA name will be referring to the previous value of that SSA name.
2412 For example, in:
2413 while ((e = hashmap_iter_next(&iter))) {
2414 struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
2415 free (e_strbuf->value);
2417 at the def-stmt of e_8:
2418 e_8 = hashmap_iter_next (&iter);
2419 we should purge the "freed" state of:
2420 INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
2421 which is the "e_strbuf->value" value from the previous iteration,
2422 or we will erroneously report a double-free - the "e_8" within it
2423 refers to the previous value. */
2425 void
2426 region_model::purge_state_involving (const svalue *sval,
2427 region_model_context *ctxt)
2429 if (!sval->can_have_associated_state_p ())
2430 return;
2431 m_store.purge_state_involving (sval, m_mgr);
2432 m_constraints->purge_state_involving (sval);
2433 m_dynamic_extents.purge_state_involving (sval);
2434 if (ctxt)
2435 ctxt->purge_state_involving (sval);
2438 /* A pending_note subclass for adding a note about an
2439 __attribute__((access, ...)) to a diagnostic. */
2441 class reason_attr_access : public pending_note_subclass<reason_attr_access>
2443 public:
2444 reason_attr_access (tree callee_fndecl, const attr_access &access)
2445 : m_callee_fndecl (callee_fndecl),
2446 m_ptr_argno (access.ptrarg),
2447 m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
2451 const char *get_kind () const final override { return "reason_attr_access"; }
2453 void emit () const final override
2455 inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
2456 "parameter %i of %qD marked with attribute %qs",
2457 m_ptr_argno + 1, m_callee_fndecl, m_access_str);
2460 bool operator== (const reason_attr_access &other) const
2462 return (m_callee_fndecl == other.m_callee_fndecl
2463 && m_ptr_argno == other.m_ptr_argno
2464 && !strcmp (m_access_str, other.m_access_str));
2467 private:
2468 tree m_callee_fndecl;
2469 unsigned m_ptr_argno;
2470 const char *m_access_str;
2473 /* Check CALL, a call to external function CALLEE_FNDECL, based on
2474 any __attribute__ ((access, ...)) on the latter, complaining to
2475 CTXT about any issues.
2477 Currently we merely call check_region_for_write on any regions
2478 pointed to by arguments marked with a "write_only" or "read_write"
2479 attribute. */
2481 void
2482 region_model::
2483 check_external_function_for_access_attr (const gcall *call,
2484 tree callee_fndecl,
2485 region_model_context *ctxt) const
2487 gcc_assert (call);
2488 gcc_assert (callee_fndecl);
2489 gcc_assert (ctxt);
2491 tree fntype = TREE_TYPE (callee_fndecl);
2492 if (!fntype)
2493 return;
2495 if (!TYPE_ATTRIBUTES (fntype))
2496 return;
2498 /* Initialize a map of attribute access specifications for arguments
2499 to the function call. */
2500 rdwr_map rdwr_idx;
2501 init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));
2503 unsigned argno = 0;
2505 for (tree iter = TYPE_ARG_TYPES (fntype); iter;
2506 iter = TREE_CHAIN (iter), ++argno)
2508 const attr_access* access = rdwr_idx.get (argno);
2509 if (!access)
2510 continue;
2512 /* Ignore any duplicate entry in the map for the size argument. */
2513 if (access->ptrarg != argno)
2514 continue;
2516 if (access->mode == access_write_only
2517 || access->mode == access_read_write)
2519 /* Subclass of decorated_region_model_context that
2520 adds a note about the attr access to any saved diagnostics. */
2521 class annotating_ctxt : public note_adding_context
2523 public:
2524 annotating_ctxt (tree callee_fndecl,
2525 const attr_access &access,
2526 region_model_context *ctxt)
2527 : note_adding_context (ctxt),
2528 m_callee_fndecl (callee_fndecl),
2529 m_access (access)
2532 pending_note *make_note () final override
2534 return new reason_attr_access (m_callee_fndecl, m_access);
2536 private:
2537 tree m_callee_fndecl;
2538 const attr_access &m_access;
2541 /* Use this ctxt below so that any diagnostics get the
2542 note added to them. */
2543 annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);
2545 tree ptr_tree = gimple_call_arg (call, access->ptrarg);
2546 const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
2547 const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
2548 check_region_for_write (reg, &my_ctxt);
2549 /* We don't use the size arg for now. */
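/* For example, given a declaration (hypothetical):
     void fill_buf (char *dst, size_t n)
       __attribute__ ((access (write_only, 1, 2)));
   a call "fill_buf (p, sz)" leads to check_region_for_write on the
   region that "p" points to, and any diagnostic saved against it gets
   a note along the lines of:
     parameter 1 of 'fill_buf' marked with attribute 'access (write_only, 1, 2)'
   via the annotating context above.  */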
2554 /* Handle a call CALL to a function with unknown behavior.
2556 Traverse the regions in this model, determining what regions are
2557 reachable from pointer arguments to CALL and from global variables,
2558 recursively.
2560 Set all reachable regions to new unknown values and purge sm-state
2561 from their values, and from values that point to them. */
2563 void
2564 region_model::handle_unrecognized_call (const gcall *call,
2565 region_model_context *ctxt)
2567 tree fndecl = get_fndecl_for_call (call, ctxt);
2569 if (fndecl && ctxt)
2570 check_external_function_for_access_attr (call, fndecl, ctxt);
2572 reachable_regions reachable_regs (this);
2574 /* Determine the reachable regions and their mutability. */
2576 /* Add globals and regions that already escaped in previous
2577 unknown calls. */
2578 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
2579 &reachable_regs);
2581 /* Params that are pointers. */
2582 tree iter_param_types = NULL_TREE;
2583 if (fndecl)
2584 iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2585 for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
2587 /* Track expected param type, where available. */
2588 tree param_type = NULL_TREE;
2589 if (iter_param_types)
2591 param_type = TREE_VALUE (iter_param_types);
2592 gcc_assert (param_type);
2593 iter_param_types = TREE_CHAIN (iter_param_types);
2596 tree parm = gimple_call_arg (call, arg_idx);
2597 const svalue *parm_sval = get_rvalue (parm, ctxt);
2598 reachable_regs.handle_parm (parm_sval, param_type);
2602 uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;
2604 /* Purge sm-state for the svalues that were reachable,
2605 both in non-mutable and mutable form. */
2606 for (svalue_set::iterator iter
2607 = reachable_regs.begin_reachable_svals ();
2608 iter != reachable_regs.end_reachable_svals (); ++iter)
2610 const svalue *sval = (*iter);
2611 if (ctxt)
2612 ctxt->on_unknown_change (sval, false);
2614 for (svalue_set::iterator iter
2615 = reachable_regs.begin_mutable_svals ();
2616 iter != reachable_regs.end_mutable_svals (); ++iter)
2618 const svalue *sval = (*iter);
2619 if (ctxt)
2620 ctxt->on_unknown_change (sval, true);
2621 if (uncertainty)
2622 uncertainty->on_mutable_sval_at_unknown_call (sval);
2625 /* Mark any clusters that have escaped. */
2626 reachable_regs.mark_escaped_clusters (ctxt);
2628 /* Update bindings for all clusters that have escaped, whether above,
2629 or previously. */
2630 m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
2631 conjured_purge (this, ctxt));
2633 /* Purge dynamic extents from any regions that have escaped mutably:
2634 realloc could have been called on them. */
2635 for (hash_set<const region *>::iterator
2636 iter = reachable_regs.begin_mutable_base_regs ();
2637 iter != reachable_regs.end_mutable_base_regs ();
2638 ++iter)
2640 const region *base_reg = (*iter);
2641 unset_dynamic_extents (base_reg);
2645 /* Traverse the regions in this model, determining what regions are
2646 reachable from the store and populating *OUT.
2648 If EXTRA_SVAL is non-NULL, treat it as an additional "root"
2649 for reachability (for handling return values from functions when
2650 analyzing return of the only function on the stack).
2652 If UNCERTAINTY is non-NULL, treat any svalues that were recorded
2653 within it as being maybe-bound as additional "roots" for reachability.
2655 Find svalues that haven't leaked. */
2657 void
2658 region_model::get_reachable_svalues (svalue_set *out,
2659 const svalue *extra_sval,
2660 const uncertainty_t *uncertainty)
2662 reachable_regions reachable_regs (this);
2664 /* Add globals and regions that already escaped in previous
2665 unknown calls. */
2666 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
2667 &reachable_regs);
2669 if (extra_sval)
2670 reachable_regs.handle_sval (extra_sval);
2672 if (uncertainty)
2673 for (uncertainty_t::iterator iter
2674 = uncertainty->begin_maybe_bound_svals ();
2675 iter != uncertainty->end_maybe_bound_svals (); ++iter)
2676 reachable_regs.handle_sval (*iter);
2678 /* Get regions for locals that have explicitly bound values. */
2679 for (store::cluster_map_t::iterator iter = m_store.begin ();
2680 iter != m_store.end (); ++iter)
2682 const region *base_reg = (*iter).first;
2683 if (const region *parent = base_reg->get_parent_region ())
2684 if (parent->get_kind () == RK_FRAME)
2685 reachable_regs.add (base_reg, false);
2688 /* Populate *OUT based on the values that were reachable. */
2689 for (svalue_set::iterator iter
2690 = reachable_regs.begin_reachable_svals ();
2691 iter != reachable_regs.end_reachable_svals (); ++iter)
2692 out->add (*iter);
2695 /* Update this model for the RETURN_STMT, using CTXT to report any
2696 diagnostics. */
2698 void
2699 region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
2701 tree callee = get_current_function ()->decl;
2702 tree lhs = DECL_RESULT (callee);
2703 tree rhs = gimple_return_retval (return_stmt);
2705 if (lhs && rhs)
2707 const svalue *sval = get_rvalue (rhs, ctxt);
2708 const region *ret_reg = get_lvalue (lhs, ctxt);
2709 set_value (ret_reg, sval, ctxt);
2713 /* Update this model for a call and return of setjmp/sigsetjmp at CALL within
2714 ENODE, using CTXT to report any diagnostics.
2716 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
2717 0), as opposed to any second return due to longjmp/siglongjmp. */
2719 void
2720 region_model::on_setjmp (const gcall *call, const exploded_node *enode,
2721 region_model_context *ctxt)
2723 const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
2724 const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
2725 ctxt);
2727 /* Create a setjmp_svalue for this call and store it in BUF_REG's
2728 region. */
2729 if (buf_reg)
2731 setjmp_record r (enode, call);
2732 const svalue *sval
2733 = m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
2734 set_value (buf_reg, sval, ctxt);
2737 /* Direct calls to setjmp return 0. */
2738 if (tree lhs = gimple_call_lhs (call))
2740 const svalue *new_sval
2741 = m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
2742 const region *lhs_reg = get_lvalue (lhs, ctxt);
2743 set_value (lhs_reg, new_sval, ctxt);
2747 /* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
2748 to a "setjmp" at SETJMP_CALL where the final stack depth should be
2749 SETJMP_STACK_DEPTH. Pop any stack frames. Leak detection is *not*
2750 done, and should be done by the caller. */
2752 void
2753 region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
2754 int setjmp_stack_depth, region_model_context *ctxt)
2756 /* Evaluate the val, using the frame of the "longjmp". */
2757 tree fake_retval = gimple_call_arg (longjmp_call, 1);
2758 const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);
2760 /* Pop any frames until we reach the stack depth of the function where
2761 setjmp was called. */
2762 gcc_assert (get_stack_depth () >= setjmp_stack_depth);
2763 while (get_stack_depth () > setjmp_stack_depth)
2764 pop_frame (NULL, NULL, ctxt);
2766 gcc_assert (get_stack_depth () == setjmp_stack_depth);
2768 /* Assign to the LHS of the "setjmp" call. */
2769 if (tree lhs = gimple_call_lhs (setjmp_call))
2771 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
2772 const svalue *zero_sval
2773 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
2774 tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
2775 /* If we have 0, use 1. */
2776 if (eq_zero.is_true ())
2778 const svalue *one_sval
2779 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
2780 fake_retval_sval = one_sval;
2782 else
2784 /* Otherwise note that the value is nonzero. */
2785 m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
2788 /* Decorate the return value from setjmp as being unmergeable,
2789 so that we don't attempt to merge states with it as zero
2790 with states in which it's nonzero, leading to a clean distinction
2791 in the exploded_graph between the first return and the second
2792 return. */
2793 fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);
2795 const region *lhs_reg = get_lvalue (lhs, ctxt);
2796 set_value (lhs_reg, fake_retval_sval, ctxt);
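/* For example:
     if (setjmp (env) == 0)
       ...  first, direct return: on_setjmp binds a setjmp_svalue
            into env's region and sets the result to 0 ...
     else
       ...  second return: on_longjmp pops frames back to setjmp's
            stack depth and sets the result to longjmp's val argument,
            or to 1 if val was 0, marked unmergeable as above ...  */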
2800 /* Update this region_model for a phi stmt of the form
2801 LHS = PHI <...RHS...>
2802 where RHS is for the appropriate edge.
2803 Get state from OLD_STATE so that all of the phi stmts for a basic block
2804 are effectively handled simultaneously. */
2806 void
2807 region_model::handle_phi (const gphi *phi,
2808 tree lhs, tree rhs,
2809 const region_model &old_state,
2810 region_model_context *ctxt)
2812 /* For now, don't bother tracking the .MEM SSA names. */
2813 if (tree var = SSA_NAME_VAR (lhs))
2814 if (TREE_CODE (var) == VAR_DECL)
2815 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
2816 return;
2818 const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
2819 const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
2821 set_value (dst_reg, src_sval, ctxt);
2823 if (ctxt)
2824 ctxt->on_phi (phi, rhs);
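/* E.g. given a loop header with phis along a back edge:
     # a_10 = PHI <..., b_11(4)>
     # b_11 = PHI <..., a_10(4)>
   reading each RHS from OLD_STATE rather than from this model ensures
   that handling the first phi doesn't clobber the input of the second:
   all of the phis observe the state as it was at the start of the
   edge, matching their parallel-assignment semantics.  */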
2827 /* Implementation of region_model::get_lvalue; the latter adds type-checking.
2829 Get the id of the region for PV within this region_model,
2830 emitting any diagnostics to CTXT. */
2832 const region *
2833 region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
2835 tree expr = pv.m_tree;
2837 gcc_assert (expr);
2839 switch (TREE_CODE (expr))
2841 default:
2842 return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
2843 dump_location_t ());
2845 case ARRAY_REF:
2847 tree array = TREE_OPERAND (expr, 0);
2848 tree index = TREE_OPERAND (expr, 1);
2850 const region *array_reg = get_lvalue (array, ctxt);
2851 const svalue *index_sval = get_rvalue (index, ctxt);
2852 return m_mgr->get_element_region (array_reg,
2853 TREE_TYPE (TREE_TYPE (array)),
2854 index_sval);
2856 break;
2858 case BIT_FIELD_REF:
2860 tree inner_expr = TREE_OPERAND (expr, 0);
2861 const region *inner_reg = get_lvalue (inner_expr, ctxt);
2862 tree num_bits = TREE_OPERAND (expr, 1);
2863 tree first_bit_offset = TREE_OPERAND (expr, 2);
2864 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2865 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2866 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2867 TREE_INT_CST_LOW (num_bits));
2868 return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
2870 break;
2872 case MEM_REF:
2874 tree ptr = TREE_OPERAND (expr, 0);
2875 tree offset = TREE_OPERAND (expr, 1);
2876 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2877 const svalue *offset_sval = get_rvalue (offset, ctxt);
2878 const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
2879 return m_mgr->get_offset_region (star_ptr,
2880 TREE_TYPE (expr),
2881 offset_sval);
2883 break;
2885 case FUNCTION_DECL:
2886 return m_mgr->get_region_for_fndecl (expr);
2888 case LABEL_DECL:
2889 return m_mgr->get_region_for_label (expr);
2891 case VAR_DECL:
2892 /* Handle globals. */
2893 if (is_global_var (expr))
2894 return m_mgr->get_region_for_global (expr);
2896 /* Fall through. */
2898 case SSA_NAME:
2899 case PARM_DECL:
2900 case RESULT_DECL:
2902 gcc_assert (TREE_CODE (expr) == SSA_NAME
2903 || TREE_CODE (expr) == PARM_DECL
2904 || TREE_CODE (expr) == VAR_DECL
2905 || TREE_CODE (expr) == RESULT_DECL);
2907 int stack_index = pv.m_stack_depth;
2908 const frame_region *frame = get_frame_at_index (stack_index);
2909 gcc_assert (frame);
2910 return frame->get_region_for_local (m_mgr, expr, ctxt);
2913 case COMPONENT_REF:
2915 /* obj.field */
2916 tree obj = TREE_OPERAND (expr, 0);
2917 tree field = TREE_OPERAND (expr, 1);
2918 const region *obj_reg = get_lvalue (obj, ctxt);
2919 return m_mgr->get_field_region (obj_reg, field);
2921 break;
2923 case STRING_CST:
2924 return m_mgr->get_region_for_string (expr);
2928 /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
2930 static void
2931 assert_compat_types (tree src_type, tree dst_type)
2933 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2935 #if CHECKING_P
2936 if (!(useless_type_conversion_p (src_type, dst_type)))
2937 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
2938 #endif
2942 /* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
2944 bool
2945 compat_types_p (tree src_type, tree dst_type)
2947 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2948 if (!(useless_type_conversion_p (src_type, dst_type)))
2949 return false;
2950 return true;
2953 /* Get the region for PV within this region_model,
2954 emitting any diagnostics to CTXT. */
2956 const region *
2957 region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
2959 if (pv.m_tree == NULL_TREE)
2960 return NULL;
2962 const region *result_reg = get_lvalue_1 (pv, ctxt);
2963 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
2964 return result_reg;
2967 /* Get the region for EXPR within this region_model (assuming the most
2968 recent stack frame if it's a local). */
2970 const region *
2971 region_model::get_lvalue (tree expr, region_model_context *ctxt) const
2973 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2976 /* Implementation of region_model::get_rvalue; the latter adds type-checking.
2978 Get the value of PV within this region_model,
2979 emitting any diagnostics to CTXT. */
2981 const svalue *
2982 region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
2984 gcc_assert (pv.m_tree);
2986 switch (TREE_CODE (pv.m_tree))
2988 default:
2989 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
2991 case ADDR_EXPR:
2993 /* "&EXPR". */
2994 tree expr = pv.m_tree;
2995 tree op0 = TREE_OPERAND (expr, 0);
2996 const region *expr_reg = get_lvalue (op0, ctxt);
2997 return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
2999 break;
3001 case BIT_FIELD_REF:
3003 tree expr = pv.m_tree;
3004 tree op0 = TREE_OPERAND (expr, 0);
3005 const region *reg = get_lvalue (op0, ctxt);
3006 tree num_bits = TREE_OPERAND (expr, 1);
3007 tree first_bit_offset = TREE_OPERAND (expr, 2);
3008 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
3009 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
3010 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
3011 TREE_INT_CST_LOW (num_bits));
3012 return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
3015 case SSA_NAME:
3016 case VAR_DECL:
3017 case PARM_DECL:
3018 case RESULT_DECL:
3019 case ARRAY_REF:
3021 const region *reg = get_lvalue (pv, ctxt);
3022 return get_store_value (reg, ctxt);
3025 case REALPART_EXPR:
3026 case IMAGPART_EXPR:
3027 case VIEW_CONVERT_EXPR:
3029 tree expr = pv.m_tree;
3030 tree arg = TREE_OPERAND (expr, 0);
3031 const svalue *arg_sval = get_rvalue (arg, ctxt);
3032 const svalue *sval_unaryop
3033 = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
3034 arg_sval);
3035 return sval_unaryop;
3038 case INTEGER_CST:
3039 case REAL_CST:
3040 case COMPLEX_CST:
3041 case VECTOR_CST:
3042 case STRING_CST:
3043 return m_mgr->get_or_create_constant_svalue (pv.m_tree);
3045 case POINTER_PLUS_EXPR:
3047 tree expr = pv.m_tree;
3048 tree ptr = TREE_OPERAND (expr, 0);
3049 tree offset = TREE_OPERAND (expr, 1);
3050 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
3051 const svalue *offset_sval = get_rvalue (offset, ctxt);
3052 const svalue *sval_binop
3053 = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
3054 ptr_sval, offset_sval);
3055 return sval_binop;
3058 /* Binary ops. */
3059 case PLUS_EXPR:
3060 case MULT_EXPR:
3062 tree expr = pv.m_tree;
3063 tree arg0 = TREE_OPERAND (expr, 0);
3064 tree arg1 = TREE_OPERAND (expr, 1);
3065 const svalue *arg0_sval = get_rvalue (arg0, ctxt);
3066 const svalue *arg1_sval = get_rvalue (arg1, ctxt);
3067 const svalue *sval_binop
3068 = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
3069 arg0_sval, arg1_sval);
3070 return sval_binop;
3073 case COMPONENT_REF:
3074 case MEM_REF:
3076 const region *ref_reg = get_lvalue (pv, ctxt);
3077 return get_store_value (ref_reg, ctxt);
3079 case OBJ_TYPE_REF:
3081 tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
3082 return get_rvalue (expr, ctxt);
3087 /* Get the value of PV within this region_model,
3088 emitting any diagnostics to CTXT. */
3090 const svalue *
3091 region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
3093 if (pv.m_tree == NULL_TREE)
3094 return NULL;
3096 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
3098 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
3100 result_sval = check_for_poison (result_sval, pv.m_tree, ctxt);
3102 return result_sval;
3105 /* Get the value of EXPR within this region_model (assuming the most
3106 recent stack frame if it's a local). */
3108 const svalue *
3109 region_model::get_rvalue (tree expr, region_model_context *ctxt) const
3111 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
3114 /* Return true if this model is on a path with "main" as the entrypoint
3115 (as opposed to one in which we're merely analyzing a subset of the
3116 path through the code). */
3118 bool
3119 region_model::called_from_main_p () const
3121 if (!m_current_frame)
3122 return false;
3123 /* Determine if the oldest stack frame in this model is for "main". */
3124 const frame_region *frame0 = get_frame_at_index (0);
3125 gcc_assert (frame0);
3126 return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
3129 /* Subroutine of region_model::get_store_value for when REG is (or is within)
3130 a global variable that hasn't been touched since the start of this path
3131 (or was implicitly touched due to a call to an unknown function). */
3133 const svalue *
3134 region_model::get_initial_value_for_global (const region *reg) const
3136 /* Get the decl that REG is for (or is within). */
3137 const decl_region *base_reg
3138 = reg->get_base_region ()->dyn_cast_decl_region ();
3139 gcc_assert (base_reg);
3140 tree decl = base_reg->get_decl ();
3142 /* Special-case: to avoid having to explicitly update all previously
3143 untracked globals when calling an unknown fn, they implicitly have
3144 an unknown value if an unknown call has occurred, unless this is
3145 static to-this-TU and hasn't escaped. Globals that have escaped
3146 are explicitly tracked, so we shouldn't hit this case for them. */
3147 if (m_store.called_unknown_fn_p ()
3148 && TREE_PUBLIC (decl)
3149 && !TREE_READONLY (decl))
3150 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
3152 /* If we are on a path whose entrypoint is "main", and we have a
3153 global decl defined in this TU that hasn't been touched yet, then
3154 the initial value of REG can be taken from the initialization value
3155 of the decl. */
3156 if (called_from_main_p () || TREE_READONLY (decl))
3158 /* Attempt to get the initializer value for base_reg. */
3159 if (const svalue *base_reg_init
3160 = base_reg->get_svalue_for_initializer (m_mgr))
3162 if (reg == base_reg)
3163 return base_reg_init;
3164 else
3166 /* Get the value for REG within base_reg_init. */
3167 binding_cluster c (base_reg);
3168 c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
3169 const svalue *sval
3170 = c.get_any_binding (m_mgr->get_store_manager (), reg);
3171 if (sval)
3173 if (reg->get_type ())
3174 sval = m_mgr->get_or_create_cast (reg->get_type (),
3175 sval);
3176 return sval;
3182 /* Otherwise, return INIT_VAL(REG). */
3183 return m_mgr->get_or_create_initial_value (reg);
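/* For example, with:
     static int g = 42;
     int main (void) { return g; }
   the read of "g" on the path from "main" can use the initializer and
   yield 42, whereas when analyzing an arbitrary entrypoint we merely
   get INIT_VAL(g) (or an unknown value, if a call to an unknown
   function could have touched a non-const public global).  */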
3186 /* Get a value for REG, looking it up in the store, or otherwise falling
3187 back to "initial" or "unknown" values.
3188 Use CTXT to report any warnings associated with reading from REG. */
3190 const svalue *
3191 region_model::get_store_value (const region *reg,
3192 region_model_context *ctxt) const
3194 check_region_for_read (reg, ctxt);
3196 /* Special-case: handle var_decls in the constant pool. */
3197 if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
3198 if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
3199 return sval;
3201 const svalue *sval
3202 = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
3203 if (sval)
3205 if (reg->get_type ())
3206 sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
3207 return sval;
3210 /* Special-case: read at a constant index within a STRING_CST. */
3211 if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
3212 if (tree byte_offset_cst
3213 = offset_reg->get_byte_offset ()->maybe_get_constant ())
3214 if (const string_region *str_reg
3215 = reg->get_parent_region ()->dyn_cast_string_region ())
3217 tree string_cst = str_reg->get_string_cst ();
3218 if (const svalue *char_sval
3219 = m_mgr->maybe_get_char_from_string_cst (string_cst,
3220 byte_offset_cst))
3221 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
3224 /* Special-case: read the initial char of a STRING_CST. */
3225 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
3226 if (const string_region *str_reg
3227 = cast_reg->get_original_region ()->dyn_cast_string_region ())
3229 tree string_cst = str_reg->get_string_cst ();
3230 tree byte_offset_cst = build_int_cst (integer_type_node, 0);
3231 if (const svalue *char_sval
3232 = m_mgr->maybe_get_char_from_string_cst (string_cst,
3233 byte_offset_cst))
3234 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
3237 /* Otherwise we implicitly have the initial value of the region
3238 (if the cluster had been touched, binding_cluster::get_any_binding
3239 would have returned UNKNOWN, and we would already have returned
3240 that above). */
3242 /* Handle globals. */
3243 if (reg->get_base_region ()->get_parent_region ()->get_kind ()
3244 == RK_GLOBALS)
3245 return get_initial_value_for_global (reg);
3247 return m_mgr->get_or_create_initial_value (reg);
3250 /* Return false if REG does not exist, true if it may do.
3251 This is for detecting regions within the stack that don't exist anymore
3252 after frames are popped. */
3254 bool
3255 region_model::region_exists_p (const region *reg) const
3257 /* If within a stack frame, check that the stack frame is live. */
3258 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
3260 /* Check that the current frame is the enclosing frame, or is called
3261 by it. */
3262 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
3263 iter_frame = iter_frame->get_calling_frame ())
3264 if (iter_frame == enclosing_frame)
3265 return true;
3266 return false;
3269 return true;
3272 /* Get a region for referencing PTR_SVAL, creating a region if need be, and
3273 potentially generating warnings via CTXT.
3274 PTR_SVAL must be of pointer type.
3275 PTR_TREE if non-NULL can be used when emitting diagnostics. */
3277 const region *
3278 region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
3279 region_model_context *ctxt) const
3281 gcc_assert (ptr_sval);
3282 gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));
3284 /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
3285 as a constraint. This suppresses false positives from
3286 -Wanalyzer-null-dereference for the case where we later have an
3287 if (PTR_SVAL) that would occur if we considered the false branch
3288 and transitioned the malloc state machine from start->null. */
3289 tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
3290 const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
3291 m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
3293 switch (ptr_sval->get_kind ())
3295 default:
3296 break;
3298 case SK_REGION:
3300 const region_svalue *region_sval
3301 = as_a <const region_svalue *> (ptr_sval);
3302 return region_sval->get_pointee ();
3305 case SK_BINOP:
3307 const binop_svalue *binop_sval
3308 = as_a <const binop_svalue *> (ptr_sval);
3309 switch (binop_sval->get_op ())
3311 case POINTER_PLUS_EXPR:
3313 /* If we have a symbolic value expressing pointer arithmetic,
3314 try to convert it to a suitable region. */
3315 const region *parent_region
3316 = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
3317 const svalue *offset = binop_sval->get_arg1 ();
3318 tree type = TREE_TYPE (ptr_sval->get_type ());
3319 return m_mgr->get_offset_region (parent_region, type, offset);
3321 default:
3322 break;
3325 break;
3327 case SK_POISONED:
3329 if (ctxt)
3331 tree ptr = get_representative_tree (ptr_sval);
3332 /* If we can't get a representative tree for PTR_SVAL
3333 (e.g. if it hasn't been bound into the store), then
3334 fall back on PTR_TREE, if non-NULL. */
3335 if (!ptr)
3336 ptr = ptr_tree;
3337 if (ptr)
3339 const poisoned_svalue *poisoned_sval
3340 = as_a <const poisoned_svalue *> (ptr_sval);
3341 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
3342 ctxt->warn (new poisoned_value_diagnostic (ptr, pkind, NULL));
3346 break;
3349 return m_mgr->get_symbolic_region (ptr_sval);
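/* E.g. dereferencing the address "&buf" (a region_svalue) yields
   buf's region directly; dereferencing "p_4 + 16" (a POINTER_PLUS_EXPR
   binop_svalue) yields an offset_region 16 bytes into whatever p_4
   points to; any other symbolic pointer falls back to a
   symbolic_region keyed on the pointer value.  */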
3352 /* Attempt to get BITS within any value of REG, as TYPE.
3353 In particular, extract values from compound_svalues for the case
3354 where there's a concrete binding at BITS.
3355 Return an unknown svalue if we can't handle the given case.
3356 Use CTXT to report any warnings associated with reading from REG. */
3358 const svalue *
3359 region_model::get_rvalue_for_bits (tree type,
3360 const region *reg,
3361 const bit_range &bits,
3362 region_model_context *ctxt) const
3364 const svalue *sval = get_store_value (reg, ctxt);
3365 return m_mgr->get_or_create_bits_within (type, bits, sval);
3368 /* A subclass of pending_diagnostic for complaining about writes to
3369 constant regions of memory. */
3371 class write_to_const_diagnostic
3372 : public pending_diagnostic_subclass<write_to_const_diagnostic>
3374 public:
3375 write_to_const_diagnostic (const region *reg, tree decl)
3376 : m_reg (reg), m_decl (decl)
3379 const char *get_kind () const final override
3381 return "write_to_const_diagnostic";
3384 bool operator== (const write_to_const_diagnostic &other) const
3386 return (m_reg == other.m_reg
3387 && m_decl == other.m_decl);
3390 int get_controlling_option () const final override
3392 return OPT_Wanalyzer_write_to_const;
3395 bool emit (rich_location *rich_loc) final override
3397 auto_diagnostic_group d;
3398 bool warned;
3399 switch (m_reg->get_kind ())
3401 default:
3402 warned = warning_at (rich_loc, get_controlling_option (),
3403 "write to %<const%> object %qE", m_decl);
3404 break;
3405 case RK_FUNCTION:
3406 warned = warning_at (rich_loc, get_controlling_option (),
3407 "write to function %qE", m_decl);
3408 break;
3409 case RK_LABEL:
3410 warned = warning_at (rich_loc, get_controlling_option (),
3411 "write to label %qE", m_decl);
3412 break;
3414 if (warned)
3415 inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
3416 return warned;
3419 label_text describe_final_event (const evdesc::final_event &ev) final override
3421 switch (m_reg->get_kind ())
3423 default:
3424 return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
3425 case RK_FUNCTION:
3426 return ev.formatted_print ("write to function %qE here", m_decl);
3427 case RK_LABEL:
3428 return ev.formatted_print ("write to label %qE here", m_decl);
3432 private:
3433 const region *m_reg;
3434 tree m_decl;
3437 /* A subclass of pending_diagnostic for complaining about writes to
3438 string literals. */
3440 class write_to_string_literal_diagnostic
3441 : public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
3443 public:
3444 write_to_string_literal_diagnostic (const region *reg)
3445 : m_reg (reg)
3448 const char *get_kind () const final override
3450 return "write_to_string_literal_diagnostic";
3453 bool operator== (const write_to_string_literal_diagnostic &other) const
3455 return m_reg == other.m_reg;
3458 int get_controlling_option () const final override
3460 return OPT_Wanalyzer_write_to_string_literal;
3463 bool emit (rich_location *rich_loc) final override
3465 return warning_at (rich_loc, get_controlling_option (),
3466 "write to string literal");
3467 /* Ideally we would show the location of the STRING_CST as well,
3468 but it is not available at this point. */
3471 label_text describe_final_event (const evdesc::final_event &ev) final override
3473 return ev.formatted_print ("write to string literal here");
3476 private:
3477 const region *m_reg;
3480 /* Use CTXT to warn if DEST_REG is a region that shouldn't be written to. */
3482 void
3483 region_model::check_for_writable_region (const region* dest_reg,
3484 region_model_context *ctxt) const
3486 /* Fail gracefully if CTXT is NULL. */
3487 if (!ctxt)
3488 return;
3490 const region *base_reg = dest_reg->get_base_region ();
3491 switch (base_reg->get_kind ())
3493 default:
3494 break;
3495 case RK_FUNCTION:
3497 const function_region *func_reg = as_a <const function_region *> (base_reg);
3498 tree fndecl = func_reg->get_fndecl ();
3499 ctxt->warn (new write_to_const_diagnostic (func_reg, fndecl));
3501 break;
3502 case RK_LABEL:
3504 const label_region *label_reg = as_a <const label_region *> (base_reg);
3505 tree label = label_reg->get_label ();
3506 ctxt->warn (new write_to_const_diagnostic (label_reg, label));
3508 break;
3509 case RK_DECL:
3511 const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
3512 tree decl = decl_reg->get_decl ();
3513 /* Warn about writes to const globals.
3514 Don't warn for writes to const locals, and params in particular,
3515 since we would warn in push_frame when setting them up (e.g. the
3516 "this" param is "T* const"). */
3517 if (TREE_READONLY (decl)
3518 && is_global_var (decl))
3519 ctxt->warn (new write_to_const_diagnostic (dest_reg, decl));
3521 break;
3522 case RK_STRING:
3523 ctxt->warn (new write_to_string_literal_diagnostic (dest_reg));
3524 break;
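/* For example:
     char *s = "foo";
     s[0] = 'F';
   writes into a string_region, triggering the
   -Wanalyzer-write-to-string-literal diagnostic above; likewise,
   writes to functions, to labels, and to "const" globals are flagged
   as write_to_const_diagnostic.  */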
3528 /* Get the capacity of REG in bytes. */
3530 const svalue *
3531 region_model::get_capacity (const region *reg) const
3533 switch (reg->get_kind ())
3535 default:
3536 break;
3537 case RK_DECL:
3539 const decl_region *decl_reg = as_a <const decl_region *> (reg);
3540 tree decl = decl_reg->get_decl ();
3541 if (TREE_CODE (decl) == SSA_NAME)
3543 tree type = TREE_TYPE (decl);
3544 tree size = TYPE_SIZE (type);
3545 return get_rvalue (size, NULL);
3547 else
3549 tree size = decl_init_size (decl, false);
3550 if (size)
3551 return get_rvalue (size, NULL);
3554 break;
3555 case RK_SIZED:
3556 /* Look through sized regions to get at the capacity
3557 of the underlying regions. */
3558 return get_capacity (reg->get_parent_region ());
3561 if (const svalue *recorded = get_dynamic_extents (reg))
3562 return recorded;
3564 return m_mgr->get_or_create_unknown_svalue (sizetype);
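/* E.g. for:
     char buf[64];
   the capacity of buf's decl_region is the constant 64 (via
   decl_init_size), whereas for a heap allocation such as "malloc (n)"
   the capacity is the dynamic extent recorded at the allocation site:
   the svalue for "n".  */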
3567 /* Return the string size, including the 0-terminator, if SVAL is a
3568 constant_svalue holding a string. Otherwise, return an unknown_svalue. */
3570 const svalue *
3571 region_model::get_string_size (const svalue *sval) const
3573 tree cst = sval->maybe_get_constant ();
3574 if (!cst || TREE_CODE (cst) != STRING_CST)
3575 return m_mgr->get_or_create_unknown_svalue (size_type_node);
3577 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
3578 return m_mgr->get_or_create_constant_svalue (out);
3581 /* Return the string size, including the 0-terminator, if REG is a
3582 string_region. Otherwise, return an unknown_svalue. */
3584 const svalue *
3585 region_model::get_string_size (const region *reg) const
3587 const string_region *str_reg = dyn_cast <const string_region *> (reg);
3588 if (!str_reg)
3589 return m_mgr->get_or_create_unknown_svalue (size_type_node);
3591 tree cst = str_reg->get_string_cst ();
3592 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
3593 return m_mgr->get_or_create_constant_svalue (out);
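/* E.g. for the literal "abc", TREE_STRING_LENGTH is 4 (three chars
   plus the terminating NUL), so both overloads above yield a constant
   svalue of 4, of type size_t.  */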
3596 /* If CTXT is non-NULL, use it to warn about any problems accessing REG,
3597 using DIR to determine if this access is a read or write. */
3599 void
3600 region_model::check_region_access (const region *reg,
3601 enum access_direction dir,
3602 region_model_context *ctxt) const
3604 /* Fail gracefully if CTXT is NULL. */
3605 if (!ctxt)
3606 return;
3608 check_region_for_taint (reg, dir, ctxt);
3609 check_region_bounds (reg, dir, ctxt);
3611 switch (dir)
3613 default:
3614 gcc_unreachable ();
3615 case DIR_READ:
3616 /* Currently a no-op. */
3617 break;
3618 case DIR_WRITE:
3619 check_for_writable_region (reg, ctxt);
3620 break;
3624 /* If CTXT is non-NULL, use it to warn about any problems writing to REG. */
3626 void
3627 region_model::check_region_for_write (const region *dest_reg,
3628 region_model_context *ctxt) const
3630 check_region_access (dest_reg, DIR_WRITE, ctxt);
3633 /* If CTXT is non-NULL, use it to warn about any problems reading from REG. */
3635 void
3636 region_model::check_region_for_read (const region *src_reg,
3637 region_model_context *ctxt) const
3639 check_region_access (src_reg, DIR_READ, ctxt);
3642 /* Concrete subclass for casts of pointers that lead to trailing bytes. */
3644 class dubious_allocation_size
3645 : public pending_diagnostic_subclass<dubious_allocation_size>
3647 public:
3648 dubious_allocation_size (const region *lhs, const region *rhs)
3649 : m_lhs (lhs), m_rhs (rhs), m_expr (NULL_TREE)
3652 dubious_allocation_size (const region *lhs, const region *rhs,
3653 tree expr)
3654 : m_lhs (lhs), m_rhs (rhs), m_expr (expr)
3657 const char *get_kind () const final override
3659 return "dubious_allocation_size";
3662 bool operator== (const dubious_allocation_size &other) const
3664 return m_lhs == other.m_lhs && m_rhs == other.m_rhs
3665 && pending_diagnostic::same_tree_p (m_expr, other.m_expr);
3668 int get_controlling_option () const final override
3670 return OPT_Wanalyzer_allocation_size;
3673 bool emit (rich_location *rich_loc) final override
3675 diagnostic_metadata m;
3676 m.add_cwe (131);
3678 return warning_meta (rich_loc, m, get_controlling_option (),
3679 "allocated buffer size is not a multiple"
3680 " of the pointee's size");
3683 label_text
3684 describe_region_creation_event (const evdesc::region_creation &ev) final
3685 override
3687 m_allocation_event = &ev;
3688 if (m_expr)
3690 if (TREE_CODE (m_expr) == INTEGER_CST)
3691 return ev.formatted_print ("allocated %E bytes here", m_expr);
3692 else
3693 return ev.formatted_print ("allocated %qE bytes here", m_expr);
3696 return ev.formatted_print ("allocated here");
3699 label_text describe_final_event (const evdesc::final_event &ev) final
3700 override
3702 tree pointee_type = TREE_TYPE (m_lhs->get_type ());
3703 if (m_allocation_event)
3704 /* If we saw a region_creation event, the allocation was
3705 already described there; just describe the assignment. */
3706 return ev.formatted_print ("assigned to %qT here;"
3707 " %<sizeof (%T)%> is %qE",
3708 m_lhs->get_type (), pointee_type,
3709 size_in_bytes (pointee_type));
3711 if (m_expr)
3713 if (TREE_CODE (m_expr) == INTEGER_CST)
3714 return ev.formatted_print ("allocated %E bytes and assigned to"
3715 " %qT here; %<sizeof (%T)%> is %qE",
3716 m_expr, m_lhs->get_type (), pointee_type,
3717 size_in_bytes (pointee_type));
3718 else
3719 return ev.formatted_print ("allocated %qE bytes and assigned to"
3720 " %qT here; %<sizeof (%T)%> is %qE",
3721 m_expr, m_lhs->get_type (), pointee_type,
3722 size_in_bytes (pointee_type));
3725 return ev.formatted_print ("allocated and assigned to %qT here;"
3726 " %<sizeof (%T)%> is %qE",
3727 m_lhs->get_type (), pointee_type,
3728 size_in_bytes (pointee_type));
3731 void mark_interesting_stuff (interesting_t *interest) final override
3733 interest->add_region_creation (m_rhs);
3736 private:
3737 const region *m_lhs;
3738 const region *m_rhs;
3739 const tree m_expr;
3740 const evdesc::region_creation *m_allocation_event = NULL;
3743 /* Return true if the constant size CST is compatible with the pointee size POINTEE_SIZE_TREE (i.e. return false on dubious allocation sizes). */
3745 static bool
3746 capacity_compatible_with_type (tree cst, tree pointee_size_tree,
3747 bool is_struct)
3749 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
3750 gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);
3752 unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
3753 unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);
3755 if (is_struct)
3756 return alloc_size == 0 || alloc_size >= pointee_size;
3757 return alloc_size % pointee_size == 0;
3760 static bool
3761 capacity_compatible_with_type (tree cst, tree pointee_size_tree)
3763 return capacity_compatible_with_type (cst, pointee_size_tree, false);
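/* E.g. with a 4-byte int32_t:
     int32_t *i = (int32_t *) malloc (11);
   has CST == 11, which is not a multiple of 4, so this returns false
   (ultimately leading to a -Wanalyzer-allocation-size warning),
   whereas "malloc (12)" would be accepted.  For struct pointees, any
   size of zero or of at least one instance is accepted, e.g. to allow
   for trailing flexible array members.  */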
3766 /* Checks whether SVAL could be a multiple of SIZE_CST.
3768 It works by visiting all svalues inside SVAL until it reaches
3769 atomic nodes. From those, it goes back up again and adds each
3770 node that might be a multiple of SIZE_CST to the RESULT_SET. */
3772 class size_visitor : public visitor
3774 public:
3775 size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
3776 : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
3778 m_root_sval->accept (this);
3781 bool get_result ()
3783 return result_set.contains (m_root_sval);
3786 void visit_constant_svalue (const constant_svalue *sval) final override
3788 check_constant (sval->get_constant (), sval);
3791 void visit_unknown_svalue (const unknown_svalue *sval ATTRIBUTE_UNUSED)
3792 final override
3794 result_set.add (sval);
3797 void visit_poisoned_svalue (const poisoned_svalue *sval ATTRIBUTE_UNUSED)
3798 final override
3800 result_set.add (sval);
3803 void visit_unaryop_svalue (const unaryop_svalue *sval) final override
3805 const svalue *arg = sval->get_arg ();
3806 if (result_set.contains (arg))
3807 result_set.add (sval);
3810 void visit_binop_svalue (const binop_svalue *sval) final override
3812 const svalue *arg0 = sval->get_arg0 ();
3813 const svalue *arg1 = sval->get_arg1 ();
3815 if (sval->get_op () == MULT_EXPR)
3817 if (result_set.contains (arg0) || result_set.contains (arg1))
3818 result_set.add (sval);
3820 else
3822 if (result_set.contains (arg0) && result_set.contains (arg1))
3823 result_set.add (sval);
3827 void visit_repeated_svalue (const repeated_svalue *sval) final override
3829 sval->get_inner_svalue ()->accept (this);
3830 if (result_set.contains (sval->get_inner_svalue ()))
3831 result_set.add (sval);
3834 void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
3836 sval->get_arg ()->accept (this);
3837 if (result_set.contains (sval->get_arg ()))
3838 result_set.add (sval);
3841 void visit_widening_svalue (const widening_svalue *sval) final override
3843 const svalue *base = sval->get_base_svalue ();
3844 const svalue *iter = sval->get_iter_svalue ();
3846 if (result_set.contains (base) && result_set.contains (iter))
3847 result_set.add (sval);
3850 void visit_conjured_svalue (const conjured_svalue *sval ATTRIBUTE_UNUSED)
3851 final override
3853 equiv_class_id id (-1);
3854 if (m_cm->get_equiv_class_by_svalue (sval, &id))
3856 if (tree cst = id.get_obj (*m_cm).get_any_constant ())
3857 check_constant (cst, sval);
3858 else
3859 result_set.add (sval);
3863 void visit_asm_output_svalue (const asm_output_svalue *sval ATTRIBUTE_UNUSED)
3864 final override
3866 result_set.add (sval);
3869 void visit_const_fn_result_svalue (const const_fn_result_svalue
3870 *sval ATTRIBUTE_UNUSED) final override
3872 result_set.add (sval);
3875 private:
3876 void check_constant (tree cst, const svalue *sval)
3878 switch (TREE_CODE (cst))
3880 default:
3881 /* Assume all unhandled operands are compatible. */
3882 result_set.add (sval);
3883 break;
3884 case INTEGER_CST:
3885 if (capacity_compatible_with_type (cst, m_size_cst))
3886 result_set.add (sval);
3887 break;
3891 tree m_size_cst;
3892 const svalue *m_root_sval;
3893 constraint_manager *m_cm;
3894 svalue_set result_set; /* Used as a mapping of svalue*->bool. */
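/* E.g. when asking whether "n_5 * 4" could be a multiple of 4: the
   constant operand 4 is itself a multiple of 4, and for MULT_EXPR one
   such operand suffices, so the expression is accepted; for other
   binops such as "n_5 + 2" both operands would have to qualify, so it
   is rejected.  */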
3897 /* Return true if a struct or union either uses the inheritance pattern,
3898 where the first field is a base struct, or the flexible array member
3899 pattern, where the last field is an array without a specified size. */
3901 static bool
3902 struct_or_union_with_inheritance_p (tree struc)
3904 tree iter = TYPE_FIELDS (struc);
3905 if (iter == NULL_TREE)
3906 return false;
3907 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
3908 return true;
3910 tree last_field;
3911 while (iter != NULL_TREE)
3913 last_field = iter;
3914 iter = DECL_CHAIN (iter);
3917 if (last_field != NULL_TREE
3918 && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
3919 return true;
3921 return false;
3924 /* Return true if the lhs and rhs of an assignment have different types. */
3926 static bool
3927 is_any_cast_p (const gimple *stmt)
3929 if (const gassign *assign = dyn_cast <const gassign *> (stmt))
3930 return gimple_assign_cast_p (assign)
3931 || !pending_diagnostic::same_tree_p (
3932 TREE_TYPE (gimple_assign_lhs (assign)),
3933 TREE_TYPE (gimple_assign_rhs1 (assign)));
3934 else if (const gcall *call = dyn_cast <const gcall *> (stmt))
3936 tree lhs = gimple_call_lhs (call);
3937 return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
3938 TREE_TYPE (gimple_call_lhs (call)),
3939 gimple_call_return_type (call));
3942 return false;
3945 /* On pointer assignments, check whether the buffer size of
3946 RHS_SVAL is compatible with the type of the LHS_REG.
3947 Use a non-null CTXT to report allocation size warnings. */
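/* For instance (a hypothetical sketch of the kind of code this catches):

     int32_t *ptr = (int32_t *) malloc (3);

   where a buffer of 3 bytes is too small for (and not a multiple of)
   sizeof (int32_t), making the pointer assignment dubious.  */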
3949 void
3950 region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
3951 region_model_context *ctxt) const
3953 if (!ctxt || ctxt->get_stmt () == NULL)
3954 return;
3955 /* Only report warnings on assignments that actually change the type. */
3956 if (!is_any_cast_p (ctxt->get_stmt ()))
3957 return;
3959 const region_svalue *reg_sval = dyn_cast <const region_svalue *> (rhs_sval);
3960 if (!reg_sval)
3961 return;
3963 tree pointer_type = lhs_reg->get_type ();
3964 if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
3965 return;
3967 tree pointee_type = TREE_TYPE (pointer_type);
3968 /* Make sure that the type on the left-hand side actually has a size. */
3969 if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
3970 || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
3971 return;
3973 /* Bail out early on pointers to structs where we cannot
3974 deduce whether the buffer size is compatible. */
3975 bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
3976 if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
3977 return;
3979 tree pointee_size_tree = size_in_bytes (pointee_type);
3980 /* We give up if the type size is not known at compile-time or the
3981 type size is always compatible regardless of the buffer size. */
3982 if (TREE_CODE (pointee_size_tree) != INTEGER_CST
3983 || integer_zerop (pointee_size_tree)
3984 || integer_onep (pointee_size_tree))
3985 return;
3987 const region *rhs_reg = reg_sval->get_pointee ();
3988 const svalue *capacity = get_capacity (rhs_reg);
3989 switch (capacity->get_kind ())
3991 case svalue_kind::SK_CONSTANT:
3993 const constant_svalue *cst_cap_sval
3994 = as_a <const constant_svalue *> (capacity);
3995 tree cst_cap = cst_cap_sval->get_constant ();
3996 if (TREE_CODE (cst_cap) == INTEGER_CST
3997 && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
3998 is_struct))
3999 ctxt->warn (new dubious_allocation_size (lhs_reg, rhs_reg,
4000 cst_cap));
4002 break;
4003 default:
4005 if (!is_struct)
4007 size_visitor v (pointee_size_tree, capacity, m_constraints);
4008 if (!v.get_result ())
4010 tree expr = get_representative_tree (capacity);
4011 ctxt->warn (new dubious_allocation_size (lhs_reg, rhs_reg,
4012 expr));
4015 break;
4020 /* Set the value of the region given by LHS_REG to the value given
4021 by RHS_SVAL.
4022 Use CTXT to report any warnings associated with writing to LHS_REG. */
4024 void
4025 region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
4026 region_model_context *ctxt)
4028 gcc_assert (lhs_reg);
4029 gcc_assert (rhs_sval);
4031 check_region_size (lhs_reg, rhs_sval, ctxt);
4033 check_region_for_write (lhs_reg, ctxt);
4035 m_store.set_value (m_mgr->get_store_manager (), lhs_reg, rhs_sval,
4036 ctxt ? ctxt->get_uncertainty () : NULL);
4039 /* Set the value of the region given by LHS to the value given by RHS. */
4041 void
4042 region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
4044 const region *lhs_reg = get_lvalue (lhs, ctxt);
4045 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
4046 gcc_assert (lhs_reg);
4047 gcc_assert (rhs_sval);
4048 set_value (lhs_reg, rhs_sval, ctxt);
4051 /* Remove all bindings overlapping REG within the store. */
4053 void
4054 region_model::clobber_region (const region *reg)
4056 m_store.clobber_region (m_mgr->get_store_manager (), reg);
4059 /* Remove any bindings for REG within the store. */
4061 void
4062 region_model::purge_region (const region *reg)
4064 m_store.purge_region (m_mgr->get_store_manager (), reg);
4067 /* Fill REG with SVAL. */
4069 void
4070 region_model::fill_region (const region *reg, const svalue *sval)
4072 m_store.fill_region (m_mgr->get_store_manager (), reg, sval);
4075 /* Zero-fill REG. */
4077 void
4078 region_model::zero_fill_region (const region *reg)
4080 m_store.zero_fill_region (m_mgr->get_store_manager (), reg);
4083 /* Mark REG as having unknown content. */
4085 void
4086 region_model::mark_region_as_unknown (const region *reg,
4087 uncertainty_t *uncertainty)
4089 m_store.mark_region_as_unknown (m_mgr->get_store_manager (), reg,
4090 uncertainty);
4093 /* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
4094 this model. */
4096 tristate
4097 region_model::eval_condition (const svalue *lhs,
4098 enum tree_code op,
4099 const svalue *rhs) const
4101 /* For now, make no attempt to capture constraints on floating-point
4102 values. */
4103 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
4104 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
4105 return tristate::unknown ();
4107 tristate ts = eval_condition_without_cm (lhs, op, rhs);
4108 if (ts.is_known ())
4109 return ts;
4111 /* Otherwise, try constraints. */
4112 return m_constraints->eval_condition (lhs, op, rhs);
4115 /* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
4116 this model, without resorting to the constraint_manager.
4118 This is exposed so that impl_region_model_context::on_state_leak can
4119 check for equality part-way through region_model::purge_unused_svalues
4120 without risking creating new ECs. */
4122 tristate
4123 region_model::eval_condition_without_cm (const svalue *lhs,
4124 enum tree_code op,
4125 const svalue *rhs) const
4127 gcc_assert (lhs);
4128 gcc_assert (rhs);
4130 /* See what we know based on the values. */
4132 /* For now, make no attempt to capture constraints on floating-point
4133 values. */
4134 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
4135 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
4136 return tristate::unknown ();
4138 /* Unwrap any unmergeable values. */
4139 lhs = lhs->unwrap_any_unmergeable ();
4140 rhs = rhs->unwrap_any_unmergeable ();
4142 if (lhs == rhs)
4144 /* If we have the same svalue, then we have equality
4145 (apart from NaN-handling).
4146 TODO: should this definitely be the case for poisoned values? */
4147 /* Poisoned and unknown values are "unknowable". */
4148 if (lhs->get_kind () == SK_POISONED
4149 || lhs->get_kind () == SK_UNKNOWN)
4150 return tristate::TS_UNKNOWN;
4152 switch (op)
4154 case EQ_EXPR:
4155 case GE_EXPR:
4156 case LE_EXPR:
4157 return tristate::TS_TRUE;
4159 case NE_EXPR:
4160 case GT_EXPR:
4161 case LT_EXPR:
4162 return tristate::TS_FALSE;
4164 default:
4165 /* For other ops, use the logic below. */
4166 break;
4170 /* If we have a pair of region_svalues, compare them. */
4171 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
4172 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
4174 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
4175 if (res.is_known ())
4176 return res;
4177 /* Otherwise, only known through constraints. */
4180 /* If we have a pair of constants, compare them. */
4181 if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
4182 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
4183 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
4185 /* Handle comparison against zero. */
4186 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
4187 if (zerop (cst_rhs->get_constant ()))
4189 if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
4191 /* A region_svalue is a non-NULL pointer, except in certain
4192 special cases (see the comment for region::non_null_p). */
4193 const region *pointee = ptr->get_pointee ();
4194 if (pointee->non_null_p ())
4196 switch (op)
4198 default:
4199 gcc_unreachable ();
4201 case EQ_EXPR:
4202 case GE_EXPR:
4203 case LE_EXPR:
4204 return tristate::TS_FALSE;
4206 case NE_EXPR:
4207 case GT_EXPR:
4208 case LT_EXPR:
4209 return tristate::TS_TRUE;
4213 else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
4215 /* Treat offsets from a non-NULL pointer as being non-NULL. This
4216 isn't strictly true, in that eventually ptr++ will wrap
4217 around and be NULL, but that won't occur in practice, and so
4218 this assumption can be used to suppress what are effectively
4219 false positives that we shouldn't warn about. */
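/* e.g. (a sketch, using hypothetical SSA names): if p_1 is known to be
   non-NULL, then
     q_2 = p_1 + 16;
   is also treated as non-NULL when compared against zero.  */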
4220 if (binop->get_op () == POINTER_PLUS_EXPR)
4222 tristate lhs_ts
4223 = eval_condition_without_cm (binop->get_arg0 (),
4224 op, rhs);
4225 if (lhs_ts.is_known ())
4226 return lhs_ts;
4231 /* Handle rejection of equality for comparisons of the initial values of
4232 "external" values (such as params) with the address of locals. */
4233 if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
4234 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
4236 tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
4237 if (res.is_known ())
4238 return res;
4240 if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
4241 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
4243 tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
4244 if (res.is_known ())
4245 return res;
4248 if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
4249 if (tree rhs_cst = rhs->maybe_get_constant ())
4251 tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
4252 if (res.is_known ())
4253 return res;
4256 /* Handle comparisons between two svalues with more than one operand. */
4257 if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
4259 switch (op)
4261 default:
4262 break;
4263 case EQ_EXPR:
4265 /* TODO: binops can be equal even if they are not structurally
4266 equal in case of commutative operators. */
4267 tristate res = structural_equality (lhs, rhs);
4268 if (res.is_true ())
4269 return res;
4271 break;
4272 case LE_EXPR:
4274 tristate res = structural_equality (lhs, rhs);
4275 if (res.is_true ())
4276 return res;
4278 break;
4279 case GE_EXPR:
4281 tristate res = structural_equality (lhs, rhs);
4282 if (res.is_true ())
4283 return res;
4284 res = symbolic_greater_than (binop, rhs);
4285 if (res.is_true ())
4286 return res;
4288 break;
4289 case GT_EXPR:
4291 tristate res = symbolic_greater_than (binop, rhs);
4292 if (res.is_true ())
4293 return res;
4295 break;
4299 return tristate::TS_UNKNOWN;
4302 /* Subroutine of region_model::eval_condition_without_cm, for rejecting
4303 equality of INIT_VAL(PARM) with &LOCAL. */
4305 tristate
4306 region_model::compare_initial_and_pointer (const initial_svalue *init,
4307 const region_svalue *ptr) const
4309 const region *pointee = ptr->get_pointee ();
4311 /* If we have a pointer to something within a stack frame, it can't be the
4312 initial value of a param. */
4313 if (pointee->maybe_get_frame_region ())
4314 if (init->initial_value_of_param_p ())
4315 return tristate::TS_FALSE;
4317 return tristate::TS_UNKNOWN;
4320 /* Return true if SVAL is definitely positive. */
4322 static bool
4323 is_positive_svalue (const svalue *sval)
4325 if (tree cst = sval->maybe_get_constant ())
4326 return !zerop (cst) && get_range_pos_neg (cst) == 1;
4327 tree type = sval->get_type ();
4328 if (!type)
4329 return false;
4330 /* Consider a binary operation size_t + int. The analyzer wraps the int in
4331 an unaryop_svalue, converting it to a size_t, but in the dynamic execution
4332 the result can be smaller than the first operand when the int is negative.
4333 Thus, check whether the argument of the unaryop_svalue is also positive. */
4334 if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
4335 return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
4336 && is_positive_svalue (un_op->get_arg ());
4337 return TYPE_UNSIGNED (type);
4340 /* Return true if A is definitely larger than B.
4342 Limitation: does not account for integer overflows and does not try to
4343 return false, so it cannot be used negated. */
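/* e.g. (a sketch, using a hypothetical SSA name i_2):
   "i_2 * 4 + 16 > i_2 * 4" can be deduced to be TRUE by stripping the
   positive "+ 16" offset and noting that "i_2 * 4 >= i_2 * 4" trivially
   holds; anything not of the handled PLUS_EXPR/MULT_EXPR forms yields
   UNKNOWN.  */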
4345 tristate
4346 region_model::symbolic_greater_than (const binop_svalue *bin_a,
4347 const svalue *b) const
4349 if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
4351 /* Eliminate the right-hand side of both svalues. */
4352 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
4353 if (bin_a->get_op () == bin_b->get_op ()
4354 && eval_condition_without_cm (bin_a->get_arg1 (),
4355 GT_EXPR,
4356 bin_b->get_arg1 ()).is_true ()
4357 && eval_condition_without_cm (bin_a->get_arg0 (),
4358 GE_EXPR,
4359 bin_b->get_arg0 ()).is_true ())
4360 return tristate (tristate::TS_TRUE);
4362 /* Otherwise, try to remove a positive offset or factor from BIN_A. */
4363 if (is_positive_svalue (bin_a->get_arg1 ())
4364 && eval_condition_without_cm (bin_a->get_arg0 (),
4365 GE_EXPR, b).is_true ())
4366 return tristate (tristate::TS_TRUE);
4368 return tristate::unknown ();
4371 /* Return true if A and B are equal structurally.
4373 Structural equality means that A and B are equal if the svalues A and B
4374 have the same nodes at the same positions in the tree and the leaves
4375 are equal. Equality for conjured_svalues and initial_svalues is
4376 determined by comparing the pointers, while constants are compared by
4377 value. That behavior is useful for detecting binop_svalues that
4378 evaluate to the same concrete value but whose operands differ only in type.
4380 For example,
4381 binop_svalue (mult_expr,
4382 initial_svalue (‘size_t’, decl_region (..., 'some_var')),
4383 constant_svalue (‘size_t’, 4))
4384 and
4385 binop_svalue (mult_expr,
4386 initial_svalue (‘size_t’, decl_region (..., 'some_var')),
4387 constant_svalue (‘sizetype’, 4))
4388 are structurally equal. A concrete C code example where this occurs can
4389 be found in test7 of out-of-bounds-5.c. */
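/* A hypothetical C-level sketch of code that can give rise to such a pair
   (the names are illustrative):

     size_t some_var = get_count ();
     int *buf = malloc (some_var * 4);

   where folding can attach the constant 4 to either ‘size_t’ or
   ‘sizetype’, depending on how the multiplication was built.  */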
4391 tristate
4392 region_model::structural_equality (const svalue *a, const svalue *b) const
4394 /* If A and B are referentially equal, they are also structurally equal. */
4395 if (a == b)
4396 return tristate (tristate::TS_TRUE);
4398 switch (a->get_kind ())
4400 default:
4401 return tristate::unknown ();
4402 /* SK_CONJURED and SK_INITIAL are already handled
4403 by the referential equality above. */
4404 case SK_CONSTANT:
4406 tree a_cst = a->maybe_get_constant ();
4407 tree b_cst = b->maybe_get_constant ();
4408 if (a_cst && b_cst)
4409 return tristate (tree_int_cst_equal (a_cst, b_cst));
4411 return tristate (tristate::TS_FALSE);
4412 case SK_UNARYOP:
4414 const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
4415 if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
4416 return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
4417 un_b->get_type ())
4418 && un_a->get_op () == un_b->get_op ()
4419 && structural_equality (un_a->get_arg (),
4420 un_b->get_arg ()));
4422 return tristate (tristate::TS_FALSE);
4423 case SK_BINOP:
4425 const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
4426 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
4427 return tristate (bin_a->get_op () == bin_b->get_op ()
4428 && structural_equality (bin_a->get_arg0 (),
4429 bin_b->get_arg0 ())
4430 && structural_equality (bin_a->get_arg1 (),
4431 bin_b->get_arg1 ()));
4433 return tristate (tristate::TS_FALSE);
4437 /* Handle various constraints of the form:
4438 LHS: ((bool)INNER_LHS INNER_OP INNER_RHS)
4439 OP : == or !=
4440 RHS: zero
4441 and (with a cast):
4442 LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
4443 OP : == or !=
4444 RHS: zero
4445 by adding constraints for INNER_LHS INNER_OP INNER_RHS.
4447 Return true if this function can fully handle the constraint; if
4448 so, add the implied constraint(s) and write true to *OUT if they
4449 are consistent with existing constraints, or write false to *OUT
4450 if they contradict existing constraints.
4452 Return false for cases that this function doesn't know how to handle.
4454 For example, if we're checking a stored conditional, we'll have
4455 something like:
4456 LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
4457 OP : NE_EXPR
4458 RHS: zero
4459 which this function can turn into an add_constraint of:
4460 (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
4462 Similarly, optimized && and || conditionals lead to e.g.
4463 if (p && q)
4464 becoming gimple like this:
4465 _1 = p_6 == 0B;
4466 _2 = q_8 == 0B;
4467 _3 = _1 | _2;
4468 On the "_3 is false" branch we can have constraints of the form:
4469 ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
4470 | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
4471 == 0
4472 which implies that both _1 and _2 are false,
4473 which this function can turn into a pair of add_constraints of
4474 (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
4475 and:
4476 (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */
4478 bool
4479 region_model::add_constraints_from_binop (const svalue *outer_lhs,
4480 enum tree_code outer_op,
4481 const svalue *outer_rhs,
4482 bool *out,
4483 region_model_context *ctxt)
4485 while (const svalue *cast = outer_lhs->maybe_undo_cast ())
4486 outer_lhs = cast;
4487 const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
4488 if (!binop_sval)
4489 return false;
4490 if (!outer_rhs->all_zeroes_p ())
4491 return false;
4493 const svalue *inner_lhs = binop_sval->get_arg0 ();
4494 enum tree_code inner_op = binop_sval->get_op ();
4495 const svalue *inner_rhs = binop_sval->get_arg1 ();
4497 if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
4498 return false;
4500 /* We have either
4501 - "OUTER_LHS != false" (i.e. OUTER is true), or
4502 - "OUTER_LHS == false" (i.e. OUTER is false). */
4503 bool is_true = outer_op == NE_EXPR;
4505 switch (inner_op)
4507 default:
4508 return false;
4510 case EQ_EXPR:
4511 case NE_EXPR:
4513 /* ...and "(inner_lhs OP inner_rhs) == 0"
4514 then (inner_lhs OP inner_rhs) must have the same
4515 logical value as LHS. */
4516 if (!is_true)
4517 inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
4518 *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
4519 return true;
4521 break;
4523 case BIT_AND_EXPR:
4524 if (is_true)
4526 /* ...and "(inner_lhs & inner_rhs) != 0"
4527 then both inner_lhs and inner_rhs must be true. */
4528 const svalue *false_sval
4529 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
4530 bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
4531 bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
4532 *out = sat1 && sat2;
4533 return true;
4535 return false;
4537 case BIT_IOR_EXPR:
4538 if (!is_true)
4540 /* ...and "(inner_lhs | inner_rhs) == 0"
4541 i.e. "(inner_lhs | inner_rhs)" is false
4542 then both inner_lhs and inner_rhs must be false. */
4543 const svalue *false_sval
4544 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
4545 bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
4546 bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
4547 *out = sat1 && sat2;
4548 return true;
4550 return false;
4554 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
4555 If it is consistent with existing constraints, add it, and return true.
4556 Return false if it contradicts existing constraints.
4557 Use CTXT for reporting any diagnostics associated with the accesses. */
4559 bool
4560 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
4561 region_model_context *ctxt)
4563 /* For now, make no attempt to capture constraints on floating-point
4564 values. */
4565 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
4566 return true;
4568 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
4569 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
4571 return add_constraint (lhs_sval, op, rhs_sval, ctxt);
4574 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
4575 If it is consistent with existing constraints, add it, and return true.
4576 Return false if it contradicts existing constraints.
4577 Use CTXT for reporting any diagnostics associated with the accesses. */
4579 bool
4580 region_model::add_constraint (const svalue *lhs,
4581 enum tree_code op,
4582 const svalue *rhs,
4583 region_model_context *ctxt)
4585 tristate t_cond = eval_condition (lhs, op, rhs);
4587 /* If we already have the condition, do nothing. */
4588 if (t_cond.is_true ())
4589 return true;
4591 /* Reject a constraint that would contradict existing knowledge, as
4592 unsatisfiable. */
4593 if (t_cond.is_false ())
4594 return false;
4596 bool out;
4597 if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
4598 return out;
4600 /* Attempt to store the constraint. */
4601 if (!m_constraints->add_constraint (lhs, op, rhs))
4602 return false;
4604 /* Notify the context, if any. This exists so that the state machines
4605 in a program_state can be notified about the condition, and so can
4606 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
4607 when synthesizing constraints as above. */
4608 if (ctxt)
4609 ctxt->on_condition (lhs, op, rhs);
4611 /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
4612 the case where REGION is heap-allocated and thus could be NULL). */
4613 if (tree rhs_cst = rhs->maybe_get_constant ())
4614 if (op == EQ_EXPR && zerop (rhs_cst))
4615 if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
4616 unset_dynamic_extents (region_sval->get_pointee ());
4618 return true;
4621 /* As above, but when returning false, if OUT is non-NULL, write a
4622 new rejected_constraint to *OUT. */
4624 bool
4625 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
4626 region_model_context *ctxt,
4627 rejected_constraint **out)
4629 bool sat = add_constraint (lhs, op, rhs, ctxt);
4630 if (!sat && out)
4631 *out = new rejected_op_constraint (*this, lhs, op, rhs);
4632 return sat;
4635 /* Determine what is known about the condition "LHS OP RHS" within
4636 this model.
4637 Use CTXT for reporting any diagnostics associated with the accesses. */
4639 tristate
4640 region_model::eval_condition (tree lhs,
4641 enum tree_code op,
4642 tree rhs,
4643 region_model_context *ctxt)
4645 /* For now, make no attempt to model constraints on floating-point
4646 values. */
4647 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
4648 return tristate::unknown ();
4650 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
4653 /* Implementation of region_model::get_representative_path_var.
4654 Attempt to return a path_var that represents SVAL, or return NULL_TREE.
4655 Use VISITED to prevent infinite mutual recursion with the overload for
4656 regions. */
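/* e.g. (a sketch): for the initial_svalue of a hypothetical local "i" in
   the current frame, this would ideally return a path_var wrapping the
   decl for "i", tagged with the frame's stack depth.  */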
4658 path_var
4659 region_model::get_representative_path_var_1 (const svalue *sval,
4660 svalue_set *visited) const
4662 gcc_assert (sval);
4664 /* Prevent infinite recursion. */
4665 if (visited->contains (sval))
4666 return path_var (NULL_TREE, 0);
4667 visited->add (sval);
4669 /* Handle casts by recursion into get_representative_path_var. */
4670 if (const svalue *cast_sval = sval->maybe_undo_cast ())
4672 path_var result = get_representative_path_var (cast_sval, visited);
4673 tree orig_type = sval->get_type ();
4674 /* If necessary, wrap the result in a cast. */
4675 if (result.m_tree && orig_type)
4676 result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
4677 return result;
4680 auto_vec<path_var> pvs;
4681 m_store.get_representative_path_vars (this, visited, sval, &pvs);
4683 if (tree cst = sval->maybe_get_constant ())
4684 pvs.safe_push (path_var (cst, 0));
4686 /* Handle string literals and various other pointers. */
4687 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
4689 const region *reg = ptr_sval->get_pointee ();
4690 if (path_var pv = get_representative_path_var (reg, visited))
4691 return path_var (build1 (ADDR_EXPR,
4692 sval->get_type (),
4693 pv.m_tree),
4694 pv.m_stack_depth);
4697 /* If we have a sub_svalue, look for ways to represent the parent. */
4698 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
4700 const svalue *parent_sval = sub_sval->get_parent ();
4701 const region *subreg = sub_sval->get_subregion ();
4702 if (path_var parent_pv
4703 = get_representative_path_var (parent_sval, visited))
4704 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
4705 return path_var (build3 (COMPONENT_REF,
4706 sval->get_type (),
4707 parent_pv.m_tree,
4708 field_reg->get_field (),
4709 NULL_TREE),
4710 parent_pv.m_stack_depth);
4713 /* Handle binops. */
4714 if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
4715 if (path_var lhs_pv
4716 = get_representative_path_var (binop_sval->get_arg0 (), visited))
4717 if (path_var rhs_pv
4718 = get_representative_path_var (binop_sval->get_arg1 (), visited))
4719 return path_var (build2 (binop_sval->get_op (),
4720 sval->get_type (),
4721 lhs_pv.m_tree, rhs_pv.m_tree),
4722 lhs_pv.m_stack_depth);
4724 if (pvs.length () < 1)
4725 return path_var (NULL_TREE, 0);
4727 pvs.qsort (readability_comparator);
4728 return pvs[0];
4731 /* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
4732 Use VISITED to prevent infinite mutual recursion with the overload for
4733 regions.
4735 This function defers to get_representative_path_var_1 to do the work;
4736 it adds verification that get_representative_path_var_1 returned a tree
4737 of the correct type. */
4739 path_var
4740 region_model::get_representative_path_var (const svalue *sval,
4741 svalue_set *visited) const
4743 if (sval == NULL)
4744 return path_var (NULL_TREE, 0);
4746 tree orig_type = sval->get_type ();
4748 path_var result = get_representative_path_var_1 (sval, visited);
4750 /* Verify that the result has the same type as SVAL, if any. */
4751 if (result.m_tree && orig_type)
4752 gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
4754 return result;
4757 /* Attempt to return a tree that represents SVAL, or return NULL_TREE.
4759 Strip off any top-level cast, to avoid messages like
4760 double-free of '(void *)ptr'
4761 from analyzer diagnostics. */
4763 tree
4764 region_model::get_representative_tree (const svalue *sval) const
4766 svalue_set visited;
4767 tree expr = get_representative_path_var (sval, &visited).m_tree;
4769 /* Strip off any top-level cast. */
4770 if (expr && TREE_CODE (expr) == NOP_EXPR)
4771 expr = TREE_OPERAND (expr, 0);
4773 return fixup_tree_for_diagnostic (expr);
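/* As above, but for a region REG rather than an svalue.  */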
4776 tree
4777 region_model::get_representative_tree (const region *reg) const
4779 svalue_set visited;
4780 tree expr = get_representative_path_var (reg, &visited).m_tree;
4782 /* Strip off any top-level cast. */
4783 if (expr && TREE_CODE (expr) == NOP_EXPR)
4784 expr = TREE_OPERAND (expr, 0);
4786 return fixup_tree_for_diagnostic (expr);
4789 /* Implementation of region_model::get_representative_path_var.
4791 Attempt to return a path_var that represents REG, or return
4792 the NULL path_var.
4793 For example, a region for a field of a local would be a path_var
4794 wrapping a COMPONENT_REF.
4795 Use VISITED to prevent infinite mutual recursion with the overload for
4796 svalues. */
4798 path_var
4799 region_model::get_representative_path_var_1 (const region *reg,
4800 svalue_set *visited) const
4802 switch (reg->get_kind ())
4804 default:
4805 gcc_unreachable ();
4807 case RK_FRAME:
4808 case RK_GLOBALS:
4809 case RK_CODE:
4810 case RK_HEAP:
4811 case RK_STACK:
4812 case RK_ROOT:
4813 /* Regions that represent memory spaces are not expressible as trees. */
4814 return path_var (NULL_TREE, 0);
4816 case RK_FUNCTION:
4818 const function_region *function_reg
4819 = as_a <const function_region *> (reg);
4820 return path_var (function_reg->get_fndecl (), 0);
4822 case RK_LABEL:
4824 const label_region *label_reg = as_a <const label_region *> (reg);
4825 return path_var (label_reg->get_label (), 0);
4828 case RK_SYMBOLIC:
4830 const symbolic_region *symbolic_reg
4831 = as_a <const symbolic_region *> (reg);
4832 const svalue *pointer = symbolic_reg->get_pointer ();
4833 path_var pointer_pv = get_representative_path_var (pointer, visited);
4834 if (!pointer_pv)
4835 return path_var (NULL_TREE, 0);
4836 tree offset = build_int_cst (pointer->get_type (), 0);
4837 return path_var (build2 (MEM_REF,
4838 reg->get_type (),
4839 pointer_pv.m_tree,
4840 offset),
4841 pointer_pv.m_stack_depth);
4843 case RK_DECL:
4845 const decl_region *decl_reg = as_a <const decl_region *> (reg);
4846 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
4848 case RK_FIELD:
4850 const field_region *field_reg = as_a <const field_region *> (reg);
4851 path_var parent_pv
4852 = get_representative_path_var (reg->get_parent_region (), visited);
4853 if (!parent_pv)
4854 return path_var (NULL_TREE, 0);
4855 return path_var (build3 (COMPONENT_REF,
4856 reg->get_type (),
4857 parent_pv.m_tree,
4858 field_reg->get_field (),
4859 NULL_TREE),
4860 parent_pv.m_stack_depth);
4863 case RK_ELEMENT:
4865 const element_region *element_reg
4866 = as_a <const element_region *> (reg);
4867 path_var parent_pv
4868 = get_representative_path_var (reg->get_parent_region (), visited);
4869 if (!parent_pv)
4870 return path_var (NULL_TREE, 0);
4871 path_var index_pv
4872 = get_representative_path_var (element_reg->get_index (), visited);
4873 if (!index_pv)
4874 return path_var (NULL_TREE, 0);
4875 return path_var (build4 (ARRAY_REF,
4876 reg->get_type (),
4877 parent_pv.m_tree, index_pv.m_tree,
4878 NULL_TREE, NULL_TREE),
4879 parent_pv.m_stack_depth);
4882 case RK_OFFSET:
4884 const offset_region *offset_reg
4885 = as_a <const offset_region *> (reg);
4886 path_var parent_pv
4887 = get_representative_path_var (reg->get_parent_region (), visited);
4888 if (!parent_pv)
4889 return path_var (NULL_TREE, 0);
4890 path_var offset_pv
4891 = get_representative_path_var (offset_reg->get_byte_offset (),
4892 visited);
4893 if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
4894 return path_var (NULL_TREE, 0);
4895 tree addr_parent = build1 (ADDR_EXPR,
4896 build_pointer_type (reg->get_type ()),
4897 parent_pv.m_tree);
4898 return path_var (build2 (MEM_REF,
4899 reg->get_type (),
4900 addr_parent, offset_pv.m_tree),
4901 parent_pv.m_stack_depth);
4904 case RK_SIZED:
4905 return path_var (NULL_TREE, 0);
4907 case RK_CAST:
4909 path_var parent_pv
4910 = get_representative_path_var (reg->get_parent_region (), visited);
4911 if (!parent_pv)
4912 return path_var (NULL_TREE, 0);
4913 return path_var (build1 (NOP_EXPR,
4914 reg->get_type (),
4915 parent_pv.m_tree),
4916 parent_pv.m_stack_depth);
4919 case RK_HEAP_ALLOCATED:
4920 case RK_ALLOCA:
4921 /* No good way to express heap-allocated/alloca regions as trees. */
4922 return path_var (NULL_TREE, 0);
4924 case RK_STRING:
4926 const string_region *string_reg = as_a <const string_region *> (reg);
4927 return path_var (string_reg->get_string_cst (), 0);
4930 case RK_VAR_ARG:
4931 case RK_UNKNOWN:
4932 return path_var (NULL_TREE, 0);
4936 /* Attempt to return a path_var that represents REG, or return
4937 the NULL path_var.
4938 For example, a region for a field of a local would be a path_var
4939 wrapping a COMPONENT_REF.
4940 Use VISITED to prevent infinite mutual recursion with the overload for
4941 svalues.
4943 This function defers to get_representative_path_var_1 to do the work;
4944 it adds verification that get_representative_path_var_1 returned a tree
4945 of the correct type. */
4947 path_var
4948 region_model::get_representative_path_var (const region *reg,
4949 svalue_set *visited) const
4951 path_var result = get_representative_path_var_1 (reg, visited);
4953 /* Verify that the result has the same type as REG, if any. */
4954 if (result.m_tree && reg->get_type ())
4955 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
4957 return result;
4960 /* Update this model for any phis in SNODE, assuming we came from
4961 LAST_CFG_SUPEREDGE. */
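/* e.g. (a sketch, with hypothetical SSA names): phi nodes have
   parallel-assignment semantics, so given
     x_5 = PHI <a_2(bb1), b_3(bb2)>
     y_6 = PHI <b_3(bb1), a_2(bb2)>
   each phi source is evaluated against the snapshot of the state taken
   before any of the phis wrote their results.  */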
4963 void
4964 region_model::update_for_phis (const supernode *snode,
4965 const cfg_superedge *last_cfg_superedge,
4966 region_model_context *ctxt)
4968 gcc_assert (last_cfg_superedge);
4970 /* Copy this state and pass it to handle_phi so that all of the phi stmts
4971 are effectively handled simultaneously. */
4972 const region_model old_state (*this);
4974 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
4975 !gsi_end_p (gpi); gsi_next (&gpi))
4977 gphi *phi = gpi.phi ();
4979 tree src = last_cfg_superedge->get_phi_arg (phi);
4980 tree lhs = gimple_phi_result (phi);
4982 /* Update next_state based on phi and old_state. */
4983 handle_phi (phi, lhs, src, old_state, ctxt);
4987 /* Attempt to update this model for taking EDGE (where the last statement
4988 was LAST_STMT), returning true if the edge can be taken, false
4989 otherwise.
4990 When returning false, if OUT is non-NULL, write a new rejected_constraint
4991 to it.
4993 For CFG superedges where LAST_STMT is a conditional or a switch
4994 statement, attempt to add the relevant conditions for EDGE to this
4995 model, returning true if they are feasible, or false if they are
4996 impossible.
4998 For call superedges, push frame information and store arguments
4999 into parameters.
5001 For return superedges, pop frame information and store return
5002 values into any lhs.
5004 Rejection of call/return superedges happens elsewhere, in
5005 program_point::on_edge (i.e. based on program point, rather
5006 than program state). */
5008 bool
5009 region_model::maybe_update_for_edge (const superedge &edge,
5010 const gimple *last_stmt,
5011 region_model_context *ctxt,
5012 rejected_constraint **out)
5014 /* Handle frame updates for interprocedural edges. */
5015 switch (edge.m_kind)
5017 default:
5018 break;
5020 case SUPEREDGE_CALL:
5022 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
5023 update_for_call_superedge (*call_edge, ctxt);
5025 break;
5027 case SUPEREDGE_RETURN:
5029 const return_superedge *return_edge
5030 = as_a <const return_superedge *> (&edge);
5031 update_for_return_superedge (*return_edge, ctxt);
5033 break;
5035 case SUPEREDGE_INTRAPROCEDURAL_CALL:
5037 const callgraph_superedge *cg_sedge
5038 = as_a <const callgraph_superedge *> (&edge);
5039 update_for_call_summary (*cg_sedge, ctxt);
5041 break;
5044 if (last_stmt == NULL)
5045 return true;
5047 /* Apply any constraints for conditionals/switch statements. */
5049 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
5051 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
5052 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
5055 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
5057 const switch_cfg_superedge *switch_sedge
5058 = as_a <const switch_cfg_superedge *> (&edge);
5059 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
5060 ctxt, out);
5063 /* Apply any constraints due to an exception being thrown. */
5064 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
5065 if (cfg_sedge->get_flags () & EDGE_EH)
5066 return apply_constraints_for_exception (last_stmt, ctxt, out);
5068 return true;
5071 /* Push a new frame_region on to the stack region.
5072 Populate the frame_region with child regions for the function call's
5073 parameters, using values from the arguments at the callsite in the
5074 caller's frame. */
5076 void
5077 region_model::update_for_gcall (const gcall *call_stmt,
5078 region_model_context *ctxt,
5079 function *callee)
5081 /* Build a vec of argument svalues, using the current top
5082 frame for resolving tree expressions. */
5083 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
5085 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
5087 tree arg = gimple_call_arg (call_stmt, i);
5088 arg_svals.quick_push (get_rvalue (arg, ctxt));
5091 if (!callee)
5093 /* Get the function * from the gcall. */
5094 tree fn_decl = get_fndecl_for_call (call_stmt, ctxt);
5095 callee = DECL_STRUCT_FUNCTION (fn_decl);
5098 push_frame (callee, &arg_svals, ctxt);
5101 /* Pop the top-most frame_region from the stack, and copy the return
5102 region's values (if any) into the region for the lvalue of the LHS of
5103 the call (if any). */
5105 void
5106 region_model::update_for_return_gcall (const gcall *call_stmt,
5107 region_model_context *ctxt)
5109 /* Get the lvalue for the result of the call, passing it to pop_frame,
5110 so that pop_frame can determine the region with respect to the
5111 *caller* frame. */
5112 tree lhs = gimple_call_lhs (call_stmt);
5113 pop_frame (lhs, NULL, ctxt);
5116 /* Extract calling information from the superedge and update the model for the
5117 call. */
5119 void
5120 region_model::update_for_call_superedge (const call_superedge &call_edge,
5121 region_model_context *ctxt)
5123 const gcall *call_stmt = call_edge.get_call_stmt ();
5124 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
5127 /* Extract calling information from the return superedge and update the model
5128 for the returning call. */
5130 void
5131 region_model::update_for_return_superedge (const return_superedge &return_edge,
5132 region_model_context *ctxt)
5134 const gcall *call_stmt = return_edge.get_call_stmt ();
5135 update_for_return_gcall (call_stmt, ctxt);
5138 /* Update this region_model with a summary of the effect of calling
5139 and returning from CG_SEDGE.
5141 TODO: Currently this is extremely simplistic: we merely set the
5142 return value to "unknown". A proper implementation would e.g. update
5143 sm-state, and presumably be reworked to support multiple outcomes. */
5145 void
5146 region_model::update_for_call_summary (const callgraph_superedge &cg_sedge,
5147 region_model_context *ctxt)
5149 /* For now, set any return value to "unknown". */
5150 const gcall *call_stmt = cg_sedge.get_call_stmt ();
5151 tree lhs = gimple_call_lhs (call_stmt);
5152 if (lhs)
5153 mark_region_as_unknown (get_lvalue (lhs, ctxt),
5154 ctxt ? ctxt->get_uncertainty () : NULL);
5156 // TODO: actually implement some kind of summary here
5159 /* Given a true or false edge guarded by conditional statement COND_STMT,
5160 determine appropriate constraints for the edge to be taken.
5162 If they are feasible, add the constraints and return true.
5164 Return false if the constraints contradict existing knowledge
5165 (and so the edge should not be taken).
5166 When returning false, if OUT is non-NULL, write a new rejected_constraint
5167 to it. */
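/* e.g. (a sketch, with a hypothetical SSA name i_2): for "if (i_2 < 10)",
   taking the true edge attempts to add the constraint "i_2 < 10", whereas
   the false edge first inverts the comparison via invert_tree_comparison
   and attempts to add "i_2 >= 10" instead.  */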
5169 bool
5170 region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
5171 const gcond *cond_stmt,
5172 region_model_context *ctxt,
5173 rejected_constraint **out)
5175 ::edge cfg_edge = sedge.get_cfg_edge ();
5176 gcc_assert (cfg_edge != NULL);
5177 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
5179 enum tree_code op = gimple_cond_code (cond_stmt);
5180 tree lhs = gimple_cond_lhs (cond_stmt);
5181 tree rhs = gimple_cond_rhs (cond_stmt);
5182 if (cfg_edge->flags & EDGE_FALSE_VALUE)
5183 op = invert_tree_comparison (op, false /* honor_nans */);
5184 return add_constraint (lhs, op, rhs, ctxt, out);
5187 /* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
5188 for the edge to be taken.
5190 If they are feasible, add the constraints and return true.
5192 Return false if the constraints contradict existing knowledge
5193 (and so the edge should not be taken).
5194 When returning false, if OUT is non-NULL, write a new rejected_constraint
5195 to it. */
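/* e.g. (a sketch, with a hypothetical SSA name i_2): for "switch (i_2)"
   with "case 1 ... 5:" and "default:", the edge for the case label is
   given the bounded range [1, 5] on i_2, while the default edge is given
   the complement of all of the case ranges.  */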
5197 bool
5198 region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
5199 const gswitch *switch_stmt,
5200 region_model_context *ctxt,
5201 rejected_constraint **out)
5203 bounded_ranges_manager *ranges_mgr = get_range_manager ();
5204 const bounded_ranges *all_cases_ranges
5205 = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
5206 tree index = gimple_switch_index (switch_stmt);
5207 const svalue *index_sval = get_rvalue (index, ctxt);
5208 bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
5209 if (!sat && out)
5210 *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
5211 if (sat && ctxt && !all_cases_ranges->empty_p ())
5212 ctxt->on_bounded_ranges (*index_sval, *all_cases_ranges);
5213 return sat;
5216 /* Apply any constraints due to an exception being thrown at LAST_STMT.
5218 If they are feasible, add the constraints and return true.
5220 Return false if the constraints contradict existing knowledge
5221 (and so the edge should not be taken).
5222 When returning false, if OUT is non-NULL, write a new rejected_constraint
5223 to it. */
5225 bool
5226 region_model::apply_constraints_for_exception (const gimple *last_stmt,
5227 region_model_context *ctxt,
5228 rejected_constraint **out)
5230 gcc_assert (last_stmt);
5231 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
5232 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
5233 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
5234 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
5236 /* We have an exception thrown from operator new.
5237 Add a constraint that the result was NULL, to avoid a false
5238 leak report due to the result being lost when following
5239 the EH edge. */
5240 if (tree lhs = gimple_call_lhs (call))
5241 return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
5242 return true;
5244 return true;
5247 /* For use with push_frame when handling a top-level call within the analysis.
5248 PARAM has a defined but unknown initial value.
5249 Anything it points to has escaped, since the calling context "knows"
5250 the pointer, and thus calls to unknown functions could read/write into
5251 the region. */
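/* e.g. (a sketch): when analyzing the hypothetical
     void foo (int *p);
   as an entry point, the region pointed to by "p" starts with unknown
   contents and is marked as having escaped, since the unknown caller
   also holds the pointer.  */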
5253 void
5254 region_model::on_top_level_param (tree param,
5255 region_model_context *ctxt)
5257 if (POINTER_TYPE_P (TREE_TYPE (param)))
5259 const region *param_reg = get_lvalue (param, ctxt);
5260 const svalue *init_ptr_sval
5261 = m_mgr->get_or_create_initial_value (param_reg);
5262 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
5263 m_store.mark_as_escaped (pointee_reg);
5267 /* Update this region_model to reflect pushing a frame onto the stack
5268 for a call to FUN.
5270 If ARG_SVALS is non-NULL, use it to populate the parameters
5271 in the new frame.
5272 Otherwise, the params have their initial_svalues.
5274 Return the frame_region for the new frame. */
5276 const region *
5277 region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
5278 region_model_context *ctxt)
5280 m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
5281 if (arg_svals)
5283 /* Arguments supplied from a caller frame. */
5284 tree fndecl = fun->decl;
5285 unsigned idx = 0;
5286 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
5287 iter_parm = DECL_CHAIN (iter_parm), ++idx)
5289 /* If there's a mismatching declaration, the call stmt might
5290 not have enough args. Handle this case by leaving the
5291 rest of the params as uninitialized. */
5292 if (idx >= arg_svals->length ())
5293 break;
5294 tree parm_lval = iter_parm;
5295 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
5296 parm_lval = parm_default_ssa;
5297 const region *parm_reg = get_lvalue (parm_lval, ctxt);
5298 const svalue *arg_sval = (*arg_svals)[idx];
5299 set_value (parm_reg, arg_sval, ctxt);
5302 /* Handle any variadic args. */
5303 unsigned va_arg_idx = 0;
5304 for (; idx < arg_svals->length (); idx++, va_arg_idx++)
5306 const svalue *arg_sval = (*arg_svals)[idx];
5307 const region *var_arg_reg
5308 = m_mgr->get_var_arg_region (m_current_frame,
5309 va_arg_idx);
5310 set_value (var_arg_reg, arg_sval, ctxt);
5313 else
5315 /* Otherwise we have a top-level call within the analysis. The params
5316 have defined but unknown initial values.
5317 Anything they point to has escaped. */
5318 tree fndecl = fun->decl;
5319 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
5320 iter_parm = DECL_CHAIN (iter_parm))
5322 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
5323 on_top_level_param (parm_default_ssa, ctxt);
5324 else
5325 on_top_level_param (iter_parm, ctxt);
5329 return m_current_frame;
5332 /* Get the function of the top-most frame in this region_model's stack.
5333 There must be such a frame. */
5335 function *
5336 region_model::get_current_function () const
5338 const frame_region *frame = get_current_frame ();
5339 gcc_assert (frame);
5340 return frame->get_function ();
5343 /* Pop the topmost frame_region from this region_model's stack;
5345 If RESULT_LVALUE is non-null, copy any return value from the frame
5346 into the corresponding region (evaluated with respect to the *caller*
5347 frame, rather than the called frame).
5348 If OUT_RESULT is non-null, copy any return value from the frame
5349 into *OUT_RESULT.
5351 Purge the frame region and all its descendent regions.
5352 Convert any pointers that point into such regions into
5353 POISON_KIND_POPPED_STACK svalues. */
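/* e.g. (a sketch): after analyzing a call to the hypothetical
     int *get_ptr (void) { int local; return &local; }
   popping the frame turns the returned "&local" into a poisoned svalue of
   kind POISON_KIND_POPPED_STACK, which later diagnostics can report as a
   use of a pointer into a popped stack frame.  */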
5355 void
5356 region_model::pop_frame (tree result_lvalue,
5357 const svalue **out_result,
5358 region_model_context *ctxt)
5360 gcc_assert (m_current_frame);
5362 /* Evaluate the result, within the callee frame. */
5363 const frame_region *frame_reg = m_current_frame;
5364 tree fndecl = m_current_frame->get_function ()->decl;
5365 tree result = DECL_RESULT (fndecl);
5366 const svalue *retval = NULL;
5367 if (result && TREE_TYPE (result) != void_type_node)
5369 retval = get_rvalue (result, ctxt);
5370 if (out_result)
5371 *out_result = retval;
5374 /* Pop the frame. */
5375 m_current_frame = m_current_frame->get_calling_frame ();
5377 if (result_lvalue && retval)
5379 /* Compute result_dst_reg using RESULT_LVALUE *after* popping
5380 the frame, but before poisoning pointers into the old frame. */
5381 const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
5382 set_value (result_dst_reg, retval, ctxt);
5385 unbind_region_and_descendents (frame_reg, POISON_KIND_POPPED_STACK);
5388 /* Get the number of frames in this region_model's stack. */
5390 int
5391 region_model::get_stack_depth () const
5393 const frame_region *frame = get_current_frame ();
5394 if (frame)
5395 return frame->get_stack_depth ();
5396 else
5397 return 0;
5400 /* Get the frame_region with the given index within the stack.
5401 The frame_region must exist. */
5403 const frame_region *
5404 region_model::get_frame_at_index (int index) const
5406 const frame_region *frame = get_current_frame ();
5407 gcc_assert (frame);
5408 gcc_assert (index >= 0);
5409 gcc_assert (index <= frame->get_index ());
5410 while (index != frame->get_index ())
5412 frame = frame->get_calling_frame ();
5413 gcc_assert (frame);
5415 return frame;
5418 /* Unbind svalues for any regions in REG and below.
5419 Find any pointers to such regions; convert them to
5420 poisoned values of kind PKIND.
5421 Also purge any dynamic extents. */
5423 void
5424 region_model::unbind_region_and_descendents (const region *reg,
5425 enum poison_kind pkind)
5427 /* Gather a set of base regions to be unbound. */
5428 hash_set<const region *> base_regs;
5429 for (store::cluster_map_t::iterator iter = m_store.begin ();
5430 iter != m_store.end (); ++iter)
5432 const region *iter_base_reg = (*iter).first;
5433 if (iter_base_reg->descendent_of_p (reg))
5434 base_regs.add (iter_base_reg);
5436 for (hash_set<const region *>::iterator iter = base_regs.begin ();
5437 iter != base_regs.end (); ++iter)
5438 m_store.purge_cluster (*iter);
5440 /* Find any pointers to REG or its descendents; convert to poisoned. */
5441 poison_any_pointers_to_descendents (reg, pkind);
5443 /* Purge dynamic extents of any base regions in REG and below
5444 (e.g. VLAs and alloca stack regions). */
5445 for (auto iter : m_dynamic_extents)
5447 const region *iter_reg = iter.first;
5448 if (iter_reg->descendent_of_p (reg))
5449 unset_dynamic_extents (iter_reg);
5453 /* Implementation of BindingVisitor.
5454 Update the bound svalues for regions below REG to use poisoned
5455 values instead. */
5457 struct bad_pointer_finder
5459 bad_pointer_finder (const region *reg, enum poison_kind pkind,
5460 region_model_manager *mgr)
5461 : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
5464 void on_binding (const binding_key *, const svalue *&sval)
5466 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
5468 const region *ptr_dst = ptr_sval->get_pointee ();
5469 /* Poison ptrs to descendents of REG, but not to REG itself,
5470 otherwise double-free detection doesn't work (since sm-state
5471 for "free" is stored on the original ptr svalue). */
5472 if (ptr_dst->descendent_of_p (m_reg)
5473 && ptr_dst != m_reg)
5475 sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
5476 sval->get_type ());
5477 ++m_count;
5482 const region *m_reg;
5483 enum poison_kind m_pkind;
5484 region_model_manager *const m_mgr;
5485 int m_count;
5488 /* Find any pointers to REG or its descendents; convert them to
5489 poisoned values of kind PKIND.
5490 Return the number of pointers that were poisoned. */
5492 int
5493 region_model::poison_any_pointers_to_descendents (const region *reg,
5494 enum poison_kind pkind)
5496 bad_pointer_finder bv (reg, pkind, m_mgr);
5497 m_store.for_each_binding (bv);
5498 return bv.m_count;
5501 /* Attempt to merge THIS with OTHER_MODEL, writing the result
5502 to OUT_MODEL. Use POINT to distinguish values created as a
5503 result of merging. */
5505 bool
5506 region_model::can_merge_with_p (const region_model &other_model,
5507 const program_point &point,
5508 region_model *out_model,
5509 const extrinsic_state *ext_state,
5510 const program_state *state_a,
5511 const program_state *state_b) const
5513 gcc_assert (out_model);
5514 gcc_assert (m_mgr == other_model.m_mgr);
5515 gcc_assert (m_mgr == out_model->m_mgr);
5517 if (m_current_frame != other_model.m_current_frame)
5518 return false;
5519 out_model->m_current_frame = m_current_frame;
5521 model_merger m (this, &other_model, point, out_model,
5522 ext_state, state_a, state_b);
5524 if (!store::can_merge_p (&m_store, &other_model.m_store,
5525 &out_model->m_store, m_mgr->get_store_manager (),
5526 &m))
5527 return false;
5529 if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
5530 &out_model->m_dynamic_extents))
5531 return false;
5533 /* Merge constraints. */
5534 constraint_manager::merge (*m_constraints,
5535 *other_model.m_constraints,
5536 out_model->m_constraints);
5538 return true;
5541 /* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
5542 otherwise. */
5544 tree
5545 region_model::get_fndecl_for_call (const gcall *call,
5546 region_model_context *ctxt)
5548 tree fn_ptr = gimple_call_fn (call);
5549 if (fn_ptr == NULL_TREE)
5550 return NULL_TREE;
5551 const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
5552 if (const region_svalue *fn_ptr_ptr
5553 = fn_ptr_sval->dyn_cast_region_svalue ())
5555 const region *reg = fn_ptr_ptr->get_pointee ();
5556 if (const function_region *fn_reg = reg->dyn_cast_function_region ())
5558 tree fn_decl = fn_reg->get_fndecl ();
5559 cgraph_node *node = cgraph_node::get (fn_decl);
5560 if (!node)
5561 return NULL_TREE;
5562 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
5563 if (ultimate_node)
5564 return ultimate_node->decl;
5568 return NULL_TREE;
5571 /* Would be much simpler to use a lambda here, if it were supported. */
5573 struct append_regions_cb_data
5575 const region_model *model;
5576 auto_vec<const decl_region *> *out;
5579 /* Populate *OUT with all decl_regions in the current
5580 frame that have clusters within the store. */
5582 void
5583 region_model::
5584 get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
5586 append_regions_cb_data data;
5587 data.model = this;
5588 data.out = out;
5589 m_store.for_each_cluster (append_regions_cb, &data);
5592 /* Implementation detail of get_regions_for_current_frame. */
5594 void
5595 region_model::append_regions_cb (const region *base_reg,
5596 append_regions_cb_data *cb_data)
5598 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
5599 return;
5600 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
5601 cb_data->out->safe_push (decl_reg);
5605 /* Abstract class for diagnostics related to the use of
5606 floating-point arithmetic where precision is needed. */
5608 class imprecise_floating_point_arithmetic : public pending_diagnostic
5610 public:
5611 int get_controlling_option () const final override
5613 return OPT_Wanalyzer_imprecise_fp_arithmetic;
5617 /* Concrete diagnostic to complain about uses of floating-point arithmetic
5618 in the size argument of malloc etc. */
5620 class float_as_size_arg : public imprecise_floating_point_arithmetic
5622 public:
5623 float_as_size_arg (tree arg) : m_arg (arg)
5626 const char *get_kind () const final override
5628 return "float_as_size_arg_diagnostic";
5631 bool subclass_equal_p (const pending_diagnostic &other) const final override
5633 return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
5636 bool emit (rich_location *rich_loc) final override
5638 diagnostic_metadata m;
5639 bool warned = warning_meta (rich_loc, m, get_controlling_option (),
5640 "use of floating-point arithmetic here might"
5641 " yield unexpected results");
5642 if (warned)
5643 inform (rich_loc->get_loc (), "only use operands of an integer type"
5644 " inside the size argument");
5645 return warned;
5648 label_text describe_final_event (const evdesc::final_event &ev) final
5649 override
5651 if (m_arg)
5652 return ev.formatted_print ("operand %qE is of type %qT",
5653 m_arg, TREE_TYPE (m_arg));
5654 return ev.formatted_print ("at least one operand of the size argument is"
5655 " of a floating-point type");
5658 private:
5659 tree m_arg;
5662 /* Visitor to find uses of floating-point variables/constants in an svalue. */
5664 class contains_floating_point_visitor : public visitor
5666 public:
5667 contains_floating_point_visitor (const svalue *root_sval) : m_result (NULL)
5669 root_sval->accept (this);
5672 const svalue *get_svalue_to_report ()
5674 return m_result;
5677 void visit_constant_svalue (const constant_svalue *sval) final override
5679 /* At the point the analyzer runs, constant integer operands in a
5680 floating-point expression have already been implicitly converted to
5681 floating-point values. Thus, we prefer to report non-constants, so
5682 that the diagnostic always reports a floating-point operand. */
5683 tree type = sval->get_type ();
5684 if (type && FLOAT_TYPE_P (type) && !m_result)
5685 m_result = sval;
5688 void visit_conjured_svalue (const conjured_svalue *sval) final override
5690 tree type = sval->get_type ();
5691 if (type && FLOAT_TYPE_P (type))
5692 m_result = sval;
5695 void visit_initial_svalue (const initial_svalue *sval) final override
5697 tree type = sval->get_type ();
5698 if (type && FLOAT_TYPE_P (type))
5699 m_result = sval;
5702 private:
5703 /* Non-null if at least one floating-point operand was found. */
5704 const svalue *m_result;
5707 /* May complain about uses of floating-point operands in SIZE_IN_BYTES. */
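/* e.g. (a sketch): for the hypothetical call
     ptr = malloc (n * 7.3);
   the visitor finds the floating-point operand within the size svalue,
   and the allocation would be flagged.  */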
5709 void
5710 region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
5711 region_model_context *ctxt) const
5713 gcc_assert (ctxt);
5715 contains_floating_point_visitor v (size_in_bytes);
5716 if (const svalue *float_sval = v.get_svalue_to_report ())
5718 tree diag_arg = get_representative_tree (float_sval);
5719 ctxt->warn (new float_as_size_arg (diag_arg));
5723 /* Return a new region describing a heap-allocated block of memory.
5724 Use CTXT to complain about tainted sizes. */
5726 const region *
5727 region_model::create_region_for_heap_alloc (const svalue *size_in_bytes,
5728 region_model_context *ctxt)
5730 const region *reg = m_mgr->create_region_for_heap_alloc ();
5731 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
5732 set_dynamic_extents (reg, size_in_bytes, ctxt);
5733 return reg;
5736 /* Return a new region describing a block of memory allocated within the
5737 current frame.
5738 Use CTXT to complain about tainted sizes. */
5740 const region *
5741 region_model::create_region_for_alloca (const svalue *size_in_bytes,
5742 region_model_context *ctxt)
5744 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
5745 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
5746 set_dynamic_extents (reg, size_in_bytes, ctxt);
5747 return reg;
5750 /* Record that the size of REG is SIZE_IN_BYTES.
5751 Use CTXT to complain about tainted sizes. */
5753 void
5754 region_model::set_dynamic_extents (const region *reg,
5755 const svalue *size_in_bytes,
5756 region_model_context *ctxt)
5758 assert_compat_types (size_in_bytes->get_type (), size_type_node);
5759 if (ctxt)
5761 check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
5762 ctxt);
5763 check_dynamic_size_for_floats (size_in_bytes, ctxt);
5765 m_dynamic_extents.put (reg, size_in_bytes);
5768 /* Get the recorded dynamic size of REG in bytes, or NULL if no
5769 dynamic size was recorded. */
5771 const svalue *
5772 region_model::get_dynamic_extents (const region *reg) const
5774 if (const svalue * const *slot = m_dynamic_extents.get (reg))
5775 return *slot;
5776 return NULL;
5779 /* Unset any recorded dynamic size of REG. */
5781 void
5782 region_model::unset_dynamic_extents (const region *reg)
5784 m_dynamic_extents.remove (reg);
5787 /* class noop_region_model_context : public region_model_context. */
5789 void
5790 noop_region_model_context::add_note (pending_note *pn)
5792 delete pn;
5795 void
5796 noop_region_model_context::bifurcate (custom_edge_info *info)
5798 delete info;
5801 void
5802 noop_region_model_context::terminate_path ()
5806 /* struct model_merger. */
5808 /* Dump a multiline representation of this merger to PP. */
5810 void
5811 model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
5813 pp_string (pp, "model A:");
5814 pp_newline (pp);
5815 m_model_a->dump_to_pp (pp, simple, true);
5816 pp_newline (pp);
5818 pp_string (pp, "model B:");
5819 pp_newline (pp);
5820 m_model_b->dump_to_pp (pp, simple, true);
5821 pp_newline (pp);
5823 pp_string (pp, "merged model:");
5824 pp_newline (pp);
5825 m_merged_model->dump_to_pp (pp, simple, true);
5826 pp_newline (pp);
5829 /* Dump a multiline representation of this merger to FILE. */
5831 void
5832 model_merger::dump (FILE *fp, bool simple) const
5834 pretty_printer pp;
5835 pp_format_decoder (&pp) = default_tree_printer;
5836 pp_show_color (&pp) = pp_show_color (global_dc->printer);
5837 pp.buffer->stream = fp;
5838 dump_to_pp (&pp, simple);
5839 pp_flush (&pp);
5842 /* Dump a multiline representation of this merger to stderr. */
5844 DEBUG_FUNCTION void
5845 model_merger::dump (bool simple) const
5847 dump (stderr, simple);
5850 /* Return true if it's OK to merge SVAL with other svalues. */
5852 bool
5853 model_merger::mergeable_svalue_p (const svalue *sval) const
5855 if (m_ext_state)
5857 /* Reject merging svalues that have non-purgeable sm-state,
5858 to avoid falsely reporting memory leaks by merging them
5859 with something else. For example, given a local var "p",
5860 reject merging:
5861 store_a mapping "p" to a malloc-ed ptr
5862 with:
5863 store_b mapping "p" to a NULL ptr. */
5864 if (m_state_a)
5865 if (!m_state_a->can_purge_p (*m_ext_state, sval))
5866 return false;
5867 if (m_state_b)
5868 if (!m_state_b->can_purge_p (*m_ext_state, sval))
5869 return false;
5871 return true;
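/* For example, a hypothetical source-level sketch of the scenario
   described above:

     void *p = NULL;
     if (flag)
       p = malloc (16);

   At the join point, one path has "p" pointing to a malloc-ed buffer
   (with non-purgeable sm-state) and the other has "p" as NULL; merging
   "p" to an unknown value would lose track of the allocation and could
   yield a false leak report, so such svalues are rejected.  */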
5874 } // namespace ana
5876 /* Dump RMODEL fully to stderr (i.e. without summarization). */
5878 DEBUG_FUNCTION void
5879 debug (const region_model &rmodel)
5881 rmodel.dump (false);
5884 /* class rejected_op_constraint : public rejected_constraint. */
5886 void
5887 rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
5889 region_model m (m_model);
5890 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
5891 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
5892 lhs_sval->dump_to_pp (pp, true);
5893 pp_printf (pp, " %s ", op_symbol_code (m_op));
5894 rhs_sval->dump_to_pp (pp, true);
5897 /* class rejected_ranges_constraint : public rejected_constraint. */
5899 void
5900 rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
5902 region_model m (m_model);
5903 const svalue *sval = m.get_rvalue (m_expr, NULL);
5904 sval->dump_to_pp (pp, true);
5905 pp_string (pp, " in ");
5906 m_ranges->dump_to_pp (pp, true);
5909 /* class engine. */
5911 /* engine's ctor. */
5913 engine::engine (const supergraph *sg, logger *logger)
5914 : m_sg (sg), m_mgr (logger)
5918 /* Dump the managed objects by class to LOGGER, and the per-class totals. */
5920 void
5921 engine::log_stats (logger *logger) const
5923 m_mgr.log_stats (logger, true);
5926 namespace ana {
5928 #if CHECKING_P
5930 namespace selftest {
5932 /* Build a constant tree of the given type from STR. */
5934 static tree
5935 build_real_cst_from_string (tree type, const char *str)
5937 REAL_VALUE_TYPE real;
5938 real_from_string (&real, str);
5939 return build_real (type, real);
5942 /* Append various "interesting" constants to OUT (e.g. NaN). */
5944 static void
5945 append_interesting_constants (auto_vec<tree> *out)
5947 out->safe_push (build_int_cst (integer_type_node, 0));
5948 out->safe_push (build_int_cst (integer_type_node, 42));
5949 out->safe_push (build_int_cst (unsigned_type_node, 0));
5950 out->safe_push (build_int_cst (unsigned_type_node, 42));
5951 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
5952 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
5953 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
5954 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
5955 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
5956 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
5957 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
5958 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
5961 /* Verify that tree_cmp is a well-behaved comparator for qsort, even
5962 if the underlying constants aren't comparable. */
5964 static void
5965 test_tree_cmp_on_constants ()
5967 auto_vec<tree> csts;
5968 append_interesting_constants (&csts);
5970 /* Try sorting every triple. */
5971 const unsigned num = csts.length ();
5972 for (unsigned i = 0; i < num; i++)
5973 for (unsigned j = 0; j < num; j++)
5974 for (unsigned k = 0; k < num; k++)
5976 auto_vec<tree> v (3);
5977 v.quick_push (csts[i]);
5978 v.quick_push (csts[j]);
5979 v.quick_push (csts[k]);
5980 v.qsort (tree_cmp);
5984 /* Implementation detail of the ASSERT_CONDITION_* macros. */
5986 void
5987 assert_condition (const location &loc,
5988 region_model &model,
5989 const svalue *lhs, tree_code op, const svalue *rhs,
5990 tristate expected)
5992 tristate actual = model.eval_condition (lhs, op, rhs);
5993 ASSERT_EQ_AT (loc, actual, expected);
5996 /* Implementation detail of the ASSERT_CONDITION_* macros. */
5998 void
5999 assert_condition (const location &loc,
6000 region_model &model,
6001 tree lhs, tree_code op, tree rhs,
6002 tristate expected)
6004 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
6005 ASSERT_EQ_AT (loc, actual, expected);
6008 /* Implementation detail of ASSERT_DUMP_TREE_EQ. */
6010 static void
6011 assert_dump_tree_eq (const location &loc, tree t, const char *expected)
6013 auto_fix_quotes sentinel;
6014 pretty_printer pp;
6015 pp_format_decoder (&pp) = default_tree_printer;
6016 dump_tree (&pp, t);
6017 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
6020 /* Assert that dump_tree (T) is EXPECTED. */
6022 #define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
6023 SELFTEST_BEGIN_STMT \
6024 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
6025 SELFTEST_END_STMT
6027 /* Implementation detail of ASSERT_DUMP_EQ. */
6029 static void
6030 assert_dump_eq (const location &loc,
6031 const region_model &model,
6032 bool summarize,
6033 const char *expected)
6035 auto_fix_quotes sentinel;
6036 pretty_printer pp;
6037 pp_format_decoder (&pp) = default_tree_printer;
6039 model.dump_to_pp (&pp, summarize, true);
6040 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
6043 /* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
6045 #define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
6046 SELFTEST_BEGIN_STMT \
6047 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
6048 SELFTEST_END_STMT
6050 /* Smoketest for region_model::dump_to_pp. */
6052 static void
6053 test_dump ()
6055 region_model_manager mgr;
6056 region_model model (&mgr);
6058 ASSERT_DUMP_EQ (model, false,
6059 "stack depth: 0\n"
6060 "m_called_unknown_fn: FALSE\n"
6061 "constraint_manager:\n"
6062 " equiv classes:\n"
6063 " constraints:\n");
6064 ASSERT_DUMP_EQ (model, true,
6065 "stack depth: 0\n"
6066 "m_called_unknown_fn: FALSE\n"
6067 "constraint_manager:\n"
6068 " equiv classes:\n"
6069 " constraints:\n");
6072 /* Helper function for selftests. Create a struct or union type named NAME,
6073 with the fields given by the FIELD_DECLS in FIELDS.
6074 If IS_STRUCT is true, create a RECORD_TYPE (aka a struct); otherwise
6075 create a UNION_TYPE. */
6077 static tree
6078 make_test_compound_type (const char *name, bool is_struct,
6079 const auto_vec<tree> *fields)
6081 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
6082 TYPE_NAME (t) = get_identifier (name);
6083 TYPE_SIZE (t) = 0;
6085 tree fieldlist = NULL;
6086 int i;
6087 tree field;
6088 FOR_EACH_VEC_ELT (*fields, i, field)
6090 gcc_assert (TREE_CODE (field) == FIELD_DECL);
6091 DECL_CONTEXT (field) = t;
6092 fieldlist = chainon (field, fieldlist);
6094 fieldlist = nreverse (fieldlist);
6095 TYPE_FIELDS (t) = fieldlist;
6097 layout_type (t);
6098 return t;
6101 /* Selftest fixture for creating the type "struct coord {int x; int y; };". */
6103 struct coord_test
6105 coord_test ()
6107 auto_vec<tree> fields;
6108 m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
6109 get_identifier ("x"), integer_type_node);
6110 fields.safe_push (m_x_field);
6111 m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
6112 get_identifier ("y"), integer_type_node);
6113 fields.safe_push (m_y_field);
6114 m_coord_type = make_test_compound_type ("coord", true, &fields);
6117 tree m_x_field;
6118 tree m_y_field;
6119 tree m_coord_type;
6122 /* Verify usage of a struct. */
6124 static void
6125 test_struct ()
6127 coord_test ct;
6129 tree c = build_global_decl ("c", ct.m_coord_type);
6130 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6131 c, ct.m_x_field, NULL_TREE);
6132 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6133 c, ct.m_y_field, NULL_TREE);
6135 tree int_17 = build_int_cst (integer_type_node, 17);
6136 tree int_m3 = build_int_cst (integer_type_node, -3);
6138 region_model_manager mgr;
6139 region_model model (&mgr);
6140 model.set_value (c_x, int_17, NULL);
6141 model.set_value (c_y, int_m3, NULL);
6143 /* Verify get_offset for "c.x". */
6145 const region *c_x_reg = model.get_lvalue (c_x, NULL);
6146 region_offset offset = c_x_reg->get_offset (&mgr);
6147 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
6148 ASSERT_EQ (offset.get_bit_offset (), 0);
6151 /* Verify get_offset for "c.y". */
6153 const region *c_y_reg = model.get_lvalue (c_y, NULL);
6154 region_offset offset = c_y_reg->get_offset (&mgr);
6155 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
6156 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
6160 /* Verify usage of an array element. */
6162 static void
6163 test_array_1 ()
6165 tree tlen = size_int (10);
6166 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
6168 tree a = build_global_decl ("a", arr_type);
6170 region_model_manager mgr;
6171 region_model model (&mgr);
6172 tree int_0 = build_int_cst (integer_type_node, 0);
6173 tree a_0 = build4 (ARRAY_REF, char_type_node,
6174 a, int_0, NULL_TREE, NULL_TREE);
6175 tree char_A = build_int_cst (char_type_node, 'A');
6176 model.set_value (a_0, char_A, NULL);
6179 /* Verify that region_model::get_representative_tree works as expected. */
6181 static void
6182 test_get_representative_tree ()
6184 region_model_manager mgr;
6186 /* STRING_CST. */
6188 tree string_cst = build_string (4, "foo");
6189 region_model m (&mgr);
6190 const svalue *str_sval = m.get_rvalue (string_cst, NULL);
6191 tree rep = m.get_representative_tree (str_sval);
6192 ASSERT_EQ (rep, string_cst);
6195 /* String literal. */
6197 tree string_cst_ptr = build_string_literal (4, "foo");
6198 region_model m (&mgr);
6199 const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
6200 tree rep = m.get_representative_tree (str_sval);
6201 ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
6204 /* Value of an element within an array. */
6206 tree tlen = size_int (10);
6207 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
6208 tree a = build_global_decl ("a", arr_type);
6209 placeholder_svalue test_sval (char_type_node, "test value");
6211 /* Value of a[3]. */
6213 test_region_model_context ctxt;
6214 region_model model (&mgr);
6215 tree int_3 = build_int_cst (integer_type_node, 3);
6216 tree a_3 = build4 (ARRAY_REF, char_type_node,
6217 a, int_3, NULL_TREE, NULL_TREE);
6218 const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
6219 model.set_value (a_3_reg, &test_sval, &ctxt);
6220 tree rep = model.get_representative_tree (&test_sval);
6221 ASSERT_DUMP_TREE_EQ (rep, "a[3]");
6224 /* Value of a[0]. */
6226 test_region_model_context ctxt;
6227 region_model model (&mgr);
6228 tree idx = build_int_cst (integer_type_node, 0);
6229 tree a_0 = build4 (ARRAY_REF, char_type_node,
6230 a, idx, NULL_TREE, NULL_TREE);
6231 const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
6232 model.set_value (a_0_reg, &test_sval, &ctxt);
6233 tree rep = model.get_representative_tree (&test_sval);
6234 ASSERT_DUMP_TREE_EQ (rep, "a[0]");
6238 /* Value of a field within a struct. */
6240 coord_test ct;
6242 tree c = build_global_decl ("c", ct.m_coord_type);
6243 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6244 c, ct.m_x_field, NULL_TREE);
6245 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6246 c, ct.m_y_field, NULL_TREE);
6248 test_region_model_context ctxt;
6250 /* Value of initial field. */
6252 region_model m (&mgr);
6253 const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
6254 placeholder_svalue test_sval_x (integer_type_node, "test x val");
6255 m.set_value (c_x_reg, &test_sval_x, &ctxt);
6256 tree rep = m.get_representative_tree (&test_sval_x);
6257 ASSERT_DUMP_TREE_EQ (rep, "c.x");
6260 /* Value of non-initial field. */
6262 region_model m (&mgr);
6263 const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
6264 placeholder_svalue test_sval_y (integer_type_node, "test y val");
6265 m.set_value (c_y_reg, &test_sval_y, &ctxt);
6266 tree rep = m.get_representative_tree (&test_sval_y);
6267 ASSERT_DUMP_TREE_EQ (rep, "c.y");
6272 /* Verify that calling region_model::get_rvalue repeatedly on the same
6273 tree constant retrieves the same svalue *. */
6275 static void
6276 test_unique_constants ()
6278 tree int_0 = build_int_cst (integer_type_node, 0);
6279 tree int_42 = build_int_cst (integer_type_node, 42);
6281 test_region_model_context ctxt;
6282 region_model_manager mgr;
6283 region_model model (&mgr);
6284 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
6285 ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
6286 model.get_rvalue (int_42, &ctxt));
6287 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
6288 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
6290 /* A "(const int)42" will be a different tree from "(int)42"... */
6291 tree const_int_type_node
6292 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
6293 tree const_int_42 = build_int_cst (const_int_type_node, 42);
6294 ASSERT_NE (int_42, const_int_42);
6295 /* It should have a different const_svalue. */
6296 const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
6297 const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
6298 ASSERT_NE (int_42_sval, const_int_42_sval);
6299 /* But they should compare as equal. */
6300 ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
6301 ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
6304 /* Verify that each type gets its own singleton unknown_svalue within a
6305 region_model_manager, and that NULL_TREE gets its own singleton. */
6307 static void
6308 test_unique_unknowns ()
6310 region_model_manager mgr;
6311 const svalue *unknown_int
6312 = mgr.get_or_create_unknown_svalue (integer_type_node);
6313 /* Repeated calls with the same type should get the same "unknown"
6314 svalue. */
6315 const svalue *unknown_int_2
6316 = mgr.get_or_create_unknown_svalue (integer_type_node);
6317 ASSERT_EQ (unknown_int, unknown_int_2);
6319 /* Different types (or the NULL type) should have different
6320 unknown_svalues. */
6321 const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
6322 ASSERT_NE (unknown_NULL_type, unknown_int);
6324 /* Repeated calls with NULL for the type should get the same "unknown"
6325 svalue. */
6326 const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
6327 ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
6330 /* Verify that initial_svalues are handled as expected. */
6332 static void
6333 test_initial_svalue_folding ()
6335 region_model_manager mgr;
6336 tree x = build_global_decl ("x", integer_type_node);
6337 tree y = build_global_decl ("y", integer_type_node);
6339 test_region_model_context ctxt;
6340 region_model model (&mgr);
6341 const svalue *x_init = model.get_rvalue (x, &ctxt);
6342 const svalue *y_init = model.get_rvalue (y, &ctxt);
6343 ASSERT_NE (x_init, y_init);
6344 const region *x_reg = model.get_lvalue (x, &ctxt);
6345 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
6349 /* Verify that unary ops are folded as expected. */
6351 static void
6352 test_unaryop_svalue_folding ()
6354 region_model_manager mgr;
6355 tree x = build_global_decl ("x", integer_type_node);
6356 tree y = build_global_decl ("y", integer_type_node);
6358 test_region_model_context ctxt;
6359 region_model model (&mgr);
6360 const svalue *x_init = model.get_rvalue (x, &ctxt);
6361 const svalue *y_init = model.get_rvalue (y, &ctxt);
6362 const region *x_reg = model.get_lvalue (x, &ctxt);
6363 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
6365 /* "(int)x" -> "x". */
6366 ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));
6368 /* "(void *)x" -> something other than "x". */
6369 ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));
6371 /* "!(x == y)" -> "x != y". */
6372 ASSERT_EQ (mgr.get_or_create_unaryop
6373 (boolean_type_node, TRUTH_NOT_EXPR,
6374 mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
6375 x_init, y_init)),
6376 mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
6377 x_init, y_init));
6378 /* "!(x > y)" -> "x <= y". */
6379 ASSERT_EQ (mgr.get_or_create_unaryop
6380 (boolean_type_node, TRUTH_NOT_EXPR,
6381 mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
6382 x_init, y_init)),
6383 mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
6384 x_init, y_init));
6387 /* Verify that binops on constant svalues are folded. */
6389 static void
6390 test_binop_svalue_folding ()
6392 #define NUM_CSTS 10
6393 tree cst_int[NUM_CSTS];
6394 region_model_manager mgr;
6395 const svalue *cst_sval[NUM_CSTS];
6396 for (int i = 0; i < NUM_CSTS; i++)
6398 cst_int[i] = build_int_cst (integer_type_node, i);
6399 cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
6400 ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
6401 ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
6404 for (int i = 0; i < NUM_CSTS; i++)
6405 for (int j = 0; j < NUM_CSTS; j++)
6407 if (i != j)
6408 ASSERT_NE (cst_sval[i], cst_sval[j]);
6409 if (i + j < NUM_CSTS)
6411 const svalue *sum
6412 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6413 cst_sval[i], cst_sval[j]);
6414 ASSERT_EQ (sum, cst_sval[i + j]);
6416 if (i - j >= 0)
6418 const svalue *difference
6419 = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
6420 cst_sval[i], cst_sval[j]);
6421 ASSERT_EQ (difference, cst_sval[i - j]);
6423 if (i * j < NUM_CSTS)
6425 const svalue *product
6426 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6427 cst_sval[i], cst_sval[j]);
6428 ASSERT_EQ (product, cst_sval[i * j]);
6430 const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
6431 cst_sval[i], cst_sval[j]);
6432 ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval[0]);
6433 const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
6434 cst_sval[i], cst_sval[j]);
6435 ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval[0]);
6436 // etc
6439 tree x = build_global_decl ("x", integer_type_node);
6441 test_region_model_context ctxt;
6442 region_model model (&mgr);
6443 const svalue *x_init = model.get_rvalue (x, &ctxt);
6445 /* PLUS_EXPR folding. */
6446 const svalue *x_init_plus_zero
6447 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6448 x_init, cst_sval[0]);
6449 ASSERT_EQ (x_init_plus_zero, x_init);
6450 const svalue *zero_plus_x_init
6451 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6452 cst_sval[0], x_init);
6453 ASSERT_EQ (zero_plus_x_init, x_init);
6455 /* MULT_EXPR folding. */
6456 const svalue *x_init_times_zero
6457 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6458 x_init, cst_sval[0]);
6459 ASSERT_EQ (x_init_times_zero, cst_sval[0]);
6460 const svalue *zero_times_x_init
6461 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6462 cst_sval[0], x_init);
6463 ASSERT_EQ (zero_times_x_init, cst_sval[0]);
6465 const svalue *x_init_times_one
6466 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6467 x_init, cst_sval[1]);
6468 ASSERT_EQ (x_init_times_one, x_init);
6469 const svalue *one_times_x_init
6470 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6471 cst_sval[1], x_init);
6472 ASSERT_EQ (one_times_x_init, x_init);
6474 // etc
6475 // TODO: do we want to use the match-and-simplify DSL for this?
6477 /* Verify that binops put any constants on the RHS. */
6478 const svalue *four_times_x_init
6479 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6480 cst_sval[4], x_init);
6481 const svalue *x_init_times_four
6482 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6483 x_init, cst_sval[4]);
6484 ASSERT_EQ (four_times_x_init, x_init_times_four);
6485 const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
6486 ASSERT_EQ (binop->get_op (), MULT_EXPR);
6487 ASSERT_EQ (binop->get_arg0 (), x_init);
6488 ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
6490 /* Verify that ((x + 1) + 1) == (x + 2). */
6491 const svalue *x_init_plus_one
6492 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6493 x_init, cst_sval[1]);
6494 const svalue *x_init_plus_two
6495 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6496 x_init, cst_sval[2]);
6497 const svalue *x_init_plus_one_plus_one
6498 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6499 x_init_plus_one, cst_sval[1]);
6500 ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
6502 /* Verify various binops on booleans. */
6504 const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
6505 const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
6506 const svalue *sval_unknown
6507 = mgr.get_or_create_unknown_svalue (boolean_type_node);
6508 const placeholder_svalue sval_placeholder (boolean_type_node, "v");
6509 for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
6511 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6512 sval_true, sval_unknown),
6513 sval_true);
6514 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6515 sval_false, sval_unknown),
6516 sval_unknown);
6517 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6518 sval_false, &sval_placeholder),
6519 &sval_placeholder);
6521 for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
6523 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6524 sval_false, sval_unknown),
6525 sval_false);
6526 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6527 sval_true, sval_unknown),
6528 sval_unknown);
6529 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6530 sval_true, &sval_placeholder),
6531 &sval_placeholder);
6536 /* Verify that sub_svalues are folded as expected. */
6538 static void
6539 test_sub_svalue_folding ()
6541 coord_test ct;
6542 tree c = build_global_decl ("c", ct.m_coord_type);
6543 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6544 c, ct.m_x_field, NULL_TREE);
6546 region_model_manager mgr;
6547 region_model model (&mgr);
6548 test_region_model_context ctxt;
6549 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
6551 /* Verify that sub_svalue of "unknown" simply
6552 yields an unknown. */
6554 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
6555 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
6556 unknown, c_x_reg);
6557 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
6558 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
6561 /* Test that region::descendent_of_p works as expected. */
6563 static void
6564 test_descendent_of_p ()
6566 region_model_manager mgr;
6567 const region *stack = mgr.get_stack_region ();
6568 const region *heap = mgr.get_heap_region ();
6569 const region *code = mgr.get_code_region ();
6570 const region *globals = mgr.get_globals_region ();
6572 /* descendent_of_p should return true when used on the region itself. */
6573 ASSERT_TRUE (stack->descendent_of_p (stack));
6574 ASSERT_FALSE (stack->descendent_of_p (heap));
6575 ASSERT_FALSE (stack->descendent_of_p (code));
6576 ASSERT_FALSE (stack->descendent_of_p (globals));
6578 tree x = build_global_decl ("x", integer_type_node);
6579 const region *x_reg = mgr.get_region_for_global (x);
6580 ASSERT_TRUE (x_reg->descendent_of_p (globals));
6582 /* A cast_region should be a descendent of the original region. */
6583 const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
6584 ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
6587 /* Verify that bit_range_region works as expected. */
6589 static void
6590 test_bit_range_regions ()
6592 tree x = build_global_decl ("x", integer_type_node);
6593 region_model_manager mgr;
6594 const region *x_reg = mgr.get_region_for_global (x);
6595 const region *byte0
6596 = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
6597 const region *byte1
6598 = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
6599 ASSERT_TRUE (byte0->descendent_of_p (x_reg));
6600 ASSERT_TRUE (byte1->descendent_of_p (x_reg));
6601 ASSERT_NE (byte0, byte1);
6604 /* Verify that simple assignments work as expected. */
6606 static void
6607 test_assignment ()
6609 tree int_0 = build_int_cst (integer_type_node, 0);
6610 tree x = build_global_decl ("x", integer_type_node);
6611 tree y = build_global_decl ("y", integer_type_node);
6613 /* "x == 0", then use of y, then "y = 0;". */
6614 region_model_manager mgr;
6615 region_model model (&mgr);
6616 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
6617 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
6618 model.set_value (model.get_lvalue (y, NULL),
6619 model.get_rvalue (int_0, NULL),
6620 NULL);
6621 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
6622 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
6625 /* Verify that compound assignments work as expected. */
6627 static void
6628 test_compound_assignment ()
6630 coord_test ct;
6632 tree c = build_global_decl ("c", ct.m_coord_type);
6633 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6634 c, ct.m_x_field, NULL_TREE);
6635 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6636 c, ct.m_y_field, NULL_TREE);
6637 tree d = build_global_decl ("d", ct.m_coord_type);
6638 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6639 d, ct.m_x_field, NULL_TREE);
6640 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6641 d, ct.m_y_field, NULL_TREE);
6643 tree int_17 = build_int_cst (integer_type_node, 17);
6644 tree int_m3 = build_int_cst (integer_type_node, -3);
6646 region_model_manager mgr;
6647 region_model model (&mgr);
6648 model.set_value (c_x, int_17, NULL);
6649 model.set_value (c_y, int_m3, NULL);
6651 /* Copy c to d. */
6652 const svalue *sval = model.get_rvalue (c, NULL);
6653 model.set_value (model.get_lvalue (d, NULL), sval, NULL);
6655 /* Check that the fields have the same svalues. */
6656 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
6657 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
6660 /* Verify the details of pushing and popping stack frames. */
6662 static void
6663 test_stack_frames ()
6665 tree int_42 = build_int_cst (integer_type_node, 42);
6666 tree int_10 = build_int_cst (integer_type_node, 10);
6667 tree int_5 = build_int_cst (integer_type_node, 5);
6668 tree int_0 = build_int_cst (integer_type_node, 0);
6670 auto_vec <tree> param_types;
6671 tree parent_fndecl = make_fndecl (integer_type_node,
6672 "parent_fn",
6673 param_types);
6674 allocate_struct_function (parent_fndecl, true);
6676 tree child_fndecl = make_fndecl (integer_type_node,
6677 "child_fn",
6678 param_types);
6679 allocate_struct_function (child_fndecl, true);
6681 /* "a" and "b" in the parent frame. */
6682 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6683 get_identifier ("a"),
6684 integer_type_node);
6685 DECL_CONTEXT (a) = parent_fndecl;
6686 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6687 get_identifier ("b"),
6688 integer_type_node);
6689 DECL_CONTEXT (b) = parent_fndecl;
6690 /* "x" and "y" in a child frame. */
6691 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6692 get_identifier ("x"),
6693 integer_type_node);
6694 DECL_CONTEXT (x) = child_fndecl;
6695 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6696 get_identifier ("y"),
6697 integer_type_node);
6698 DECL_CONTEXT (y) = child_fndecl;
6700 /* "p" global. */
6701 tree p = build_global_decl ("p", ptr_type_node);
6703 /* "q" global. */
6704 tree q = build_global_decl ("q", ptr_type_node);
6706 region_model_manager mgr;
6707 test_region_model_context ctxt;
6708 region_model model (&mgr);
6710 /* Push stack frame for "parent_fn". */
6711 const region *parent_frame_reg
6712 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
6713 NULL, &ctxt);
6714 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
6715 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
6716 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
6717 model.set_value (a_in_parent_reg,
6718 model.get_rvalue (int_42, &ctxt),
6719 &ctxt);
6720 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
6722 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
6723 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
6724 tristate (tristate::TS_TRUE));
6726 /* Push stack frame for "child_fn". */
6727 const region *child_frame_reg
6728 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
6729 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
6730 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
6731 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
6732 model.set_value (x_in_child_reg,
6733 model.get_rvalue (int_0, &ctxt),
6734 &ctxt);
6735 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
6737 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
6738 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
6739 tristate (tristate::TS_TRUE));
6741 /* Point a global pointer at a local in the child frame: p = &x. */
6742 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
6743 model.set_value (p_in_globals_reg,
6744 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
6745 &ctxt);
6746 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
6748 /* Point another global pointer at p: q = &p. */
6749 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
6750 model.set_value (q_in_globals_reg,
6751 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
6752 &ctxt);
6754 /* Test region::descendent_of_p. */
6755 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
6756 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
6757 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
6759 /* Pop the "child_fn" frame from the stack. */
6760 model.pop_frame (NULL, NULL, &ctxt);
6761 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
6762 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
6764 /* Verify that p (which was pointing at the local "x" in the popped
6765 frame) has been poisoned. */
6766 const svalue *new_p_sval = model.get_rvalue (p, NULL);
6767 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
6768 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
6769 POISON_KIND_POPPED_STACK);
6771 /* Verify that q still points to p, in spite of the region
6772 renumbering. */
6773 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
6774 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
6775 ASSERT_EQ (new_q_sval->maybe_get_region (),
6776 model.get_lvalue (p, &ctxt));
6778 /* Verify that top of stack has been updated. */
6779 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
6781 /* Verify locals in parent frame. */
6782 /* Verify "a" still has its value. */
6783 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
6784 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
6785 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
6786 int_42);
6787 /* Verify "b" still has its constraint. */
6788 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
6789 tristate (tristate::TS_TRUE));
6792 /* Verify that get_representative_path_var works as expected: that
6793 we can map from regions to parms and back within a recursive call
6794 stack. */
6796 static void
6797 test_get_representative_path_var ()
6799 auto_vec <tree> param_types;
6800 tree fndecl = make_fndecl (integer_type_node,
6801 "factorial",
6802 param_types);
6803 allocate_struct_function (fndecl, true);
6805 /* Parm "n". */
6806 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6807 get_identifier ("n"),
6808 integer_type_node);
6809 DECL_CONTEXT (n) = fndecl;
6811 region_model_manager mgr;
6812 test_region_model_context ctxt;
6813 region_model model (&mgr);
6815 /* Push 5 stack frames for "factorial", each with a param. */
6816 auto_vec<const region *> parm_regs;
6817 auto_vec<const svalue *> parm_svals;
6818 for (int depth = 0; depth < 5; depth++)
6820 const region *frame_n_reg
6821 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
6822 const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
6823 parm_regs.safe_push (parm_n_reg);
6825 ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
6826 const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
6827 parm_svals.safe_push (sval_n);
6830 /* Verify that we can recognize that the regions are the parms,
6831 at every depth. */
6832 for (int depth = 0; depth < 5; depth++)
6835 svalue_set visited;
6836 ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
6837 &visited),
6838 path_var (n, depth + 1));
6840 /* ...and that we can lookup lvalues for locals for all frames,
6841 not just the top. */
6842 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
6843 parm_regs[depth]);
6844 /* ...and that we can locate the svalues. */
6846 svalue_set visited;
6847 ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
6848 &visited),
6849 path_var (n, depth + 1));
6854 /* Ensure that region_model::operator== works as expected. */
6856 static void
6857 test_equality_1 ()
6859 tree int_42 = build_int_cst (integer_type_node, 42);
6860 tree int_17 = build_int_cst (integer_type_node, 17);
6862 /* Verify that "empty" region_model instances are equal to each other. */
6863 region_model_manager mgr;
6864 region_model model0 (&mgr);
6865 region_model model1 (&mgr);
6866 ASSERT_EQ (model0, model1);
6868 /* Verify that setting state in model1 makes the models non-equal. */
6869 tree x = build_global_decl ("x", integer_type_node);
6870 model0.set_value (x, int_42, NULL);
6871 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6872 ASSERT_NE (model0, model1);
6874 /* Verify the copy-ctor. */
6875 region_model model2 (model0);
6876 ASSERT_EQ (model0, model2);
6877 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6878 ASSERT_NE (model1, model2);
6880 /* Verify that models obtained from copy-ctor are independently editable
6881 w/o affecting the original model. */
6882 model2.set_value (x, int_17, NULL);
6883 ASSERT_NE (model0, model2);
6884 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
6885 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6888 /* Verify that region models for
6889 x = 42; y = 113;
6890 and
6891 y = 113; x = 42;
6892 are equal. */
6894 static void
6895 test_canonicalization_2 ()
6897 tree int_42 = build_int_cst (integer_type_node, 42);
6898 tree int_113 = build_int_cst (integer_type_node, 113);
6899 tree x = build_global_decl ("x", integer_type_node);
6900 tree y = build_global_decl ("y", integer_type_node);
6902 region_model_manager mgr;
6903 region_model model0 (&mgr);
6904 model0.set_value (model0.get_lvalue (x, NULL),
6905 model0.get_rvalue (int_42, NULL),
6906 NULL);
6907 model0.set_value (model0.get_lvalue (y, NULL),
6908 model0.get_rvalue (int_113, NULL),
6909 NULL);
6911 region_model model1 (&mgr);
6912 model1.set_value (model1.get_lvalue (y, NULL),
6913 model1.get_rvalue (int_113, NULL),
6914 NULL);
6915 model1.set_value (model1.get_lvalue (x, NULL),
6916 model1.get_rvalue (int_42, NULL),
6917 NULL);
6919 ASSERT_EQ (model0, model1);
6922 /* Verify that constraints for
6923 x > 3 && y > 42
6924 and
6925 y > 42 && x > 3
6926 are equal after canonicalization. */
6928 static void
6929 test_canonicalization_3 ()
6931 tree int_3 = build_int_cst (integer_type_node, 3);
6932 tree int_42 = build_int_cst (integer_type_node, 42);
6933 tree x = build_global_decl ("x", integer_type_node);
6934 tree y = build_global_decl ("y", integer_type_node);
6936 region_model_manager mgr;
6937 region_model model0 (&mgr);
6938 model0.add_constraint (x, GT_EXPR, int_3, NULL);
6939 model0.add_constraint (y, GT_EXPR, int_42, NULL);
6941 region_model model1 (&mgr);
6942 model1.add_constraint (y, GT_EXPR, int_42, NULL);
6943 model1.add_constraint (x, GT_EXPR, int_3, NULL);
6945 model0.canonicalize ();
6946 model1.canonicalize ();
6947 ASSERT_EQ (model0, model1);
6950 /* Verify that we can canonicalize a model containing NaN and other real
6951 constants. */
6953 static void
6954 test_canonicalization_4 ()
6956 auto_vec<tree> csts;
6957 append_interesting_constants (&csts);
6959 region_model_manager mgr;
6960 region_model model (&mgr);
6962 for (tree cst : csts)
6963 model.get_rvalue (cst, NULL);
6965 model.canonicalize ();
6968 /* Assert that, if we have two region_model instances
6969 with values VAL_A and VAL_B for EXPR, they are
6970 mergeable. Write the merged model to *OUT_MERGED_MODEL,
6971 and the merged svalue ptr to *OUT_MERGED_SVALUE.
6972 If VAL_A or VAL_B is NULL_TREE, don't populate EXPR
6973 for that region_model. */
6975 static void
6976 assert_region_models_merge (tree expr, tree val_a, tree val_b,
6977 region_model *out_merged_model,
6978 const svalue **out_merged_svalue)
6980 region_model_manager *mgr = out_merged_model->get_manager ();
6981 program_point point (program_point::origin (*mgr));
6982 test_region_model_context ctxt;
6983 region_model model0 (mgr);
6984 region_model model1 (mgr);
6985 if (val_a)
6986 model0.set_value (model0.get_lvalue (expr, &ctxt),
6987 model0.get_rvalue (val_a, &ctxt),
6988 &ctxt);
6989 if (val_b)
6990 model1.set_value (model1.get_lvalue (expr, &ctxt),
6991 model1.get_rvalue (val_b, &ctxt),
6992 &ctxt);
6994 /* They should be mergeable. */
6995 ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
6996 *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
6999 /* Verify that we can merge region_model instances. */
7001 static void
7002 test_state_merging ()
7004 tree int_42 = build_int_cst (integer_type_node, 42);
7005 tree int_113 = build_int_cst (integer_type_node, 113);
7006 tree x = build_global_decl ("x", integer_type_node);
7007 tree y = build_global_decl ("y", integer_type_node);
7008 tree z = build_global_decl ("z", integer_type_node);
7009 tree p = build_global_decl ("p", ptr_type_node);
7011 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
7012 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
7014 auto_vec <tree> param_types;
7015 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
7016 allocate_struct_function (test_fndecl, true);
7018 /* Param "a". */
7019 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7020 get_identifier ("a"),
7021 integer_type_node);
7022 DECL_CONTEXT (a) = test_fndecl;
7023 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
7025 /* Param "q", a pointer. */
7026 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7027 get_identifier ("q"),
7028 ptr_type_node);
7029 DECL_CONTEXT (q) = test_fndecl;
7031 region_model_manager mgr;
7032 program_point point (program_point::origin (mgr));
7035 region_model model0 (&mgr);
7036 region_model model1 (&mgr);
7037 region_model merged (&mgr);
7038 /* Verify empty models can be merged. */
7039 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7040 ASSERT_EQ (model0, merged);
7043 /* Verify that we can merge two contradictory constraints on the
7044 value for a global. */
7045 /* TODO: verify that the merged model doesn't have a value for
7046 the global. */
7048 region_model model0 (&mgr);
7049 region_model model1 (&mgr);
7050 region_model merged (&mgr);
7051 test_region_model_context ctxt;
7052 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7053 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
7054 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7055 ASSERT_NE (model0, merged);
7056 ASSERT_NE (model1, merged);
7059 /* Verify handling of a PARM_DECL. */
7061 test_region_model_context ctxt;
7062 region_model model0 (&mgr);
7063 region_model model1 (&mgr);
7064 ASSERT_EQ (model0.get_stack_depth (), 0);
7065 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
7066 ASSERT_EQ (model0.get_stack_depth (), 1);
7067 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
7069 placeholder_svalue test_sval (integer_type_node, "test sval");
7070 model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
7071 model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
7072 ASSERT_EQ (model0, model1);
7074 /* They should be mergeable, and the result should be the same. */
7075 region_model merged (&mgr);
7076 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7077 ASSERT_EQ (model0, merged);
7078 /* In particular, "a" should have the placeholder value. */
7079 ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
7082 /* Verify handling of a global. */
7084 test_region_model_context ctxt;
7085 region_model model0 (&mgr);
7086 region_model model1 (&mgr);
7088 placeholder_svalue test_sval (integer_type_node, "test sval");
7089 model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
7090 model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
7091 ASSERT_EQ (model0, model1);
7093 /* They should be mergeable, and the result should be the same. */
7094 region_model merged (&mgr);
7095 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7096 ASSERT_EQ (model0, merged);
7097 /* In particular, "x" should have the placeholder value. */
7098 ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
7101 /* Use global-handling to verify various combinations of values. */
7103 /* Two equal constant values. */
7105 region_model merged (&mgr);
7106 const svalue *merged_x_sval;
7107 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
7109 /* In particular, there should be a constant value for "x". */
7110 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
7111 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
7112 int_42);
7115 /* Two non-equal constant values. */
7117 region_model merged (&mgr);
7118 const svalue *merged_x_sval;
7119 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
7121 /* In particular, there should be a "widening" value for "x". */
7122 ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
7125 /* Initial and constant. */
7127 region_model merged (&mgr);
7128 const svalue *merged_x_sval;
7129 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
7131 /* In particular, there should be an unknown value for "x". */
7132 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7135 /* Constant and initial. */
7137 region_model merged (&mgr);
7138 const svalue *merged_x_sval;
7139 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
7141 /* In particular, there should be an unknown value for "x". */
7142 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7145 /* Unknown and constant. */
7146 // TODO
7148 /* Pointers: NULL and NULL. */
7149 // TODO
7151 /* Pointers: NULL and non-NULL. */
7152 // TODO
7154 /* Pointers: non-NULL and non-NULL: ptr to a local. */
7156 region_model model0 (&mgr);
7157 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7158 model0.set_value (model0.get_lvalue (p, NULL),
7159 model0.get_rvalue (addr_of_a, NULL), NULL);
7161 region_model model1 (model0);
7162 ASSERT_EQ (model0, model1);
7164 /* They should be mergeable, and the result should be the same. */
7165 region_model merged (&mgr);
7166 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7167 ASSERT_EQ (model0, merged);
7170 /* Pointers: non-NULL and non-NULL: ptr to a global. */
7172 region_model merged (&mgr);
7173 /* p == &y in both input models. */
7174 const svalue *merged_p_sval;
7175 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
7176 &merged_p_sval);
7178 /* We should get p == &y in the merged model. */
7179 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
7180 const region_svalue *merged_p_ptr
7181 = merged_p_sval->dyn_cast_region_svalue ();
7182 const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
7183 ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
7186 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
7188 region_model merged (&mgr);
7189 /* x == &y vs x == &z in the input models; these are actually casts
7190 of the ptrs to "int". */
7191 const svalue *merged_x_sval;
7192 // TODO:
7193 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
7194 &merged_x_sval);
7196 /* We should get x == unknown in the merged model. */
7197 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7200 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
7202 test_region_model_context ctxt;
7203 region_model model0 (&mgr);
7204 tree size = build_int_cst (size_type_node, 1024);
7205 const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
7206 const region *new_reg
7207 = model0.create_region_for_heap_alloc (size_sval, &ctxt);
7208 const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
7209 model0.set_value (model0.get_lvalue (p, &ctxt),
7210 ptr_sval, &ctxt);
7212 region_model model1 (model0);
7214 ASSERT_EQ (model0, model1);
7216 region_model merged (&mgr);
7217 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7219 /* The merged model ought to be identical. */
7220 ASSERT_EQ (model0, merged);
7223 /* Two regions sharing the same placeholder svalue should continue sharing
7224 it after self-merger. */
7226 test_region_model_context ctxt;
7227 region_model model0 (&mgr);
7228 placeholder_svalue placeholder_sval (integer_type_node, "test");
7229 model0.set_value (model0.get_lvalue (x, &ctxt),
7230 &placeholder_sval, &ctxt);
7231 model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
7232 region_model model1 (model0);
7234 /* They should be mergeable, and the result should be the same. */
7235 region_model merged (&mgr);
7236 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7237 ASSERT_EQ (model0, merged);
7239 /* In particular, we should have x == y. */
7240 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
7241 tristate (tristate::TS_TRUE));
7245 region_model model0 (&mgr);
7246 region_model model1 (&mgr);
7247 test_region_model_context ctxt;
7248 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7249 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
7250 region_model merged (&mgr);
7251 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7255 region_model model0 (&mgr);
7256 region_model model1 (&mgr);
7257 test_region_model_context ctxt;
7258 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7259 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
7260 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
7261 region_model merged (&mgr);
7262 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7265 // TODO: what can't we merge? need at least one such test
7267 /* TODO: various things
7268 - heap regions
7269 - value merging:
7270 - every combination, but in particular
7271 - pairs of regions
7274 /* Views. */
7276 test_region_model_context ctxt;
7277 region_model model0 (&mgr);
7279 const region *x_reg = model0.get_lvalue (x, &ctxt);
7280 const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
7281 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
7283 region_model model1 (model0);
7284 ASSERT_EQ (model1, model0);
7286 /* They should be mergeable, and the result should be the same. */
7287 region_model merged (&mgr);
7288 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7291 /* Verify that we can merge a model in which a local in an older stack
7292 frame points to a local in a more recent stack frame. */
7294 region_model model0 (&mgr);
7295 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7296 const region *q_in_first_frame = model0.get_lvalue (q, NULL);
7298 /* Push a second frame. */
7299 const region *reg_2nd_frame
7300 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7302 /* Have a pointer in the older frame point to a local in the
7303 more recent frame. */
7304 const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
7305 model0.set_value (q_in_first_frame, sval_ptr, NULL);
7307 /* Verify that it's pointing at the newer frame. */
7308 const region *reg_pointee = sval_ptr->maybe_get_region ();
7309 ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
7311 model0.canonicalize ();
7313 region_model model1 (model0);
7314 ASSERT_EQ (model0, model1);
7316 /* They should be mergeable, and the result should be the same
7317 (after canonicalization, at least). */
7318 region_model merged (&mgr);
7319 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7320 merged.canonicalize ();
7321 ASSERT_EQ (model0, merged);
7324 /* Verify that we can merge a model in which a local points to a global. */
7326 region_model model0 (&mgr);
7327 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7328 model0.set_value (model0.get_lvalue (q, NULL),
7329 model0.get_rvalue (addr_of_y, NULL), NULL);
7331 region_model model1 (model0);
7332 ASSERT_EQ (model0, model1);
7334 /* They should be mergeable, and the result should be the same
7335 (after canonicalization, at least). */
7336 region_model merged (&mgr);
7337 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7338 ASSERT_EQ (model0, merged);
7342 /* Verify that constraints are correctly merged when merging region_model
7343 instances. */
7345 static void
7346 test_constraint_merging ()
7348 tree int_0 = build_int_cst (integer_type_node, 0);
7349 tree int_5 = build_int_cst (integer_type_node, 5);
7350 tree x = build_global_decl ("x", integer_type_node);
7351 tree y = build_global_decl ("y", integer_type_node);
7352 tree z = build_global_decl ("z", integer_type_node);
7353 tree n = build_global_decl ("n", integer_type_node);
7355 region_model_manager mgr;
7356 test_region_model_context ctxt;
7358 /* model0: 0 <= (x == y) < n. */
7359 region_model model0 (&mgr);
7360 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
7361 model0.add_constraint (x, GE_EXPR, int_0, NULL);
7362 model0.add_constraint (x, LT_EXPR, n, NULL);
7364 /* model1: z != 5 && (0 <= x < n). */
7365 region_model model1 (&mgr);
7366 model1.add_constraint (z, NE_EXPR, int_5, NULL);
7367 model1.add_constraint (x, GE_EXPR, int_0, NULL);
7368 model1.add_constraint (x, LT_EXPR, n, NULL);
7370 /* They should be mergeable; the merged constraints should
7371 be: (0 <= x < n). */
7372 program_point point (program_point::origin (mgr));
7373 region_model merged (&mgr);
7374 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7376 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
7377 tristate (tristate::TS_TRUE));
7378 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
7379 tristate (tristate::TS_TRUE));
7381 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
7382 tristate (tristate::TS_UNKNOWN));
7383 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
7384 tristate (tristate::TS_UNKNOWN));
7387 /* Verify that widening_svalue::eval_condition_without_cm works as
7388 expected. */
7390 static void
7391 test_widening_constraints ()
7393 region_model_manager mgr;
7394 program_point point (program_point::origin (mgr));
7395 tree int_0 = build_int_cst (integer_type_node, 0);
7396 tree int_m1 = build_int_cst (integer_type_node, -1);
7397 tree int_1 = build_int_cst (integer_type_node, 1);
7398 tree int_256 = build_int_cst (integer_type_node, 256);
7399 test_region_model_context ctxt;
7400 const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
7401 const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
7402 const svalue *w_zero_then_one_sval
7403 = mgr.get_or_create_widening_svalue (integer_type_node, point,
7404 int_0_sval, int_1_sval);
7405 const widening_svalue *w_zero_then_one
7406 = w_zero_then_one_sval->dyn_cast_widening_svalue ();
7407 ASSERT_EQ (w_zero_then_one->get_direction (),
7408 widening_svalue::DIR_ASCENDING);
7409 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
7410 tristate::TS_FALSE);
7411 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
7412 tristate::TS_FALSE);
7413 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
7414 tristate::TS_UNKNOWN);
7415 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
7416 tristate::TS_UNKNOWN);
7418 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
7419 tristate::TS_FALSE);
7420 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
7421 tristate::TS_UNKNOWN);
7422 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
7423 tristate::TS_UNKNOWN);
7424 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
7425 tristate::TS_UNKNOWN);
7427 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
7428 tristate::TS_TRUE);
7429 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
7430 tristate::TS_UNKNOWN);
7431 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
7432 tristate::TS_UNKNOWN);
7433 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
7434 tristate::TS_UNKNOWN);
7436 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
7437 tristate::TS_TRUE);
7438 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
7439 tristate::TS_TRUE);
7440 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
7441 tristate::TS_UNKNOWN);
7442 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
7443 tristate::TS_UNKNOWN);
7445 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
7446 tristate::TS_FALSE);
7447 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
7448 tristate::TS_UNKNOWN);
7449 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
7450 tristate::TS_UNKNOWN);
7451 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
7452 tristate::TS_UNKNOWN);
7454 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
7455 tristate::TS_TRUE);
7456 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
7457 tristate::TS_UNKNOWN);
7458 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
7459 tristate::TS_UNKNOWN);
7460 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
7461 tristate::TS_UNKNOWN);
7464 /* Verify merging constraints for states simulating successive iterations
7465 of a loop.
7466 Simulate:
7467 for (i = 0; i < 256; i++)
7468 [...body...]
7469 i.e. this gimple:
7470 i_15 = 0;
7471 goto <bb 4>;
7473 <bb 4> :
7474 i_11 = PHI <i_15(2), i_23(3)>
7475 if (i_11 <= 255)
7476 goto <bb 3>;
7477 else
7478 goto [AFTER LOOP]
7480 <bb 3> :
7481 [LOOP BODY]
7482 i_23 = i_11 + 1;
7484 and thus these ops (and resultant states):
7485 i_11 = PHI()
7486 {i_11: 0}
7487 add_constraint (i_11 <= 255) [for the true edge]
7488 {i_11: 0} [constraint was a no-op]
7489 i_23 = i_11 + 1;
7490 {i_22: 1}
7491 i_11 = PHI()
7492 {i_11: WIDENED (at phi, 0, 1)}
7493 add_constraint (i_11 <= 255) [for the true edge]
7494 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
7495 i_23 = i_11 + 1;
7496 {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
7497 i_11 = PHI(); merge with state at phi above
7498 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
7499 [changing meaning of "WIDENED" here]
7500 if (i_11 <= 255)
7501 T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
7502 F: {i_11: 256}
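
/* The test below walks through this sequence explicitly (a reading aid,
   mapping the ops above to the code): model0 {i: 0} and model1 {i: 1}
   stand in for the first two iterations; model2 is their widened merge;
   model4 applies "i = i + 1" to it; and model6 checks that re-merging
   with the state at the phi yields a widening value again.  */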

static void
test_iteration_1 ()
{
  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree int_257 = build_int_cst (integer_type_node, 257);
  tree i = build_global_decl ("i", integer_type_node);

  test_region_model_context ctxt;

  /* model0: i: 0.  */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1.  */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Should merge "i" to a widened value.  */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256.  */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Try merging with the initial state.  */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges.  */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case.  */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged.  */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;"  */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state.  */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again.  */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);

  ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
}
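
/* The next test models this kind of user code (illustrative C fragment,
   not compiled here; the names are hypothetical):

     void *p = malloc (n);
     char *q = (char *)p;
     if (p != NULL)
       {
	 /* Here both p and q should be known to be non-NULL.  */
       }
*/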

/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
   all cast pointers to that region are also known to be non-NULL.  */

static void
test_malloc_constraints ()
{
  region_model_manager mgr;
  region_model model (&mgr);
  tree p = build_global_decl ("p", ptr_type_node);
  tree char_star = build_pointer_type (char_type_node);
  tree q = build_global_decl ("q", char_star);
  tree null_ptr = build_int_cst (ptr_type_node, 0);

  const svalue *size_in_bytes
    = mgr.get_or_create_unknown_svalue (size_type_node);
  const region *reg = model.create_region_for_heap_alloc (size_in_bytes, NULL);
  const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
  model.set_value (model.get_lvalue (p, NULL), sval, NULL);
  model.set_value (q, p, NULL);

  ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);

  model.add_constraint (p, NE_EXPR, null_ptr, NULL);

  ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
}

/* Smoketest of getting and setting the value of a variable.  */

static void
test_var ()
{
  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);

  const region *i_reg = model.get_lvalue (i, NULL);
  ASSERT_EQ (i_reg->get_kind (), RK_DECL);

  /* Reading "i" should give a symbolic "initial value".  */
  const svalue *sval_init = model.get_rvalue (i, NULL);
  ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
  ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
  /* ...and doing it again should give the same "initial value".  */
  ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);

  /* "i = 17;".  */
  model.set_value (i, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_17, NULL));

  /* "i = -3;".  */
  model.set_value (i, int_m3, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_m3, NULL));

  /* Verify get_offset for "i".  */
  {
    region_offset offset = i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), i_reg);
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }
}
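
/* Test reading and writing array elements: concrete bindings for
   "arr[0]" and "arr[1]", their offsets within "arr", and how writes
   through a symbolic index "arr[i]" and concrete writes invalidate
   each other's bindings.  */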

static void
test_array_2 ()
{
  /* "int arr[10];"  */
  tree tlen = size_int (10);
  tree arr_type
    = build_array_type (integer_type_node, build_index_type (tlen));
  tree arr = build_global_decl ("arr", arr_type);

  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);

  tree arr_0 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_0, NULL_TREE, NULL_TREE);
  tree arr_1 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_1, NULL_TREE, NULL_TREE);
  tree arr_i = build4 (ARRAY_REF, integer_type_node,
		       arr, i, NULL_TREE, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* "arr[0] = 17;".  */
  model.set_value (arr_0, int_17, NULL);
  /* "arr[1] = -3;".  */
  model.set_value (arr_1, int_m3, NULL);

  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));

  /* Overwrite a pre-existing binding: "arr[1] = 42;".  */
  model.set_value (arr_1, int_42, NULL);
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));

  /* Verify get_offset for "arr[0]".  */
  {
    const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
    region_offset offset = arr_0_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "arr[1]".  */
  {
    const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
    region_offset offset = arr_1_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }

  /* Verify get_offset for "arr[i]".  */
  {
    const region *arr_i_reg = model.get_lvalue (arr_i, NULL);
    region_offset offset = arr_i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_symbolic_byte_offset ()->get_kind (), SK_BINOP);
  }

  /* "arr[i] = i;" - this should remove the earlier bindings, since a
     write through the symbolic index "i" could alias any element.  */
  model.set_value (arr_i, i, NULL);
  ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
  ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);

  /* "arr[0] = 17;" - this should remove the arr[i] binding, since the
     concrete write could have clobbered whatever arr[i] refers to.  */
  model.set_value (arr_0, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
}

/* Smoketest of dereferencing a pointer via MEM_REF.  */

static void
test_mem_ref ()
{
  /*
    x = 17;
    p = &x;
  */
  tree x = build_global_decl ("x", integer_type_node);
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  /* "*p", expressed as a MEM_REF with a zero byte offset.  */
  tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);

  region_model_manager mgr;
  region_model model (&mgr);

  /* "x = 17;".  */
  model.set_value (x, int_17, NULL);

  /* "p = &x;".  */
  model.set_value (p, addr_of_x, NULL);

  const svalue *sval = model.get_rvalue (star_p, NULL);
  ASSERT_EQ (sval->maybe_get_constant (), int_17);
}

/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
   Analogous to this code:
     void test_6 (int a[10])
     {
       __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       a[3] = 42;
       __analyzer_eval (a[3] == 42); [should be TRUE]
     }
   from data-model-1.c, which looks like this at the gimple level:
       # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       int *_1 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _2 = *_1;             # MEM_REF
       _Bool _3 = _2 == 42;
       int _4 = (int) _3;
       __analyzer_eval (_4);

       # a[3] = 42;
       int *_5 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       *_5 = 42;                 # MEM_REF

       # __analyzer_eval (a[3] == 42); [should be TRUE]
       int *_6 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _7 = *_6;             # MEM_REF
       _Bool _8 = _7 == 42;
       int _9 = (int) _8;
       __analyzer_eval (_9);  */

static void
test_POINTER_PLUS_EXPR_then_MEM_REF ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree a = build_global_decl ("a", int_star);
  tree offset_12 = build_int_cst (size_type_node, 12);
  tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree mem_ref = build2 (MEM_REF, integer_type_node,
			 pointer_plus_expr, offset_0);
  region_model_manager mgr;
  region_model m (&mgr);

  tree int_42 = build_int_cst (integer_type_node, 42);
  m.set_value (mem_ref, int_42, NULL);
  ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
}
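
/* The next two tests hand-build the effects of allocation calls such as
   these (illustrative user code, not compiled here):

     int *p = (int *)malloc (n * 4);   // test_malloc
     int *p = (int *)alloca (n * 4);   // test_alloca

   by calling create_region_for_heap_alloc/create_region_for_alloca
   directly, rather than by simulating actual call statements.  */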

/* Verify that simulating a heap allocation ("malloc") works.  */

static void
test_malloc ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* "p = malloc (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_heap_alloc (size_sval, &ctxt);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);
}

/* Verify that simulating a stack allocation ("alloca") works, and that
   pointers into the alloca-ed region are poisoned once the frame is
   popped.  */

static void
test_alloca ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "test_fn",
			     param_types);
  allocate_struct_function (fndecl, true);

  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame.  */
  const region *frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
			NULL, &ctxt);
  /* "p = alloca (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
  ASSERT_EQ (reg->get_parent_region (), frame_reg);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);

  /* Verify that the pointers to the alloca region are replaced by
     poisoned values when the frame is popped.  */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
}
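
/* A sketch of the semantics exercised below (as evidenced by the
   assertions, not a full specification): SVAL->involves_p (OTHER)
   holds when OTHER is a constituent of SVAL, reflexively and
   transitively; e.g. INIT_VAL(*p) involves INIT_VAL(p), but
   INIT_VAL(p) does not involve INIT_VAL(*p).  */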

/* Verify that svalue::involves_p works.  */

static void
test_involves_p ()
{
  region_model_manager mgr;
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree q = build_global_decl ("q", int_star);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *p_init = model.get_rvalue (p, &ctxt);
  const svalue *q_init = model.get_rvalue (q, &ctxt);

  ASSERT_TRUE (p_init->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (q_init));

  const region *star_p_reg = mgr.get_symbolic_region (p_init);
  const region *star_q_reg = mgr.get_symbolic_region (q_init);

  const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
  const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);

  ASSERT_TRUE (init_star_p->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (init_star_p));
  ASSERT_FALSE (init_star_p->involves_p (q_init));
  ASSERT_TRUE (init_star_q->involves_p (q_init));
  ASSERT_FALSE (init_star_q->involves_p (p_init));
}

/* Run all of the selftests within this file.  */

void
analyzer_region_model_cc_tests ()
{
  test_tree_cmp_on_constants ();
  test_dump ();
  test_struct ();
  test_array_1 ();
  test_get_representative_tree ();
  test_unique_constants ();
  test_unique_unknowns ();
  test_initial_svalue_folding ();
  test_unaryop_svalue_folding ();
  test_binop_svalue_folding ();
  test_sub_svalue_folding ();
  test_descendent_of_p ();
  test_bit_range_regions ();
  test_assignment ();
  test_compound_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();
  test_equality_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  test_state_merging ();
  test_constraint_merging ();
  test_widening_constraints ();
  test_iteration_1 ();
  test_malloc_constraints ();
  test_var ();
  test_array_2 ();
  test_mem_ref ();
  test_POINTER_PLUS_EXPR_then_MEM_REF ();
  test_malloc ();
  test_alloca ();
  test_involves_p ();
}

} // namespace selftest

#endif /* CHECKING_P */

} // namespace ana

#endif /* #if ENABLE_ANALYZER */