/* Classes for modeling the state of memory.
   Copyright (C) 2019-2022 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 #include "config.h"
22 #define INCLUDE_MEMORY
23 #include "system.h"
24 #include "coretypes.h"
25 #include "make-unique.h"
26 #include "tree.h"
27 #include "function.h"
28 #include "basic-block.h"
29 #include "gimple.h"
30 #include "gimple-iterator.h"
31 #include "diagnostic-core.h"
32 #include "graphviz.h"
33 #include "options.h"
34 #include "cgraph.h"
35 #include "tree-dfa.h"
36 #include "stringpool.h"
37 #include "convert.h"
38 #include "target.h"
39 #include "fold-const.h"
40 #include "tree-pretty-print.h"
41 #include "diagnostic-color.h"
42 #include "diagnostic-metadata.h"
43 #include "bitmap.h"
44 #include "selftest.h"
45 #include "analyzer/analyzer.h"
46 #include "analyzer/analyzer-logging.h"
47 #include "ordered-hash-map.h"
48 #include "options.h"
49 #include "cgraph.h"
50 #include "cfg.h"
51 #include "analyzer/supergraph.h"
52 #include "sbitmap.h"
53 #include "analyzer/call-string.h"
54 #include "analyzer/program-point.h"
55 #include "analyzer/store.h"
56 #include "analyzer/region-model.h"
57 #include "analyzer/constraint-manager.h"
58 #include "diagnostic-event-id.h"
59 #include "analyzer/sm.h"
60 #include "diagnostic-event-id.h"
61 #include "analyzer/sm.h"
62 #include "analyzer/pending-diagnostic.h"
63 #include "analyzer/region-model-reachability.h"
64 #include "analyzer/analyzer-selftests.h"
65 #include "analyzer/program-state.h"
66 #include "analyzer/call-summary.h"
67 #include "stor-layout.h"
68 #include "attribs.h"
69 #include "tree-object-size.h"
70 #include "gimple-ssa.h"
71 #include "tree-phinodes.h"
72 #include "tree-ssa-operands.h"
73 #include "ssa-iterators.h"
74 #include "calls.h"
75 #include "is-a.h"
76 #include "gcc-rich-location.h"
78 #if ENABLE_ANALYZER
80 namespace ana {
/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  */

void
dump_tree (pretty_printer *pp, tree t)
{
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}

/* Dump T to PP in language-independent form in quotes, for
   debugging/logging/dumping purposes.  */

void
dump_quoted_tree (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_tree (pp, t);
  pp_end_quote (pp, pp_show_color (pp));
}

/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
   calls within other pp_printf calls.

   default_tree_printer handles 'T' and some other codes by calling
     dump_generic_node (pp, t, 0, TDF_SLIM, 0);
   dump_generic_node calls pp_printf in various places, leading to
   garbled output.

   Ideally pp_printf could be made to be reentrant, but in the meantime
   this function provides a workaround.  */

void
print_quoted_type (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}
/* class region_to_value_map.  */

/* Assignment operator for region_to_value_map.  */

region_to_value_map &
region_to_value_map::operator= (const region_to_value_map &other)
{
  m_hash_map.empty ();
  for (auto iter : other.m_hash_map)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      m_hash_map.put (reg, sval);
    }
  return *this;
}

/* Equality operator for region_to_value_map.  */

bool
region_to_value_map::operator== (const region_to_value_map &other) const
{
  if (m_hash_map.elements () != other.m_hash_map.elements ())
    return false;

  for (auto iter : *this)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      const svalue * const *other_slot = other.get (reg);
      if (other_slot == NULL)
	return false;
      if (sval != *other_slot)
	return false;
    }

  return true;
}
/* Dump this object to PP.  */

void
region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
				 bool multiline) const
{
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (i > 0)
	pp_string (pp, ", ");
      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");
}
/* Dump this object to stderr.  */

DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
  pp_flush (&pp);
}
/* Attempt to merge THIS with OTHER, writing the result
   to OUT.

   For now, write (region, value) mappings that are in common between THIS
   and OTHER to OUT, effectively taking the intersection, rather than
   rejecting differences.  */

bool
region_to_value_map::can_merge_with_p (const region_to_value_map &other,
				       region_to_value_map *out) const
{
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      const svalue * const * other_slot = other.get (iter_reg);
      if (other_slot)
	if (iter_sval == *other_slot)
	  out->put (iter_reg, iter_sval);
    }
  return true;
}
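
/* For example (an illustrative sketch, not from the testsuite): merging
   { reg_A: sval_1, reg_B: sval_2 } with { reg_A: sval_1, reg_B: sval_3 }
   writes just { reg_A: sval_1 } to OUT; the bindings for reg_B disagree
   and are dropped, rather than causing the merge to fail.  */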
/* Purge any state involving SVAL.  */

void
region_to_value_map::purge_state_involving (const svalue *sval)
{
  auto_vec<const region *> to_purge;
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
	to_purge.safe_push (iter_reg);
    }
  for (auto iter : to_purge)
    m_hash_map.remove (iter);
}
/* class region_model.  */

/* Ctor for region_model: construct an "empty" model.  */

region_model::region_model (region_model_manager *mgr)
: m_mgr (mgr), m_store (), m_current_frame (NULL),
  m_dynamic_extents ()
{
  m_constraints = new constraint_manager (mgr);
}

/* region_model's copy ctor.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_dynamic_extents (other.m_dynamic_extents)
{
}

/* region_model's dtor.  */

region_model::~region_model ()
{
  delete m_constraints;
}

/* region_model's assignment operator.  */

region_model &
region_model::operator= (const region_model &other)
{
  /* m_mgr is const.  */
  gcc_assert (m_mgr == other.m_mgr);

  m_store = other.m_store;

  delete m_constraints;
  m_constraints = new constraint_manager (*other.m_constraints);

  m_current_frame = other.m_current_frame;

  m_dynamic_extents = other.m_dynamic_extents;

  return *this;
}
/* Equality operator for region_model.

   Amongst other things this directly compares the stores and the constraint
   managers, so for this to be meaningful both this and OTHER should
   have been canonicalized.  */

bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  if (m_dynamic_extents != other.m_dynamic_extents)
    return false;

  gcc_checking_assert (hash () == other.hash ());

  return true;
}

/* Generate a hash value for this region_model.  */

hashval_t
region_model::hash () const
{
  hashval_t result = m_store.hash ();
  result ^= m_constraints->hash ();
  return result;
}
/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
			  bool multiline) const
{
  /* Dump stack.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
	pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
		      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}
/* Dump a representation of this model to FILE.  */

void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  dump (stderr, simple, true);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::debug () const
{
  dump (true);
}
/* Assert that this object is valid.  */

void
region_model::validate () const
{
  m_store.validate ();
}

/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}

/* Return true if this region_model is in canonical form.  */

bool
region_model::canonicalized_p () const
{
  region_model copy (*this);
  copy.canonicalize ();
  return *this == copy;
}

/* See the comment for store::loop_replay_fixup.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}
/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values.  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
			     const region *src_region)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region)
  {}

  const char *get_kind () const final override { return "poisoned_value_diagnostic"; }

  bool use_of_uninit_p () const final override
  {
    return m_pkind == POISON_KIND_UNINIT;
  }

  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
	    && m_pkind == other.m_pkind
	    && m_src_region == other.m_src_region);
  }

  int get_controlling_option () const final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return OPT_Wanalyzer_use_of_uninitialized_value;
      case POISON_KIND_FREED:
	return OPT_Wanalyzer_use_after_free;
      case POISON_KIND_POPPED_STACK:
	return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
      }
  }

  bool emit (rich_location *rich_loc) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	{
	  diagnostic_metadata m;
	  m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
	  return warning_meta (rich_loc, m, get_controlling_option (),
			       "use of uninitialized value %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_FREED:
	{
	  diagnostic_metadata m;
	  m.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return warning_meta (rich_loc, m, get_controlling_option (),
			       "use after %<free%> of %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_POPPED_STACK:
	{
	  /* TODO: which CWE?  */
	  return warning_at
	    (rich_loc, get_controlling_option (),
	     "dereferencing pointer %qE to within stale stack frame",
	     m_expr);
	}
	break;
      }
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return ev.formatted_print ("use of uninitialized value %qE here",
				   m_expr);
      case POISON_KIND_FREED:
	return ev.formatted_print ("use after %<free%> of %qE here",
				   m_expr);
      case POISON_KIND_POPPED_STACK:
	return ev.formatted_print
	  ("dereferencing pointer %qE to within stale stack frame",
	   m_expr);
      }
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

private:
  tree m_expr;
  enum poison_kind m_pkind;
  const region *m_src_region;
};
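
/* Illustrative sketches of code that can trigger these diagnostics when
   compiled with -fanalyzer (hypothetical examples, not taken from the
   testsuite):

     int use_uninit (void)
     {
       int i;
       return i;     // use of uninitialized value 'i' (POISON_KIND_UNINIT)
     }

     int use_after_free (int *p)
     {
       free (p);
       return *p;    // use after 'free' of 'p' (POISON_KIND_FREED)
     }
*/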
/* A subclass of pending_diagnostic for complaining about shifts
   by negative counts.  */

class shift_count_negative_diagnostic
: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
{
public:
  shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
  : m_assign (assign), m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_negative_diagnostic";
  }

  bool operator== (const shift_count_negative_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_negative;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "shift by negative count (%qE)", m_count_cst);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by negative amount here (%qE)",
			       m_count_cst);
  }

private:
  const gassign *m_assign;
  tree m_count_cst;
};
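
/* A hypothetical example that this diagnostic would flag, where the
   negative count reaches the shift via path-sensitive constant
   propagation rather than as a literal:

     int shl (int x)
     {
       int amt = -1;
       return x << amt;   // shift by negative count '-1'
     }
*/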
/* A subclass of pending_diagnostic for complaining about shifts
   by counts >= the width of the operand type.  */

class shift_count_overflow_diagnostic
: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
{
public:
  shift_count_overflow_diagnostic (const gassign *assign,
				   int operand_precision,
				   tree count_cst)
  : m_assign (assign), m_operand_precision (operand_precision),
    m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_overflow_diagnostic";
  }

  bool operator== (const shift_count_overflow_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && m_operand_precision == other.m_operand_precision
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_overflow;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "shift by count (%qE) >= precision of type (%qi)",
		       m_count_cst, m_operand_precision);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by count %qE here", m_count_cst);
  }

private:
  const gassign *m_assign;
  int m_operand_precision;
  tree m_count_cst;
};
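
/* A hypothetical example that this diagnostic would flag, on a target
   where int has 32 bits of precision:

     int shl (int x)
     {
       int amt = 32;
       return x << amt;   // shift by count '32' >= precision of type '32'
     }
*/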
/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL.  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
				  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      return NULL;

    case POINTER_PLUS_EXPR:
      {
	/* e.g. "_1 = a_10(D) + 12;" */
	tree ptr = rhs1;
	tree offset = gimple_assign_rhs2 (assign);

	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	/* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
	   is an integer of type sizetype".  */
	offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					ptr_sval, offset_sval);
	return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
	/* e.g. "_1 = p_2(D) - q_3(D);".  */
	tree rhs2 = gimple_assign_rhs2 (assign);
	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	// TODO: perhaps fold to zero if they're known to be equal?

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    /* Assignments of the form
	 set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR;  */
    case VAR_DECL: /* LHS = VAR;  */
    case PARM_DECL: /* LHS = VAR;  */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary ops.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
	return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (TREE_TYPE (lhs) == boolean_type_node)
	  {
	    /* Consider constraints between svalues.  */
	    tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
	    if (t.is_known ())
	      return m_mgr->get_or_create_constant_svalue
		(t.is_true () ? boolean_true_node : boolean_false_node);
	  }

	/* Otherwise, generate a symbolic binary op.  */
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
	/* Binary ops.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
	  {
	    /* "INT34-C. Do not shift an expression by a negative number of bits
	       or by greater than or equal to the number of bits that exist in
	       the operand."  */
	    if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
	      if (TREE_CODE (rhs2_cst) == INTEGER_CST)
		{
		  if (tree_int_cst_sgn (rhs2_cst) < 0)
		    ctxt->warn
		      (make_unique<shift_count_negative_diagnostic>
			 (assign, rhs2_cst));
		  else if (compare_tree_int (rhs2_cst,
					     TYPE_PRECISION (TREE_TYPE (rhs1)))
			   >= 0)
		    ctxt->warn
		      (make_unique<shift_count_overflow_diagnostic>
			 (assign,
			  int (TYPE_PRECISION (TREE_TYPE (rhs1))),
			  rhs2_cst));
		}
	  }

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}
/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   of the form:
     ((A OR-IF B) OR-IF C)
   and:
     ((A AND-IF B) AND-IF C)
   where evaluating B is redundant, but could involve simple accesses of
   uninitialized locals.

   When optimization is turned on the FE can immediately fold compound
   conditionals.  Specifically, c_parser_condition parses this condition:
     ((A OR-IF B) OR-IF C)
   and calls c_fully_fold on the condition.
   Within c_fully_fold, fold_truth_andor is called, which bails when
   optimization is off, but if any optimization is turned on can convert the
     ((A OR-IF B) OR-IF C)
   into:
     ((A OR B) OR-IF C)
   for sufficiently simple B
   i.e. the inner OR-IF becomes an OR.
   At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
   giving this for the inner condition:
      tmp = A | B;
      if (tmp)
   thus effectively synthesizing a redundant access of B when optimization
   is turned on, when compared to:
      if (A) goto L1; else goto L4;
      L1: if (B) goto L2; else goto L4;
      L2: if (C) goto L3; else goto L4;
   for the unoptimized case.

   Return true if CTXT appears to be handling such a short-circuitable stmt,
   such as the def-stmt for B for the:
      tmp = A | B;
   case above, for the case where A is true and thus B would have been
   short-circuited without optimization, using MODEL for the value of A.  */

static bool
within_short_circuited_stmt_p (const region_model *model,
			       const gassign *assign_stmt)
{
  /* We must have an assignment to a temporary of _Bool type.  */
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_TYPE (lhs) != boolean_type_node)
    return false;
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  if (SSA_NAME_VAR (lhs) != NULL_TREE)
    return false;

  /* The temporary bool must be used exactly once: as the second arg of
     a BIT_IOR_EXPR or BIT_AND_EXPR.  */
  use_operand_p use_op;
  gimple *use_stmt;
  if (!single_imm_use (lhs, &use_op, &use_stmt))
    return false;
  const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
  if (!use_assign)
    return false;
  enum tree_code op = gimple_assign_rhs_code (use_assign);
  if (!(op == BIT_IOR_EXPR || op == BIT_AND_EXPR))
    return false;
  if (!(gimple_assign_rhs1 (use_assign) != lhs
	&& gimple_assign_rhs2 (use_assign) == lhs))
    return false;

  /* The first arg of the bitwise stmt must have a known value in MODEL
     that implies that the value of the second arg doesn't matter, i.e.
     1 for bitwise or, 0 for bitwise and.  */
  tree other_arg = gimple_assign_rhs1 (use_assign);
  /* Use a NULL ctxt here to avoid generating warnings.  */
  const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
  tree other_arg_cst = other_arg_sval->maybe_get_constant ();
  if (!other_arg_cst)
    return false;
  switch (op)
    {
    default:
      gcc_unreachable ();
    case BIT_IOR_EXPR:
      if (zerop (other_arg_cst))
	return false;
      break;
    case BIT_AND_EXPR:
      if (!zerop (other_arg_cst))
	return false;
      break;
    }

  /* All tests passed.  We appear to be in a stmt that generates a boolean
     temporary with a value that won't matter.  */
  return true;
}
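
/* A hypothetical source-level example of the situation handled above,
   when compiled with optimization enabled:

     int a = 1;
     int b;               // uninitialized
     if (a || b || c)     // inner "a || b" folded to "tmp = a | b;"
       ...

   Reading 'b' here is harmless: 'a' being 1 already decides the inner
   disjunction, so without this workaround the analyzer would warn about
   the access of 'b' that the folding synthesized.  */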
/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   seen with -ftrivial-auto-var-init=.

   -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.

   If the address of the var is taken, gimplification will give us
   something like:

     _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
     len = _1;

   The result of DEFERRED_INIT will be an uninit value; we don't
   want to emit a false positive for "len = _1;"

   Return true if ASSIGN_STMT is such a stmt.  */

static bool
due_to_ifn_deferred_init_p (const gassign *assign_stmt)
{
  /* We must have an assignment to a decl from an SSA name that's the
     result of a IFN_DEFERRED_INIT call.  */
  if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
    return false;
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_CODE (lhs) != VAR_DECL)
    return false;
  tree rhs = gimple_assign_rhs1 (assign_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
  const gcall *call = dyn_cast <const gcall *> (def_stmt);
  if (!call)
    return false;
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return true;
  return false;
}
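
/* For example (a hypothetical sketch), compiling with
   -ftrivial-auto-var-init=zero:

     int len;
     set_len (&len);   // address-taken, forcing the gimple pattern above

   gives gimple along the lines of "_1 = .DEFERRED_INIT (...); len = _1;",
   and the copy into 'len' should not itself be reported as a use of an
   uninitialized value.  */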
/* Check for SVAL being poisoned, adding a warning to CTXT.
   Return SVAL, or, if a warning is added, another value, to avoid
   repeatedly complaining about the same poisoned value in followup code.  */

const svalue *
region_model::check_for_poison (const svalue *sval,
				tree expr,
				region_model_context *ctxt) const
{
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();

      /* Ignore uninitialized uses of empty types; there's nothing
	 to initialize.  */
      if (pkind == POISON_KIND_UNINIT
	  && sval->get_type ()
	  && is_empty_type (sval->get_type ()))
	return sval;

      if (pkind == POISON_KIND_UNINIT)
	if (const gimple *curr_stmt = ctxt->get_stmt ())
	  if (const gassign *assign_stmt
		= dyn_cast <const gassign *> (curr_stmt))
	    {
	      /* Special case to avoid certain false positives.  */
	      if (within_short_circuited_stmt_p (this, assign_stmt))
		return sval;

	      /* Special case to avoid false positive on
		 -ftrivial-auto-var-init=.  */
	      if (due_to_ifn_deferred_init_p (assign_stmt))
		return sval;
	    }

      /* If we have an SSA name for a temporary, we don't want to print
	 '<unknown>'.
	 Poisoned values are shared by type, and so we can't reconstruct
	 the tree other than via the def stmts, using
	 fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      const region *src_region = NULL;
      if (pkind == POISON_KIND_UNINIT)
	src_region = get_region_for_poisoned_expr (expr);
      if (ctxt->warn (make_unique<poisoned_value_diagnostic> (diag_arg,
							      pkind,
							      src_region)))
	{
	  /* We only want to report use of a poisoned value at the first
	     place it gets used; return an unknown value to avoid generating
	     a chain of followup warnings.  */
	  sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
	}

      return sval;
    }

  return sval;
}
/* Attempt to get a region for describing EXPR, the source region of
   a poisoned_svalue, for use in a poisoned_value_diagnostic.
   Return NULL if there is no good region to use.  */

const region *
region_model::get_region_for_poisoned_expr (tree expr) const
{
  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree decl = SSA_NAME_VAR (expr);
      if (decl && DECL_P (decl))
	expr = decl;
      else
	return NULL;
    }
  return get_lvalue (expr, NULL);
}
/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      check_for_poison (sval, expr, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
	if (0)
	  sorry_at (assign->location, "unhandled assignment op: %qs",
		    get_tree_code_name (op));
	const svalue *unknown_sval
	  = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
	set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
	if (TREE_CLOBBER_P (rhs1))
	  {
	    /* e.g. "x ={v} {CLOBBER};"  */
	    clobber_region (lhs_reg);
	  }
	else
	  {
	    /* Any CONSTRUCTOR that survives to this point is either
	       just a zero-init of everything, or a vector.  */
	    if (!CONSTRUCTOR_NO_CLEARING (rhs1))
	      zero_fill_region (lhs_reg);
	    unsigned ix;
	    tree index;
	    tree val;
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
	      {
		gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
		if (!index)
		  index = build_int_cst (integer_type_node, ix);
		gcc_assert (TREE_CODE (index) == INTEGER_CST);
		const svalue *index_sval
		  = m_mgr->get_or_create_constant_svalue (index);
		gcc_assert (index_sval);
		const region *sub_reg
		  = m_mgr->get_element_region (lhs_reg,
					       TREE_TYPE (val),
					       index_sval);
		const svalue *val_sval = get_rvalue (val, ctxt);
		set_value (sub_reg, val_sval, ctxt);
	      }
	  }
      }
      break;

    case STRING_CST:
      {
	/* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	m_store.set_value (m_mgr->get_store_manager (), lhs_reg, rhs_sval,
			   ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}
/* A pending_diagnostic subclass for implementing "__analyzer_dump_path".  */

class dump_path_diagnostic
: public pending_diagnostic_subclass<dump_path_diagnostic>
{
public:
  int get_controlling_option () const final override
  {
    return 0;
  }

  bool emit (rich_location *richloc) final override
  {
    inform (richloc, "path");
    return true;
  }

  const char *get_kind () const final override { return "dump_path_diagnostic"; }

  bool operator== (const dump_path_diagnostic &) const
  {
    return true;
  }
};
/* Handle the pre-sm-state part of STMT, modifying this object in-place.
   Write true to *OUT_TERMINATE_PATH if the path should be terminated.
   Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
   side effects.  */

void
region_model::on_stmt_pre (const gimple *stmt,
			   bool *out_terminate_path,
			   bool *out_unknown_side_effects,
			   region_model_context *ctxt)
{
  switch (gimple_code (stmt))
    {
    default:
      /* No-op for now.  */
      break;

    case GIMPLE_ASSIGN:
      {
	const gassign *assign = as_a <const gassign *> (stmt);
	on_assignment (assign, ctxt);
      }
      break;

    case GIMPLE_ASM:
      {
	const gasm *asm_stmt = as_a <const gasm *> (stmt);
	on_asm_stmt (asm_stmt, ctxt);
      }
      break;

    case GIMPLE_CALL:
      {
	/* Track whether we have a gcall to a function that's not recognized by
	   anything, for which we don't have a function body, or for which we
	   don't know the fndecl.  */
	const gcall *call = as_a <const gcall *> (stmt);

	/* Debugging/test support.  */
	if (is_special_named_call_p (call, "__analyzer_describe", 2))
	  impl_call_analyzer_describe (call, ctxt);
	else if (is_special_named_call_p (call, "__analyzer_dump_capacity", 1))
	  impl_call_analyzer_dump_capacity (call, ctxt);
	else if (is_special_named_call_p (call, "__analyzer_dump_escaped", 0))
	  impl_call_analyzer_dump_escaped (call);
	else if (is_special_named_call_p (call, "__analyzer_dump_path", 0))
	  {
	    /* Handle the builtin "__analyzer_dump_path" by queuing a
	       diagnostic at this exploded_node.  */
	    ctxt->warn (make_unique<dump_path_diagnostic> ());
	  }
	else if (is_special_named_call_p (call, "__analyzer_dump_region_model",
					  0))
	  {
	    /* Handle the builtin "__analyzer_dump_region_model" by dumping
	       the region model's state to stderr.  */
	    dump (false);
	  }
	else if (is_special_named_call_p (call, "__analyzer_eval", 1))
	  impl_call_analyzer_eval (call, ctxt);
	else if (is_special_named_call_p (call, "__analyzer_break", 0))
	  {
	    /* Handle the builtin "__analyzer_break" by triggering a
	       breakpoint.  */
	    /* TODO: is there a good cross-platform way to do this?  */
	    raise (SIGINT);
	  }
	else if (is_special_named_call_p (call,
					  "__analyzer_dump_exploded_nodes",
					  1))
	  {
	    /* This is handled elsewhere.  */
	  }
	else if (is_special_named_call_p (call, "__analyzer_get_unknown_ptr",
					  0))
	  {
	    call_details cd (call, this, ctxt);
	    impl_call_analyzer_get_unknown_ptr (cd);
	  }
	else
	  *out_unknown_side_effects = on_call_pre (call, ctxt,
						   out_terminate_path);
      }
      break;

    case GIMPLE_RETURN:
      {
	const greturn *return_ = as_a <const greturn *> (stmt);
	on_return (return_, ctxt);
      }
      break;
    }
}
/* Abstract base class for all out-of-bounds warnings with concrete values.  */

class out_of_bounds : public pending_diagnostic_subclass<out_of_bounds>
{
public:
  out_of_bounds (const region *reg, tree diag_arg,
		 byte_range out_of_bounds_range)
  : m_reg (reg), m_diag_arg (diag_arg),
    m_out_of_bounds_range (out_of_bounds_range)
  {}

  const char *get_kind () const final override
  {
    return "out_of_bounds_diagnostic";
  }

  bool operator== (const out_of_bounds &other) const
  {
    return m_reg == other.m_reg
	   && m_out_of_bounds_range == other.m_out_of_bounds_range
	   && pending_diagnostic::same_tree_p (m_diag_arg, other.m_diag_arg);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_out_of_bounds;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_reg);
  }

protected:
  const region *m_reg;
  tree m_diag_arg;
  byte_range m_out_of_bounds_range;
};
/* Abstract subclass to complain about out-of-bounds accesses
   past the end of the buffer.  */

class past_the_end : public out_of_bounds
{
public:
  past_the_end (const region *reg, tree diag_arg, byte_range range,
		tree byte_bound)
  : out_of_bounds (reg, diag_arg, range), m_byte_bound (byte_bound)
  {}

  bool operator== (const past_the_end &other) const
  {
    return out_of_bounds::operator== (other)
	   && pending_diagnostic::same_tree_p (m_byte_bound,
					       other.m_byte_bound);
  }

  label_text
  describe_region_creation_event (const evdesc::region_creation &ev) final
  override
  {
    if (m_byte_bound && TREE_CODE (m_byte_bound) == INTEGER_CST)
      return ev.formatted_print ("capacity is %E bytes", m_byte_bound);

    return label_text ();
  }

protected:
  tree m_byte_bound;
};
/* Concrete subclass to complain about buffer overflows.  */

class buffer_overflow : public past_the_end
{
public:
  buffer_overflow (const region *reg, tree diag_arg,
		   byte_range range, tree byte_bound)
  : past_the_end (reg, diag_arg, range, byte_bound)
  {}

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    bool warned;
    switch (m_reg->get_memory_space ())
      {
      default:
	m.add_cwe (787);
	warned = warning_meta (rich_loc, m, get_controlling_option (),
			       "buffer overflow");
	break;
      case MEMSPACE_STACK:
	m.add_cwe (121);
	warned = warning_meta (rich_loc, m, get_controlling_option (),
			       "stack-based buffer overflow");
	break;
      case MEMSPACE_HEAP:
	m.add_cwe (122);
	warned = warning_meta (rich_loc, m, get_controlling_option (),
			       "heap-based buffer overflow");
	break;
      }

    if (warned)
      {
	char num_bytes_past_buf[WIDE_INT_PRINT_BUFFER_SIZE];
	print_dec (m_out_of_bounds_range.m_size_in_bytes,
		   num_bytes_past_buf, UNSIGNED);
	if (m_diag_arg)
	  inform (rich_loc->get_loc (), "write is %s bytes past the end"
		  " of %qE", num_bytes_past_buf,
		  m_diag_arg);
	else
	  inform (rich_loc->get_loc (), "write is %s bytes past the end"
		  " of the region",
		  num_bytes_past_buf);
      }

    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev)
    final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds write at byte %s but %qE"
				     " ends at byte %E", start_buf, m_diag_arg,
				     m_byte_bound);
	return ev.formatted_print ("out-of-bounds write at byte %s but region"
				   " ends at byte %E", start_buf,
				   m_byte_bound);
      }
    else
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds write from byte %s till"
				     " byte %s but %qE ends at byte %E",
				     start_buf, end_buf, m_diag_arg,
				     m_byte_bound);
	return ev.formatted_print ("out-of-bounds write from byte %s till"
				   " byte %s but region ends at byte %E",
				   start_buf, end_buf, m_byte_bound);
      }
  }
};
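
/* A hypothetical example that this diagnostic would flag as a
   stack-based buffer overflow (CWE-121) under -fanalyzer:

     char buf[4];
     buf[4] = 'x';   // one byte past the end of 'buf'
*/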
/* Concrete subclass to complain about buffer overreads.  */

class buffer_overread : public past_the_end
{
public:
  buffer_overread (const region *reg, tree diag_arg,
		   byte_range range, tree byte_bound)
  : past_the_end (reg, diag_arg, range, byte_bound)
  {}

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (126);
    bool warned = warning_meta (rich_loc, m, get_controlling_option (),
				"buffer overread");

    if (warned)
      {
	char num_bytes_past_buf[WIDE_INT_PRINT_BUFFER_SIZE];
	print_dec (m_out_of_bounds_range.m_size_in_bytes,
		   num_bytes_past_buf, UNSIGNED);
	if (m_diag_arg)
	  inform (rich_loc->get_loc (), "read is %s bytes past the end"
		  " of %qE", num_bytes_past_buf,
		  m_diag_arg);
	else
	  inform (rich_loc->get_loc (), "read is %s bytes past the end"
		  " of the region",
		  num_bytes_past_buf);
      }

    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev)
    final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds read at byte %s but %qE"
				     " ends at byte %E", start_buf, m_diag_arg,
				     m_byte_bound);
	return ev.formatted_print ("out-of-bounds read at byte %s but region"
				   " ends at byte %E", start_buf,
				   m_byte_bound);
      }
    else
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds read from byte %s till"
				     " byte %s but %qE ends at byte %E",
				     start_buf, end_buf, m_diag_arg,
				     m_byte_bound);
	return ev.formatted_print ("out-of-bounds read from byte %s till"
				   " byte %s but region ends at byte %E",
				   start_buf, end_buf, m_byte_bound);
      }
  }
};
/* Concrete subclass to complain about buffer underflows.  */

class buffer_underflow : public out_of_bounds
{
public:
  buffer_underflow (const region *reg, tree diag_arg, byte_range range)
  : out_of_bounds (reg, diag_arg, range)
  {}

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (124);
    return warning_meta (rich_loc, m, get_controlling_option (),
			 "buffer underflow");
  }

  label_text describe_final_event (const evdesc::final_event &ev)
    final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds write at byte %s but %qE"
				     " starts at byte 0", start_buf,
				     m_diag_arg);
	return ev.formatted_print ("out-of-bounds write at byte %s but region"
				   " starts at byte 0", start_buf);
      }
    else
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds write from byte %s till"
				     " byte %s but %qE starts at byte 0",
				     start_buf, end_buf, m_diag_arg);
	return ev.formatted_print ("out-of-bounds write from byte %s till"
				   " byte %s but region starts at byte 0",
				   start_buf, end_buf);
      }
  }
};
/* Concrete subclass to complain about buffer underreads.  */

class buffer_underread : public out_of_bounds
{
public:
  buffer_underread (const region *reg, tree diag_arg, byte_range range)
  : out_of_bounds (reg, diag_arg, range)
  {}

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (127);
    return warning_meta (rich_loc, m, get_controlling_option (),
			 "buffer underread");
  }

  label_text describe_final_event (const evdesc::final_event &ev)
    final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds read at byte %s but %qE"
				     " starts at byte 0", start_buf,
				     m_diag_arg);
	return ev.formatted_print ("out-of-bounds read at byte %s but region"
				   " starts at byte 0", start_buf);
      }
    else
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds read from byte %s till"
				     " byte %s but %qE starts at byte 0",
				     start_buf, end_buf, m_diag_arg);
	return ev.formatted_print ("out-of-bounds read from byte %s till"
				   " byte %s but region starts at byte 0",
				   start_buf, end_buf);
      }
  }
};
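
/* A hypothetical example of the underread/underflow cases above:

     char buf[4];
     char c = buf[-1];   // read before the start of 'buf'
     buf[-1] = c;        // write before the start of 'buf'
*/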
/* Abstract class to complain about out-of-bounds read/writes where
   the values are symbolic.  */

class symbolic_past_the_end
: public pending_diagnostic_subclass<symbolic_past_the_end>
{
public:
  symbolic_past_the_end (const region *reg, tree diag_arg, tree offset,
			 tree num_bytes, tree capacity)
  : m_reg (reg), m_diag_arg (diag_arg), m_offset (offset),
    m_num_bytes (num_bytes), m_capacity (capacity)
  {}

  const char *get_kind () const final override
  {
    return "symbolic_past_the_end";
  }

  bool operator== (const symbolic_past_the_end &other) const
  {
    return m_reg == other.m_reg
	   && pending_diagnostic::same_tree_p (m_diag_arg, other.m_diag_arg)
	   && pending_diagnostic::same_tree_p (m_offset, other.m_offset)
	   && pending_diagnostic::same_tree_p (m_num_bytes, other.m_num_bytes)
	   && pending_diagnostic::same_tree_p (m_capacity, other.m_capacity);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_out_of_bounds;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_reg);
  }

  label_text
  describe_region_creation_event (const evdesc::region_creation &ev) final
  override
  {
    if (m_capacity)
      return ev.formatted_print ("capacity is %qE bytes", m_capacity);

    return label_text ();
  }

  label_text
  describe_final_event (const evdesc::final_event &ev) final override
  {
    const char *byte_str;
    if (pending_diagnostic::same_tree_p (m_num_bytes, integer_one_node))
      byte_str = "byte";
    else
      byte_str = "bytes";

    if (m_offset)
      {
	if (m_num_bytes && TREE_CODE (m_num_bytes) == INTEGER_CST)
	  {
	    if (m_diag_arg)
	      return ev.formatted_print ("%s of %E %s at offset %qE"
					 " exceeds %qE", m_dir_str,
					 m_num_bytes, byte_str,
					 m_offset, m_diag_arg);
	    else
	      return ev.formatted_print ("%s of %E %s at offset %qE"
					 " exceeds the buffer", m_dir_str,
					 m_num_bytes, byte_str, m_offset);
	  }
	else if (m_num_bytes)
	  {
	    if (m_diag_arg)
	      return ev.formatted_print ("%s of %qE %s at offset %qE"
					 " exceeds %qE", m_dir_str,
					 m_num_bytes, byte_str,
					 m_offset, m_diag_arg);
	    else
	      return ev.formatted_print ("%s of %qE %s at offset %qE"
					 " exceeds the buffer", m_dir_str,
					 m_num_bytes, byte_str, m_offset);
	  }
	else
	  {
	    if (m_diag_arg)
	      return ev.formatted_print ("%s at offset %qE exceeds %qE",
					 m_dir_str, m_offset, m_diag_arg);
	    else
	      return ev.formatted_print ("%s at offset %qE exceeds the"
					 " buffer", m_dir_str, m_offset);
	  }
      }
    if (m_diag_arg)
      return ev.formatted_print ("out-of-bounds %s on %qE",
				 m_dir_str, m_diag_arg);
    return ev.formatted_print ("out-of-bounds %s", m_dir_str);
  }

protected:
  const region *m_reg;
  tree m_diag_arg;
  tree m_offset;
  tree m_num_bytes;
  tree m_capacity;
  const char *m_dir_str;
};
/* Concrete subclass to complain about overflows with symbolic values.  */

class symbolic_buffer_overflow : public symbolic_past_the_end
{
public:
  symbolic_buffer_overflow (const region *reg, tree diag_arg, tree offset,
			    tree num_bytes, tree capacity)
  : symbolic_past_the_end (reg, diag_arg, offset, num_bytes, capacity)
  {
    m_dir_str = "write";
  }

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    switch (m_reg->get_memory_space ())
      {
      default:
	m.add_cwe (787);
	return warning_meta (rich_loc, m, get_controlling_option (),
			     "buffer overflow");
      case MEMSPACE_STACK:
	m.add_cwe (121);
	return warning_meta (rich_loc, m, get_controlling_option (),
			     "stack-based buffer overflow");
      case MEMSPACE_HEAP:
	m.add_cwe (122);
	return warning_meta (rich_loc, m, get_controlling_option (),
			     "heap-based buffer overflow");
      }
  }
};

/* Concrete subclass to complain about overreads with symbolic values.  */

class symbolic_buffer_overread : public symbolic_past_the_end
{
public:
  symbolic_buffer_overread (const region *reg, tree diag_arg, tree offset,
			    tree num_bytes, tree capacity)
  : symbolic_past_the_end (reg, diag_arg, offset, num_bytes, capacity)
  {
    m_dir_str = "read";
  }

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (126);
    return warning_meta (rich_loc, m, get_controlling_option (),
			 "buffer overread");
  }
};
/* Check whether an access is past the end of the BASE_REG.  */

void
region_model::check_symbolic_bounds (const region *base_reg,
				     const svalue *sym_byte_offset,
				     const svalue *num_bytes_sval,
				     const svalue *capacity,
				     enum access_direction dir,
				     region_model_context *ctxt) const
{
  gcc_assert (ctxt);

  const svalue *next_byte
    = m_mgr->get_or_create_binop (num_bytes_sval->get_type (), PLUS_EXPR,
				  sym_byte_offset, num_bytes_sval);

  if (eval_condition (next_byte, GT_EXPR, capacity).is_true ())
    {
      tree diag_arg = get_representative_tree (base_reg);
      tree offset_tree = get_representative_tree (sym_byte_offset);
      tree num_bytes_tree = get_representative_tree (num_bytes_sval);
      tree capacity_tree = get_representative_tree (capacity);
      switch (dir)
	{
	default:
	  gcc_unreachable ();
	  break;
	case DIR_READ:
	  ctxt->warn (make_unique<symbolic_buffer_overread> (base_reg,
							     diag_arg,
							     offset_tree,
							     num_bytes_tree,
							     capacity_tree));
	  break;
	case DIR_WRITE:
	  ctxt->warn (make_unique<symbolic_buffer_overflow> (base_reg,
							     diag_arg,
							     offset_tree,
							     num_bytes_tree,
							     capacity_tree));
	  break;
	}
    }
}
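
/* A hypothetical example of a symbolic out-of-bounds write: the capacity
   and the offset are only known symbolically, via 'n', yet
   offset + size (n + 1) provably exceeds capacity (n):

     void f (size_t n)
     {
       char *p = malloc (n);
       if (p)
	 p[n] = '\0';   // write at offset 'n' exceeds the 'n'-byte buffer
     }
*/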
/* If SVAL wraps a constant INTEGER_CST, return it as a tree;
   otherwise return NULL_TREE.  */

static tree
maybe_get_integer_cst_tree (const svalue *sval)
{
  tree cst_tree = sval->maybe_get_constant ();
  if (cst_tree && TREE_CODE (cst_tree) == INTEGER_CST)
    return cst_tree;

  return NULL_TREE;
}
/* May complain when the access on REG is out-of-bounds.  */

void
region_model::check_region_bounds (const region *reg,
				   enum access_direction dir,
				   region_model_context *ctxt) const
{
  gcc_assert (ctxt);

  /* Get the offset.  */
  region_offset reg_offset = reg->get_offset (m_mgr);
  const region *base_reg = reg_offset.get_base_region ();

  /* Bail out on symbolic regions.
     (e.g. because the analyzer did not see previous offsets on the latter,
     it might think that a negative access is before the buffer).  */
  if (base_reg->symbolic_p ())
    return;

  /* Find out how many bytes were accessed.  */
  const svalue *num_bytes_sval = reg->get_byte_size_sval (m_mgr);
  tree num_bytes_tree = maybe_get_integer_cst_tree (num_bytes_sval);
  /* Bail out if 0 bytes are accessed.  */
  if (num_bytes_tree && zerop (num_bytes_tree))
    return;

  /* Get the capacity of the buffer.  */
  const svalue *capacity = get_capacity (base_reg);
  tree cst_capacity_tree = maybe_get_integer_cst_tree (capacity);

  /* The constant offset from a pointer is represented internally as a sizetype
     but should be interpreted as a signed value here.  The statement below
     converts the offset from bits to bytes and then to a signed integer with
     the same precision the sizetype has on the target system.

     For example, this is needed for out-of-bounds-3.c test1 to pass when
     compiled with a 64-bit gcc build targeting 32-bit systems.  */
  byte_offset_t offset;
  if (!reg_offset.symbolic_p ())
    offset = wi::sext (reg_offset.get_bit_offset () >> LOG2_BITS_PER_UNIT,
		       TYPE_PRECISION (size_type_node));

  /* If either the offset or the number of bytes accessed are symbolic,
     we have to reason about symbolic values.  */
  if (reg_offset.symbolic_p () || !num_bytes_tree)
    {
      const svalue *byte_offset_sval;
      if (!reg_offset.symbolic_p ())
	{
	  tree offset_tree = wide_int_to_tree (integer_type_node, offset);
	  byte_offset_sval
	    = m_mgr->get_or_create_constant_svalue (offset_tree);
	}
      else
	byte_offset_sval = reg_offset.get_symbolic_byte_offset ();
      check_symbolic_bounds (base_reg, byte_offset_sval, num_bytes_sval,
			     capacity, dir, ctxt);
      return;
    }

  /* Otherwise continue to check with concrete values.  */
  byte_range out (0, 0);
  /* NUM_BYTES_TREE should always be interpreted as unsigned.  */
  byte_offset_t num_bytes_unsigned = wi::to_offset (num_bytes_tree);
  byte_range read_bytes (offset, num_bytes_unsigned);
  /* If read_bytes has a subset < 0, we do have an underflow.  */
  if (read_bytes.falls_short_of_p (0, &out))
    {
      tree diag_arg = get_representative_tree (base_reg);
      switch (dir)
	{
	default:
	  gcc_unreachable ();
	  break;
	case DIR_READ:
	  ctxt->warn (make_unique<buffer_underread> (reg, diag_arg, out));
	  break;
	case DIR_WRITE:
	  ctxt->warn (make_unique<buffer_underflow> (reg, diag_arg, out));
	  break;
	}
    }

  /* For accesses past the end, we do need a concrete capacity.  No need to
     do a symbolic check here because the inequality check does not reason
     whether constants are greater than symbolic values.  */
  if (!cst_capacity_tree)
    return;

  byte_range buffer (0, wi::to_offset (cst_capacity_tree));
  /* If READ_BYTES exceeds BUFFER, we do have an overflow.  */
  if (read_bytes.exceeds_p (buffer, &out))
    {
      tree byte_bound = wide_int_to_tree (size_type_node,
					  buffer.get_next_byte_offset ());
      tree diag_arg = get_representative_tree (base_reg);

      switch (dir)
	{
	default:
	  gcc_unreachable ();
	  break;
	case DIR_READ:
	  ctxt->warn (make_unique<buffer_overread> (reg, diag_arg,
						    out, byte_bound));
	  break;
	case DIR_WRITE:
	  ctxt->warn (make_unique<buffer_overflow> (reg, diag_arg,
						    out, byte_bound));
	  break;
	}
    }
}
/* Ensure that all arguments at the call described by CD are checked
   for poisoned values, by calling get_rvalue on each argument.  */

void
region_model::check_call_args (const call_details &cd) const
{
  for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
    cd.get_arg_svalue (arg_idx);
}
/* Return true if CD is known to be a call to a function with
   __attribute__((const)).  */

static bool
const_fn_p (const call_details &cd)
{
  tree fndecl = cd.get_fndecl_for_call ();
  if (!fndecl)
    return false;
  gcc_assert (DECL_P (fndecl));
  return TREE_READONLY (fndecl);
}

/* If this CD is known to be a call to a function with
   __attribute__((const)), attempt to get a const_fn_result_svalue
   based on the arguments, or return NULL otherwise.  */

static const svalue *
maybe_get_const_fn_result (const call_details &cd)
{
  if (!const_fn_p (cd))
    return NULL;

  unsigned num_args = cd.num_args ();
  if (num_args > const_fn_result_svalue::MAX_INPUTS)
    /* Too many arguments.  */
    return NULL;

  auto_vec<const svalue *> inputs (num_args);
  for (unsigned arg_idx = 0; arg_idx < num_args; arg_idx++)
    {
      const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
      if (!arg_sval->can_have_associated_state_p ())
	return NULL;
      inputs.quick_push (arg_sval);
    }

  region_model_manager *mgr = cd.get_manager ();
  const svalue *sval
    = mgr->get_or_create_const_fn_result_svalue (cd.get_lhs_type (),
						 cd.get_fndecl_for_call (),
						 inputs);
  return sval;
}
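
/* For example (a hypothetical sketch), given:

     __attribute__((const)) int foo (int);

   two calls "foo (x)" with the same symbolic value for 'x' yield the
   same const_fn_result_svalue, letting the analyzer treat the results
   as equal without knowing foo's body.  */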
/* Update this model for an outcome of a call that returns a specific
   integer constant.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_int_cst_return (const call_details &cd,
					 int retval,
					 bool unmergeable)
{
  if (!cd.get_lhs_type ())
    return;
  const svalue *result
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
  if (unmergeable)
    result = m_mgr->get_or_create_unmergeable (result);
  set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
}

/* Update this model for an outcome of a call that returns zero.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_zero_return (const call_details &cd,
				      bool unmergeable)
{
  update_for_int_cst_return (cd, 0, unmergeable);
}

/* Update this model for an outcome of a call that returns non-zero.  */

void
region_model::update_for_nonzero_return (const call_details &cd)
{
  if (!cd.get_lhs_type ())
    return;
  const svalue *zero
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
  const svalue *result
    = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
  add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
}
2033 /* Subroutine of region_model::maybe_get_copy_bounds.
2034 The Linux kernel commonly uses
2035 min_t([unsigned] long, VAR, sizeof(T));
2036 to set an upper bound on the size of a copy_to_user.
2037 Attempt to simplify such sizes by trying to get the upper bound as a
2038 constant.
2039 Return the simplified svalue if possible, or NULL otherwise. */
2041 static const svalue *
2042 maybe_simplify_upper_bound (const svalue *num_bytes_sval,
2043 region_model_manager *mgr)
2045 tree type = num_bytes_sval->get_type ();
2046 while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
2047 num_bytes_sval = raw;
2048 if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
2049 if (binop_sval->get_op () == MIN_EXPR)
2050 if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
2052 return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
2053 /* TODO: we might want to also capture the constraint
2054 when recording the diagnostic, or note that we're using
2055 the upper bound. */
2057 return NULL;
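/* For example, for kernel-style code (sketched):
     len = min_t (unsigned long, len, sizeof (buf));
     n = copy_to_user (p, buf, len);
   the size is a MIN_EXPR binop_svalue whose second argument is the
   constant sizeof (buf); that constant is returned (cast back to the
   original type) as an upper bound on the copy size.  */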
2060 /* Attempt to get an upper bound for the size of a copy when simulating a
2061 copy function.
2063 NUM_BYTES_SVAL is the symbolic value for the size of the copy.
2064 Use it if it's constant, otherwise try to simplify it. Failing
2065 that, use the size of SRC_REG if constant.
2067 Return a symbolic value for an upper limit on the number of bytes
2068 copied, or NULL if no such value could be determined. */
2070 const svalue *
2071 region_model::maybe_get_copy_bounds (const region *src_reg,
2072 const svalue *num_bytes_sval)
2074 if (num_bytes_sval->maybe_get_constant ())
2075 return num_bytes_sval;
2077 if (const svalue *simplified
2078 = maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
2079 num_bytes_sval = simplified;
2081 if (num_bytes_sval->maybe_get_constant ())
2082 return num_bytes_sval;
2084 /* For now, try just guessing the size as the capacity of the
2085 base region of the src.
2086 This is a hack; we might get too large a value. */
2087 const region *src_base_reg = src_reg->get_base_region ();
2088 num_bytes_sval = get_capacity (src_base_reg);
2090 if (num_bytes_sval->maybe_get_constant ())
2091 return num_bytes_sval;
2093 /* Non-constant: give up. */
2094 return NULL;
2097 /* Get any known_function for FNDECL, or NULL if there is none. */
2099 const known_function *
2100 region_model::get_known_function (tree fndecl) const
2102 known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
2103 return known_fn_mgr->get_by_fndecl (fndecl);
2106 /* Update this model for the CALL stmt, using CTXT to report any
2107 diagnostics - the first half.
2109 Updates to the region_model that should be made *before* sm-states
2110 are updated are done here; other updates to the region_model are done
2111 in region_model::on_call_post.
2113 Return true if the function call has unknown side effects (it wasn't
2114 recognized and we don't have a body for it, or are unable to tell which
2115 fndecl it is).
2117 Write true to *OUT_TERMINATE_PATH if this execution path should be
2118 terminated (e.g. the function call terminates the process). */
2120 bool
2121 region_model::on_call_pre (const gcall *call, region_model_context *ctxt,
2122 bool *out_terminate_path)
2124 call_details cd (call, this, ctxt);
2126 bool unknown_side_effects = false;
2128 /* Special-case for IFN_DEFERRED_INIT.
2129 We want to report uninitialized variables with -fanalyzer (treating
2130 -ftrivial-auto-var-init= as purely a mitigation feature).
2131 Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
2132 lhs of the call, so that it is still uninitialized from the point of
2133 view of the analyzer. */
2134 if (gimple_call_internal_p (call)
2135 && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
2136 return false;
2138 /* Get svalues for all of the arguments at the callsite, to ensure that we
2139 complain about any uninitialized arguments. This might lead to
2140 duplicates if any of the handling below also looks up the svalues,
2141 but the deduplication code should deal with that. */
2142 if (ctxt)
2143 check_call_args (cd);
2145 /* Some of the cases below update the lhs of the call based on the
2146 return value, but not all. Provide a default value, which may
2147 get overwritten below. */
2148 if (tree lhs = gimple_call_lhs (call))
2150 const region *lhs_region = get_lvalue (lhs, ctxt);
2151 const svalue *sval = maybe_get_const_fn_result (cd);
2152 if (!sval)
2154 /* For the common case of functions without __attribute__((const)),
2155 use a conjured value, and purge any prior state involving that
2156 value (in case this is in a loop). */
2157 sval = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
2158 lhs_region,
2159 conjured_purge (this,
2160 ctxt));
2162 set_value (lhs_region, sval, ctxt);
2165 if (gimple_call_internal_p (call))
2167 switch (gimple_call_internal_fn (call))
2169 default:
2170 break;
2171 case IFN_BUILTIN_EXPECT:
2172 impl_call_builtin_expect (cd);
2173 return false;
2174 case IFN_UBSAN_BOUNDS:
2175 return false;
2176 case IFN_VA_ARG:
2177 impl_call_va_arg (cd);
2178 return false;
2182 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
2184 /* The various impl_call_* member functions are implemented
2185 in region-model-impl-calls.cc.
2186 Having them split out into separate functions makes it easier
2187 to put breakpoints on the handling of specific functions. */
2188 int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);
2190 if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
2191 && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
2192 switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
2194 default:
2195 if (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
2196 unknown_side_effects = true;
2197 break;
2198 case BUILT_IN_ALLOCA:
2199 case BUILT_IN_ALLOCA_WITH_ALIGN:
2200 impl_call_alloca (cd);
2201 return false;
2202 case BUILT_IN_CALLOC:
2203 impl_call_calloc (cd);
2204 return false;
2205 case BUILT_IN_EXPECT:
2206 case BUILT_IN_EXPECT_WITH_PROBABILITY:
2207 impl_call_builtin_expect (cd);
2208 return false;
2209 case BUILT_IN_FREE:
2210 /* Handle in "on_call_post". */
2211 break;
2212 case BUILT_IN_MALLOC:
2213 impl_call_malloc (cd);
2214 return false;
2215 case BUILT_IN_MEMCPY:
2216 case BUILT_IN_MEMCPY_CHK:
2217 impl_call_memcpy (cd);
2218 return false;
2219 case BUILT_IN_MEMSET:
2220 case BUILT_IN_MEMSET_CHK:
2221 impl_call_memset (cd);
2222 return false;
2224 case BUILT_IN_REALLOC:
2225 return false;
2226 case BUILT_IN_STRCHR:
2227 /* Handle in "on_call_post". */
2228 return false;
2229 case BUILT_IN_STRCPY:
2230 case BUILT_IN_STRCPY_CHK:
2231 impl_call_strcpy (cd);
2232 return false;
2233 case BUILT_IN_STRLEN:
2234 impl_call_strlen (cd);
2235 return false;
2237 case BUILT_IN_STACK_SAVE:
2238 case BUILT_IN_STACK_RESTORE:
2239 return false;
2241 /* Stdio builtins. */
2242 case BUILT_IN_FPRINTF:
2243 case BUILT_IN_FPRINTF_UNLOCKED:
2244 case BUILT_IN_PUTC:
2245 case BUILT_IN_PUTC_UNLOCKED:
2246 case BUILT_IN_FPUTC:
2247 case BUILT_IN_FPUTC_UNLOCKED:
2248 case BUILT_IN_FPUTS:
2249 case BUILT_IN_FPUTS_UNLOCKED:
2250 case BUILT_IN_FWRITE:
2251 case BUILT_IN_FWRITE_UNLOCKED:
2252 case BUILT_IN_PRINTF:
2253 case BUILT_IN_PRINTF_UNLOCKED:
2254 case BUILT_IN_PUTCHAR:
2255 case BUILT_IN_PUTCHAR_UNLOCKED:
2256 case BUILT_IN_PUTS:
2257 case BUILT_IN_PUTS_UNLOCKED:
2258 case BUILT_IN_VFPRINTF:
2259 case BUILT_IN_VPRINTF:
2260 /* These stdio builtins have external effects that are out
2261 of scope for the analyzer: we only want to model the effects
2262 on the return value. */
2263 break;
2265 case BUILT_IN_VA_START:
2266 impl_call_va_start (cd);
2267 return false;
2268 case BUILT_IN_VA_COPY:
2269 impl_call_va_copy (cd);
2270 return false;
2272 else if (is_named_call_p (callee_fndecl, "malloc", call, 1))
2274 impl_call_malloc (cd);
2275 return false;
2277 else if (is_named_call_p (callee_fndecl, "calloc", call, 2))
2279 impl_call_calloc (cd);
2280 return false;
2282 else if (is_named_call_p (callee_fndecl, "alloca", call, 1))
2284 impl_call_alloca (cd);
2285 return false;
2287 else if (is_named_call_p (callee_fndecl, "realloc", call, 2))
2289 impl_call_realloc (cd);
2290 return false;
2292 else if (is_named_call_p (callee_fndecl, "__errno_location", call, 0))
2294 impl_call_errno_location (cd);
2295 return false;
2297 else if (is_named_call_p (callee_fndecl, "error"))
2299 if (impl_call_error (cd, 3, out_terminate_path))
2300 return false;
2301 else
2302 unknown_side_effects = true;
2304 else if (is_named_call_p (callee_fndecl, "error_at_line"))
2306 if (impl_call_error (cd, 5, out_terminate_path))
2307 return false;
2308 else
2309 unknown_side_effects = true;
2311 else if (is_named_call_p (callee_fndecl, "fgets", call, 3)
2312 || is_named_call_p (callee_fndecl, "fgets_unlocked", call, 3))
2314 impl_call_fgets (cd);
2315 return false;
2317 else if (is_named_call_p (callee_fndecl, "fread", call, 4))
2319 impl_call_fread (cd);
2320 return false;
2322 else if (is_named_call_p (callee_fndecl, "getchar", call, 0))
2324 /* No side-effects (tracking stream state is out-of-scope
2325 for the analyzer). */
2327 else if (is_named_call_p (callee_fndecl, "memset", call, 3)
2328 && POINTER_TYPE_P (cd.get_arg_type (0)))
2330 impl_call_memset (cd);
2331 return false;
2333 else if (is_pipe_call_p (callee_fndecl, "pipe", call, 1)
2334 || is_pipe_call_p (callee_fndecl, "pipe2", call, 2))
2336 	    /* Handle in "on_call_post"; bail now so that the fd array
2337 	       is left untouched, allowing us to detect use-of-uninit
2338 	       for the case where the call fails.  */
2339 return false;
2341 else if (is_named_call_p (callee_fndecl, "putenv", call, 1)
2342 && POINTER_TYPE_P (cd.get_arg_type (0)))
2344 impl_call_putenv (cd);
2345 return false;
2347 else if (is_named_call_p (callee_fndecl, "strchr", call, 2)
2348 && POINTER_TYPE_P (cd.get_arg_type (0)))
2350 /* Handle in "on_call_post". */
2351 return false;
2353 else if (is_named_call_p (callee_fndecl, "strlen", call, 1)
2354 && POINTER_TYPE_P (cd.get_arg_type (0)))
2356 impl_call_strlen (cd);
2357 return false;
2359 else if (is_named_call_p (callee_fndecl, "operator new", call, 1))
2361 impl_call_operator_new (cd);
2362 return false;
2364 else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
2366 impl_call_operator_new (cd);
2367 return false;
2369 else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
2370 || is_named_call_p (callee_fndecl, "operator delete", call, 2)
2371 || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
2373 /* Handle in "on_call_post". */
2375 else if (const known_function *kf = get_known_function (callee_fndecl))
2377 kf->impl_call_pre (cd);
2378 return false;
2380 else if (!fndecl_has_gimple_body_p (callee_fndecl)
2381 && (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
2382 && !fndecl_built_in_p (callee_fndecl))
2383 unknown_side_effects = true;
2385 else
2386 unknown_side_effects = true;
2388 return unknown_side_effects;
2391 /* Update this model for the CALL stmt, using CTXT to report any
2392 diagnostics - the second half.
2394 Updates to the region_model that should be made *after* sm-states
2395 are updated are done here; other updates to the region_model are done
2396 in region_model::on_call_pre.
2398 If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
2399 to purge state. */
2401 void
2402 region_model::on_call_post (const gcall *call,
2403 bool unknown_side_effects,
2404 region_model_context *ctxt)
2406 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
2408 call_details cd (call, this, ctxt);
2409 if (is_named_call_p (callee_fndecl, "free", call, 1))
2411 impl_call_free (cd);
2412 return;
2414 if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
2415 || is_named_call_p (callee_fndecl, "operator delete", call, 2)
2416 || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
2418 impl_call_operator_delete (cd);
2419 return;
2421 else if (is_pipe_call_p (callee_fndecl, "pipe", call, 1)
2422 || is_pipe_call_p (callee_fndecl, "pipe2", call, 2))
2424 impl_call_pipe (cd);
2425 return;
2427 else if (is_named_call_p (callee_fndecl, "strchr", call, 2)
2428 && POINTER_TYPE_P (cd.get_arg_type (0)))
2430 impl_call_strchr (cd);
2431 return;
2433 /* Was this fndecl referenced by
2434 __attribute__((malloc(FOO)))? */
2435 if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
2437 impl_deallocation_call (cd);
2438 return;
2440 if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
2441 && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
2442 switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
2444 default:
2445 break;
2446 case BUILT_IN_REALLOC:
2447 impl_call_realloc (cd);
2448 return;
2450 case BUILT_IN_STRCHR:
2451 impl_call_strchr (cd);
2452 return;
2454 case BUILT_IN_VA_END:
2455 impl_call_va_end (cd);
2456 return;
2460 if (unknown_side_effects)
2461 handle_unrecognized_call (call, ctxt);
2464 /* Purge state involving SVAL from this region_model, using CTXT
2465 (if non-NULL) to purge other state in a program_state.
2467 For example, if we're at the def-stmt of an SSA name, then we need to
2468 purge any state for svalues that involve that SSA name. This avoids
2469 false positives in loops, since a symbolic value referring to the
2470 SSA name will be referring to the previous value of that SSA name.
2472 For example, in:
2473 while ((e = hashmap_iter_next(&iter))) {
2474 struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
2475 free (e_strbuf->value);
2477 at the def-stmt of e_8:
2478 e_8 = hashmap_iter_next (&iter);
2479 we should purge the "freed" state of:
2480 INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
2481 which is the "e_strbuf->value" value from the previous iteration,
2482 or we will erroneously report a double-free - the "e_8" within it
2483 refers to the previous value. */
2485 void
2486 region_model::purge_state_involving (const svalue *sval,
2487 region_model_context *ctxt)
2489 if (!sval->can_have_associated_state_p ())
2490 return;
2491 m_store.purge_state_involving (sval, m_mgr);
2492 m_constraints->purge_state_involving (sval);
2493 m_dynamic_extents.purge_state_involving (sval);
2494 if (ctxt)
2495 ctxt->purge_state_involving (sval);
2498 /* A pending_note subclass for adding a note about an
2499 __attribute__((access, ...)) to a diagnostic. */
2501 class reason_attr_access : public pending_note_subclass<reason_attr_access>
2503 public:
2504 reason_attr_access (tree callee_fndecl, const attr_access &access)
2505 : m_callee_fndecl (callee_fndecl),
2506 m_ptr_argno (access.ptrarg),
2507 m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
2511 const char *get_kind () const final override { return "reason_attr_access"; }
2513 void emit () const final override
2515 inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
2516 "parameter %i of %qD marked with attribute %qs",
2517 m_ptr_argno + 1, m_callee_fndecl, m_access_str);
2520 bool operator== (const reason_attr_access &other) const
2522 return (m_callee_fndecl == other.m_callee_fndecl
2523 && m_ptr_argno == other.m_ptr_argno
2524 && !strcmp (m_access_str, other.m_access_str));
2527 private:
2528 tree m_callee_fndecl;
2529 unsigned m_ptr_argno;
2530 const char *m_access_str;
2533 /* Check CALL, a call to external function CALLEE_FNDECL, based on
2534    any __attribute__ ((access, ...)) on the latter, complaining to
2535 CTXT about any issues.
2537 Currently we merely call check_region_for_write on any regions
2538 pointed to by arguments marked with a "write_only" or "read_write"
2539 attribute. */
2541 void
2542 region_model::
2543 check_external_function_for_access_attr (const gcall *call,
2544 tree callee_fndecl,
2545 region_model_context *ctxt) const
2547 gcc_assert (call);
2548 gcc_assert (callee_fndecl);
2549 gcc_assert (ctxt);
2551 tree fntype = TREE_TYPE (callee_fndecl);
2552 if (!fntype)
2553 return;
2555 if (!TYPE_ATTRIBUTES (fntype))
2556 return;
2558 /* Initialize a map of attribute access specifications for arguments
2559 to the function call. */
2560 rdwr_map rdwr_idx;
2561 init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));
2563 unsigned argno = 0;
2565 for (tree iter = TYPE_ARG_TYPES (fntype); iter;
2566 iter = TREE_CHAIN (iter), ++argno)
2568 const attr_access* access = rdwr_idx.get (argno);
2569 if (!access)
2570 continue;
2572 /* Ignore any duplicate entry in the map for the size argument. */
2573 if (access->ptrarg != argno)
2574 continue;
2576 if (access->mode == access_write_only
2577 || access->mode == access_read_write)
2579 /* Subclass of decorated_region_model_context that
2580 adds a note about the attr access to any saved diagnostics. */
2581 class annotating_ctxt : public note_adding_context
2583 public:
2584 annotating_ctxt (tree callee_fndecl,
2585 const attr_access &access,
2586 region_model_context *ctxt)
2587 : note_adding_context (ctxt),
2588 m_callee_fndecl (callee_fndecl),
2589 m_access (access)
2592 std::unique_ptr<pending_note> make_note () final override
2594 return make_unique<reason_attr_access>
2595 (m_callee_fndecl, m_access);
2597 private:
2598 tree m_callee_fndecl;
2599 const attr_access &m_access;
2602 /* Use this ctxt below so that any diagnostics get the
2603 note added to them. */
2604 annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);
2606 tree ptr_tree = gimple_call_arg (call, access->ptrarg);
2607 const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
2608 const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
2609 check_region_for_write (reg, &my_ctxt);
2610 /* We don't use the size arg for now. */
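/* A sketch of the kind of declaration this handles (hypothetical):
     extern void fill (void *buf, size_t n)
       __attribute__ ((access (write_only, 1, 2)));
   For a call "fill (p, n)", the region pointed to by "p" is checked
   via check_region_for_write, and any diagnostic saved against it
   gets a reason_attr_access note identifying the attribute.  */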
2615 /* Handle a call CALL to a function with unknown behavior.
2617 Traverse the regions in this model, determining what regions are
2618 reachable from pointer arguments to CALL and from global variables,
2619 recursively.
2621 Set all reachable regions to new unknown values and purge sm-state
2622 from their values, and from values that point to them. */
2624 void
2625 region_model::handle_unrecognized_call (const gcall *call,
2626 region_model_context *ctxt)
2628 tree fndecl = get_fndecl_for_call (call, ctxt);
2630 if (fndecl && ctxt)
2631 check_external_function_for_access_attr (call, fndecl, ctxt);
2633 reachable_regions reachable_regs (this);
2635 /* Determine the reachable regions and their mutability. */
2637 /* Add globals and regions that already escaped in previous
2638 unknown calls. */
2639 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
2640 &reachable_regs);
2642 /* Params that are pointers. */
2643 tree iter_param_types = NULL_TREE;
2644 if (fndecl)
2645 iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2646 for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
2648 /* Track expected param type, where available. */
2649 tree param_type = NULL_TREE;
2650 if (iter_param_types)
2652 param_type = TREE_VALUE (iter_param_types);
2653 gcc_assert (param_type);
2654 iter_param_types = TREE_CHAIN (iter_param_types);
2657 tree parm = gimple_call_arg (call, arg_idx);
2658 const svalue *parm_sval = get_rvalue (parm, ctxt);
2659 reachable_regs.handle_parm (parm_sval, param_type);
2663 uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;
2665 /* Purge sm-state for the svalues that were reachable,
2666 both in non-mutable and mutable form. */
2667 for (svalue_set::iterator iter
2668 = reachable_regs.begin_reachable_svals ();
2669 iter != reachable_regs.end_reachable_svals (); ++iter)
2671 const svalue *sval = (*iter);
2672 if (ctxt)
2673 ctxt->on_unknown_change (sval, false);
2675 for (svalue_set::iterator iter
2676 = reachable_regs.begin_mutable_svals ();
2677 iter != reachable_regs.end_mutable_svals (); ++iter)
2679 const svalue *sval = (*iter);
2680 if (ctxt)
2681 ctxt->on_unknown_change (sval, true);
2682 if (uncertainty)
2683 uncertainty->on_mutable_sval_at_unknown_call (sval);
2686 /* Mark any clusters that have escaped. */
2687 reachable_regs.mark_escaped_clusters (ctxt);
2689 /* Update bindings for all clusters that have escaped, whether above,
2690 or previously. */
2691 m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
2692 conjured_purge (this, ctxt));
2694 /* Purge dynamic extents from any regions that have escaped mutably:
2695 realloc could have been called on them. */
2696 for (hash_set<const region *>::iterator
2697 iter = reachable_regs.begin_mutable_base_regs ();
2698 iter != reachable_regs.end_mutable_base_regs ();
2699 ++iter)
2701 const region *base_reg = (*iter);
2702 unset_dynamic_extents (base_reg);
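/* For example, after (with "unknown_fn" having no body or summary):
     int x = 42;
     unknown_fn (&x);
   the cluster for "x" is reachable and mutable, so "x" is given a new
   unknown value and is marked as having escaped: later unknown calls
   could write to it again through a stashed copy of the pointer.  */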
2706 /* Traverse the regions in this model, determining what regions are
2707 reachable from the store and populating *OUT.
2709 If EXTRA_SVAL is non-NULL, treat it as an additional "root"
2710 for reachability (for handling return values from functions when
2711 analyzing return of the only function on the stack).
2713 If UNCERTAINTY is non-NULL, treat any svalues that were recorded
2714 within it as being maybe-bound as additional "roots" for reachability.
2716 Find svalues that haven't leaked. */
2718 void
2719 region_model::get_reachable_svalues (svalue_set *out,
2720 const svalue *extra_sval,
2721 const uncertainty_t *uncertainty)
2723 reachable_regions reachable_regs (this);
2725 /* Add globals and regions that already escaped in previous
2726 unknown calls. */
2727 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
2728 &reachable_regs);
2730 if (extra_sval)
2731 reachable_regs.handle_sval (extra_sval);
2733 if (uncertainty)
2734 for (uncertainty_t::iterator iter
2735 = uncertainty->begin_maybe_bound_svals ();
2736 iter != uncertainty->end_maybe_bound_svals (); ++iter)
2737 reachable_regs.handle_sval (*iter);
2739 /* Get regions for locals that have explicitly bound values. */
2740 for (store::cluster_map_t::iterator iter = m_store.begin ();
2741 iter != m_store.end (); ++iter)
2743 const region *base_reg = (*iter).first;
2744 if (const region *parent = base_reg->get_parent_region ())
2745 if (parent->get_kind () == RK_FRAME)
2746 reachable_regs.add (base_reg, false);
2749 /* Populate *OUT based on the values that were reachable. */
2750 for (svalue_set::iterator iter
2751 = reachable_regs.begin_reachable_svals ();
2752 iter != reachable_regs.end_reachable_svals (); ++iter)
2753 out->add (*iter);
2756 /* Update this model for the RETURN_STMT, using CTXT to report any
2757 diagnostics. */
2759 void
2760 region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
2762 tree callee = get_current_function ()->decl;
2763 tree lhs = DECL_RESULT (callee);
2764 tree rhs = gimple_return_retval (return_stmt);
2766 if (lhs && rhs)
2768 const svalue *sval = get_rvalue (rhs, ctxt);
2769 const region *ret_reg = get_lvalue (lhs, ctxt);
2770 set_value (ret_reg, sval, ctxt);
2774 /* Update this model for a call and return of setjmp/sigsetjmp at CALL within
2775 ENODE, using CTXT to report any diagnostics.
2777 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
2778    0), as opposed to any second return due to longjmp/siglongjmp.  */
2780 void
2781 region_model::on_setjmp (const gcall *call, const exploded_node *enode,
2782 region_model_context *ctxt)
2784 const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
2785 const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
2786 ctxt);
2788 /* Create a setjmp_svalue for this call and store it in BUF_REG's
2789 region. */
2790 if (buf_reg)
2792 setjmp_record r (enode, call);
2793 const svalue *sval
2794 = m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
2795 set_value (buf_reg, sval, ctxt);
2798 /* Direct calls to setjmp return 0. */
2799 if (tree lhs = gimple_call_lhs (call))
2801 const svalue *new_sval
2802 = m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
2803 const region *lhs_reg = get_lvalue (lhs, ctxt);
2804 set_value (lhs_reg, new_sval, ctxt);
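/* For example, for:
     jmp_buf env;
     if (setjmp (env) == 0)
       ...
   the buffer pointed to by "env" is bound to a setjmp_svalue that
   records this call and enode, and the lhs is set to 0, so this
   initial pass takes the "== 0" branch.  */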
2808 /* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
2809 to a "setjmp" at SETJMP_CALL where the final stack depth should be
2810 SETJMP_STACK_DEPTH. Pop any stack frames. Leak detection is *not*
2811 done, and should be done by the caller. */
2813 void
2814 region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
2815 int setjmp_stack_depth, region_model_context *ctxt)
2817 /* Evaluate the val, using the frame of the "longjmp". */
2818 tree fake_retval = gimple_call_arg (longjmp_call, 1);
2819 const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);
2821 /* Pop any frames until we reach the stack depth of the function where
2822 setjmp was called. */
2823 gcc_assert (get_stack_depth () >= setjmp_stack_depth);
2824 while (get_stack_depth () > setjmp_stack_depth)
2825 pop_frame (NULL, NULL, ctxt);
2827 gcc_assert (get_stack_depth () == setjmp_stack_depth);
2829 /* Assign to LHS of "setjmp" in new_state. */
2830 if (tree lhs = gimple_call_lhs (setjmp_call))
2832 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
2833 const svalue *zero_sval
2834 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
2835 tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
2836 /* If we have 0, use 1. */
2837 if (eq_zero.is_true ())
2839 const svalue *one_sval
2840 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
2841 fake_retval_sval = one_sval;
2843 else
2845 /* Otherwise note that the value is nonzero. */
2846 m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
2849 /* Decorate the return value from setjmp as being unmergeable,
2850 so that we don't attempt to merge states with it as zero
2851 with states in which it's nonzero, leading to a clean distinction
2852 	 in the exploded_graph between the first return and the second
2853 return. */
2854 fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);
2856 const region *lhs_reg = get_lvalue (lhs, ctxt);
2857 set_value (lhs_reg, fake_retval_sval, ctxt);
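/* For example, rewinding for:
     longjmp (env, 0);
   pops frames back to the matching "setjmp (env)" and assigns 1 (not
   0) to the setjmp's lhs, since longjmp with val == 0 makes setjmp
   return 1; the value is wrapped as unmergeable, as described above.  */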
2861 /* Update this region_model for a phi stmt of the form
2862      LHS = PHI <...RHS...>,
2863 where RHS is for the appropriate edge.
2864 Get state from OLD_STATE so that all of the phi stmts for a basic block
2865 are effectively handled simultaneously. */
2867 void
2868 region_model::handle_phi (const gphi *phi,
2869 tree lhs, tree rhs,
2870 const region_model &old_state,
2871 region_model_context *ctxt)
2873 /* For now, don't bother tracking the .MEM SSA names. */
2874 if (tree var = SSA_NAME_VAR (lhs))
2875 if (TREE_CODE (var) == VAR_DECL)
2876 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
2877 return;
2879 const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
2880 const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
2882 set_value (dst_reg, src_sval, ctxt);
2884 if (ctxt)
2885 ctxt->on_phi (phi, rhs);
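/* For example, for GIMPLE such as:
     # x_3 = PHI <x_1(2), x_2(3)>
   when following the in-edge from basic block 2, LHS is x_3 and RHS
   is x_1; x_1 is evaluated in OLD_STATE so that all of the block's
   phis read the pre-phi values, not each other's results.  */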
2888 /* Implementation of region_model::get_lvalue; the latter adds type-checking.
2890 Get the id of the region for PV within this region_model,
2891 emitting any diagnostics to CTXT. */
2893 const region *
2894 region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
2896 tree expr = pv.m_tree;
2898 gcc_assert (expr);
2900 switch (TREE_CODE (expr))
2902 default:
2903 return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
2904 dump_location_t ());
2906 case ARRAY_REF:
2908 tree array = TREE_OPERAND (expr, 0);
2909 tree index = TREE_OPERAND (expr, 1);
2911 const region *array_reg = get_lvalue (array, ctxt);
2912 const svalue *index_sval = get_rvalue (index, ctxt);
2913 return m_mgr->get_element_region (array_reg,
2914 TREE_TYPE (TREE_TYPE (array)),
2915 index_sval);
2917 break;
2919 case BIT_FIELD_REF:
2921 tree inner_expr = TREE_OPERAND (expr, 0);
2922 const region *inner_reg = get_lvalue (inner_expr, ctxt);
2923 tree num_bits = TREE_OPERAND (expr, 1);
2924 tree first_bit_offset = TREE_OPERAND (expr, 2);
2925 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2926 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2927 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2928 TREE_INT_CST_LOW (num_bits));
2929 return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
2931 break;
2933 case MEM_REF:
2935 tree ptr = TREE_OPERAND (expr, 0);
2936 tree offset = TREE_OPERAND (expr, 1);
2937 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2938 const svalue *offset_sval = get_rvalue (offset, ctxt);
2939 const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
2940 return m_mgr->get_offset_region (star_ptr,
2941 TREE_TYPE (expr),
2942 offset_sval);
2944 break;
2946 case FUNCTION_DECL:
2947 return m_mgr->get_region_for_fndecl (expr);
2949 case LABEL_DECL:
2950 return m_mgr->get_region_for_label (expr);
2952 case VAR_DECL:
2953 /* Handle globals. */
2954 if (is_global_var (expr))
2955 return m_mgr->get_region_for_global (expr);
2957 /* Fall through. */
2959 case SSA_NAME:
2960 case PARM_DECL:
2961 case RESULT_DECL:
2963 gcc_assert (TREE_CODE (expr) == SSA_NAME
2964 || TREE_CODE (expr) == PARM_DECL
2965 || TREE_CODE (expr) == VAR_DECL
2966 || TREE_CODE (expr) == RESULT_DECL);
2968 int stack_index = pv.m_stack_depth;
2969 const frame_region *frame = get_frame_at_index (stack_index);
2970 gcc_assert (frame);
2971 return frame->get_region_for_local (m_mgr, expr, ctxt);
2974 case COMPONENT_REF:
2976 /* obj.field */
2977 tree obj = TREE_OPERAND (expr, 0);
2978 tree field = TREE_OPERAND (expr, 1);
2979 const region *obj_reg = get_lvalue (obj, ctxt);
2980 return m_mgr->get_field_region (obj_reg, field);
2982 break;
2984 case STRING_CST:
2985 return m_mgr->get_region_for_string (expr);
2989 /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
2991 static void
2992 assert_compat_types (tree src_type, tree dst_type)
2994 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2996 #if CHECKING_P
2997 if (!(useless_type_conversion_p (src_type, dst_type)))
2998 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
2999 #endif
3003 /* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
3005 bool
3006 compat_types_p (tree src_type, tree dst_type)
3008 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
3009 if (!(useless_type_conversion_p (src_type, dst_type)))
3010 return false;
3011 return true;
3014 /* Get the region for PV within this region_model,
3015 emitting any diagnostics to CTXT. */
3017 const region *
3018 region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
3020 if (pv.m_tree == NULL_TREE)
3021 return NULL;
3023 const region *result_reg = get_lvalue_1 (pv, ctxt);
3024 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
3025 return result_reg;
3028 /* Get the region for EXPR within this region_model (assuming the most
3029 recent stack frame if it's a local). */
3031 const region *
3032 region_model::get_lvalue (tree expr, region_model_context *ctxt) const
3034 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
3037 /* Implementation of region_model::get_rvalue; the latter adds type-checking.
3039 Get the value of PV within this region_model,
3040 emitting any diagnostics to CTXT. */
3042 const svalue *
3043 region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
3045 gcc_assert (pv.m_tree);
3047 switch (TREE_CODE (pv.m_tree))
3049 default:
3050 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
3052 case ADDR_EXPR:
3054 /* "&EXPR". */
3055 tree expr = pv.m_tree;
3056 tree op0 = TREE_OPERAND (expr, 0);
3057 const region *expr_reg = get_lvalue (op0, ctxt);
3058 return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
3060 break;
3062 case BIT_FIELD_REF:
3064 tree expr = pv.m_tree;
3065 tree op0 = TREE_OPERAND (expr, 0);
3066 const region *reg = get_lvalue (op0, ctxt);
3067 tree num_bits = TREE_OPERAND (expr, 1);
3068 tree first_bit_offset = TREE_OPERAND (expr, 2);
3069 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
3070 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
3071 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
3072 TREE_INT_CST_LOW (num_bits));
3073 return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
3076 case SSA_NAME:
3077 case VAR_DECL:
3078 case PARM_DECL:
3079 case RESULT_DECL:
3080 case ARRAY_REF:
3082 const region *reg = get_lvalue (pv, ctxt);
3083 return get_store_value (reg, ctxt);
3086 case REALPART_EXPR:
3087 case IMAGPART_EXPR:
3088 case VIEW_CONVERT_EXPR:
3090 tree expr = pv.m_tree;
3091 tree arg = TREE_OPERAND (expr, 0);
3092 const svalue *arg_sval = get_rvalue (arg, ctxt);
3093 const svalue *sval_unaryop
3094 = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
3095 arg_sval);
3096 return sval_unaryop;
3099 case INTEGER_CST:
3100 case REAL_CST:
3101 case COMPLEX_CST:
3102 case VECTOR_CST:
3103 case STRING_CST:
3104 return m_mgr->get_or_create_constant_svalue (pv.m_tree);
3106 case POINTER_PLUS_EXPR:
3108 tree expr = pv.m_tree;
3109 tree ptr = TREE_OPERAND (expr, 0);
3110 tree offset = TREE_OPERAND (expr, 1);
3111 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
3112 const svalue *offset_sval = get_rvalue (offset, ctxt);
3113 const svalue *sval_binop
3114 = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
3115 ptr_sval, offset_sval);
3116 return sval_binop;
3119 /* Binary ops. */
3120 case PLUS_EXPR:
3121 case MULT_EXPR:
3123 tree expr = pv.m_tree;
3124 tree arg0 = TREE_OPERAND (expr, 0);
3125 tree arg1 = TREE_OPERAND (expr, 1);
3126 const svalue *arg0_sval = get_rvalue (arg0, ctxt);
3127 const svalue *arg1_sval = get_rvalue (arg1, ctxt);
3128 const svalue *sval_binop
3129 = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
3130 arg0_sval, arg1_sval);
3131 return sval_binop;
3134 case COMPONENT_REF:
3135 case MEM_REF:
3137 const region *ref_reg = get_lvalue (pv, ctxt);
3138 return get_store_value (ref_reg, ctxt);
3140 case OBJ_TYPE_REF:
3142 tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
3143 return get_rvalue (expr, ctxt);
3148 /* Get the value of PV within this region_model,
3149 emitting any diagnostics to CTXT. */
3151 const svalue *
3152 region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
3154 if (pv.m_tree == NULL_TREE)
3155 return NULL;
3157 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
3159 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
3161 result_sval = check_for_poison (result_sval, pv.m_tree, ctxt);
3163 return result_sval;
3166 /* Get the value of EXPR within this region_model (assuming the most
3167 recent stack frame if it's a local). */
3169 const svalue *
3170 region_model::get_rvalue (tree expr, region_model_context *ctxt) const
3172 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
3175 /* Return true if this model is on a path with "main" as the entrypoint
3176 (as opposed to one in which we're merely analyzing a subset of the
3177 path through the code). */
3179 bool
3180 region_model::called_from_main_p () const
3182 if (!m_current_frame)
3183 return false;
3184 /* Determine if the oldest stack frame in this model is for "main". */
3185 const frame_region *frame0 = get_frame_at_index (0);
3186 gcc_assert (frame0);
3187 return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
3190 /* Subroutine of region_model::get_store_value for when REG is (or is within)
3191 a global variable that hasn't been touched since the start of this path
3192 (or was implicitly touched due to a call to an unknown function). */
3194 const svalue *
3195 region_model::get_initial_value_for_global (const region *reg) const
3197 /* Get the decl that REG is for (or is within). */
3198 const decl_region *base_reg
3199 = reg->get_base_region ()->dyn_cast_decl_region ();
3200 gcc_assert (base_reg);
3201 tree decl = base_reg->get_decl ();
3203 /* Special-case: to avoid having to explicitly update all previously
3204 untracked globals when calling an unknown fn, they implicitly have
3205 an unknown value if an unknown call has occurred, unless this is
3206 static to-this-TU and hasn't escaped. Globals that have escaped
3207 are explicitly tracked, so we shouldn't hit this case for them. */
3208 if (m_store.called_unknown_fn_p ()
3209 && TREE_PUBLIC (decl)
3210 && !TREE_READONLY (decl))
3211 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
3213 /* If we are on a path from the entrypoint from "main" and we have a
3214 global decl defined in this TU that hasn't been touched yet, then
3215 the initial value of REG can be taken from the initialization value
3216 of the decl. */
3217 if (called_from_main_p () || TREE_READONLY (decl))
3219 /* Attempt to get the initializer value for base_reg. */
3220 if (const svalue *base_reg_init
3221 = base_reg->get_svalue_for_initializer (m_mgr))
3223 if (reg == base_reg)
3224 return base_reg_init;
3225 else
3227 /* Get the value for REG within base_reg_init. */
3228 binding_cluster c (base_reg);
3229 c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
3230 const svalue *sval
3231 = c.get_any_binding (m_mgr->get_store_manager (), reg);
3232 if (sval)
3234 if (reg->get_type ())
3235 sval = m_mgr->get_or_create_cast (reg->get_type (),
3236 sval);
3237 return sval;
3243 /* Otherwise, return INIT_VAL(REG). */
3244 return m_mgr->get_or_create_initial_value (reg);
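/* For example, given:
     int g = 42;
     int main (void) { return g; }
   on the path from "main", the untouched global "g" initially has the
   value 42 from its initializer; when merely analyzing some arbitrary
   function, "g" instead gets a symbolic INIT_VAL, since an unseen
   caller could have modified it.  */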
3247 /* Get a value for REG, looking it up in the store, or otherwise falling
3248 back to "initial" or "unknown" values.
3249 Use CTXT to report any warnings associated with reading from REG. */
3251 const svalue *
3252 region_model::get_store_value (const region *reg,
3253 region_model_context *ctxt) const
3255 check_region_for_read (reg, ctxt);
3257 /* Special-case: handle var_decls in the constant pool. */
3258 if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
3259 if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
3260 return sval;
3262 const svalue *sval
3263 = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
3264 if (sval)
3266 if (reg->get_type ())
3267 sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
3268 return sval;
3271 /* Special-case: read at a constant index within a STRING_CST. */
3272 if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
3273 if (tree byte_offset_cst
3274 = offset_reg->get_byte_offset ()->maybe_get_constant ())
3275 if (const string_region *str_reg
3276 = reg->get_parent_region ()->dyn_cast_string_region ())
3278 tree string_cst = str_reg->get_string_cst ();
3279 if (const svalue *char_sval
3280 = m_mgr->maybe_get_char_from_string_cst (string_cst,
3281 byte_offset_cst))
3282 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
3285 /* Special-case: read the initial char of a STRING_CST. */
3286 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
3287 if (const string_region *str_reg
3288 = cast_reg->get_original_region ()->dyn_cast_string_region ())
3290 tree string_cst = str_reg->get_string_cst ();
3291 tree byte_offset_cst = build_int_cst (integer_type_node, 0);
3292 if (const svalue *char_sval
3293 = m_mgr->maybe_get_char_from_string_cst (string_cst,
3294 byte_offset_cst))
3295 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
3298 /* Otherwise we implicitly have the initial value of the region
3299      (if the cluster had been touched, binding_cluster::get_any_binding
3300 would have returned UNKNOWN, and we would already have returned
3301 that above). */
3303 /* Handle globals. */
3304 if (reg->get_base_region ()->get_parent_region ()->get_kind ()
3305 == RK_GLOBALS)
3306 return get_initial_value_for_global (reg);
3308 return m_mgr->get_or_create_initial_value (reg);
3311 /* Return false if REG does not exist, true if it may do.
3312 This is for detecting regions within the stack that don't exist anymore
3313 after frames are popped. */
3315 bool
3316 region_model::region_exists_p (const region *reg) const
3318 /* If within a stack frame, check that the stack frame is live. */
3319 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
3321 /* Check that the current frame is the enclosing frame, or is called
3322 by it. */
3323 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
3324 iter_frame = iter_frame->get_calling_frame ())
3325 if (iter_frame == enclosing_frame)
3326 return true;
3327 return false;
3330 return true;
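/* For example, given (a deliberately buggy sketch):
     int *p;
     static void set_p (void) { int local; p = &local; }
   after the frame for "set_p" has been popped, the region for "local"
   no longer satisfies region_exists_p, which is what allows stale
   pointers into popped frames to be detected.  */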
3333 /* Get a region for referencing PTR_SVAL, creating a region if need be, and
3334 potentially generating warnings via CTXT.
3335 PTR_SVAL must be of pointer type.
3336    PTR_TREE, if non-NULL, can be used when emitting diagnostics.  */
3338 const region *
3339 region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
3340 region_model_context *ctxt) const
3342 gcc_assert (ptr_sval);
3343 gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));
3345 /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
3346 as a constraint. This suppresses false positives from
3347 -Wanalyzer-null-dereference for the case where we later have an
3348 if (PTR_SVAL) that would occur if we considered the false branch
3349 and transitioned the malloc state machine from start->null. */
3350 tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
3351 const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
3352 m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
3354 switch (ptr_sval->get_kind ())
3356 default:
3357 break;
3359 case SK_REGION:
3361 const region_svalue *region_sval
3362 = as_a <const region_svalue *> (ptr_sval);
3363 return region_sval->get_pointee ();
3366 case SK_BINOP:
3368 const binop_svalue *binop_sval
3369 = as_a <const binop_svalue *> (ptr_sval);
3370 switch (binop_sval->get_op ())
3372 case POINTER_PLUS_EXPR:
3374 	    /* If we have a symbolic value expressing pointer arithmetic,
3375 try to convert it to a suitable region. */
3376 const region *parent_region
3377 = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
3378 const svalue *offset = binop_sval->get_arg1 ();
3379 	    tree type = TREE_TYPE (ptr_sval->get_type ());
3380 return m_mgr->get_offset_region (parent_region, type, offset);
3382 default:
3383 break;
3386 break;
3388 case SK_POISONED:
3390 if (ctxt)
3392 tree ptr = get_representative_tree (ptr_sval);
3393 /* If we can't get a representative tree for PTR_SVAL
3394 (e.g. if it hasn't been bound into the store), then
3395 fall back on PTR_TREE, if non-NULL. */
3396 if (!ptr)
3397 ptr = ptr_tree;
3398 if (ptr)
3400 const poisoned_svalue *poisoned_sval
3401 = as_a <const poisoned_svalue *> (ptr_sval);
3402 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
3403 ctxt->warn (make_unique<poisoned_value_diagnostic>
3404 (ptr, pkind, NULL));
3408 break;
3411 return m_mgr->get_symbolic_region (ptr_sval);
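/* For example, dereferencing a pointer of the form "&a + o" (an
   SK_BINOP POINTER_PLUS_EXPR svalue) yields an offset_region within
   "a" at symbolic offset "o", whereas a pointer of unknown provenance
   falls through to the symbolic_region wrapping the pointer value.  */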
3414 /* Attempt to get BITS within any value of REG, as TYPE.
3415 In particular, extract values from compound_svalues for the case
3416 where there's a concrete binding at BITS.
3417 Return an unknown svalue if we can't handle the given case.
3418 Use CTXT to report any warnings associated with reading from REG. */
3420 const svalue *
3421 region_model::get_rvalue_for_bits (tree type,
3422 const region *reg,
3423 const bit_range &bits,
3424 region_model_context *ctxt) const
3426 const svalue *sval = get_store_value (reg, ctxt);
3427 return m_mgr->get_or_create_bits_within (type, bits, sval);
3430 /* A subclass of pending_diagnostic for complaining about writes to
3431 constant regions of memory. */
3433 class write_to_const_diagnostic
3434 : public pending_diagnostic_subclass<write_to_const_diagnostic>
3436 public:
3437 write_to_const_diagnostic (const region *reg, tree decl)
3438 : m_reg (reg), m_decl (decl)
3441 const char *get_kind () const final override
3443 return "write_to_const_diagnostic";
3446 bool operator== (const write_to_const_diagnostic &other) const
3448 return (m_reg == other.m_reg
3449 && m_decl == other.m_decl);
3452 int get_controlling_option () const final override
3454 return OPT_Wanalyzer_write_to_const;
3457 bool emit (rich_location *rich_loc) final override
3459 auto_diagnostic_group d;
3460 bool warned;
3461 switch (m_reg->get_kind ())
3463 default:
3464 warned = warning_at (rich_loc, get_controlling_option (),
3465 "write to %<const%> object %qE", m_decl);
3466 break;
3467 case RK_FUNCTION:
3468 warned = warning_at (rich_loc, get_controlling_option (),
3469 "write to function %qE", m_decl);
3470 break;
3471 case RK_LABEL:
3472 warned = warning_at (rich_loc, get_controlling_option (),
3473 "write to label %qE", m_decl);
3474 break;
3476 if (warned)
3477 inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
3478 return warned;
3481 label_text describe_final_event (const evdesc::final_event &ev) final override
3483 switch (m_reg->get_kind ())
3485 default:
3486 return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
3487 case RK_FUNCTION:
3488 return ev.formatted_print ("write to function %qE here", m_decl);
3489 case RK_LABEL:
3490 return ev.formatted_print ("write to label %qE here", m_decl);
3494 private:
3495 const region *m_reg;
3496 tree m_decl;
3499 /* A subclass of pending_diagnostic for complaining about writes to
3500 string literals. */
3502 class write_to_string_literal_diagnostic
3503 : public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
3505 public:
3506 write_to_string_literal_diagnostic (const region *reg)
3507 : m_reg (reg)
3510 const char *get_kind () const final override
3512 return "write_to_string_literal_diagnostic";
3515 bool operator== (const write_to_string_literal_diagnostic &other) const
3517 return m_reg == other.m_reg;
3520 int get_controlling_option () const final override
3522 return OPT_Wanalyzer_write_to_string_literal;
3525 bool emit (rich_location *rich_loc) final override
3527 return warning_at (rich_loc, get_controlling_option (),
3528 "write to string literal");
3529 /* Ideally we would show the location of the STRING_CST as well,
3530 but it is not available at this point. */
3533 label_text describe_final_event (const evdesc::final_event &ev) final override
3535 return ev.formatted_print ("write to string literal here");
3538 private:
3539 const region *m_reg;
3542 /* Use CTXT to warn if DEST_REG is a region that shouldn't be written to.  */
3544 void
3545 region_model::check_for_writable_region (const region* dest_reg,
3546 region_model_context *ctxt) const
3548 /* Fail gracefully if CTXT is NULL. */
3549 if (!ctxt)
3550 return;
3552 const region *base_reg = dest_reg->get_base_region ();
3553 switch (base_reg->get_kind ())
3555 default:
3556 break;
3557 case RK_FUNCTION:
3559 const function_region *func_reg = as_a <const function_region *> (base_reg);
3560 tree fndecl = func_reg->get_fndecl ();
3561 ctxt->warn (make_unique<write_to_const_diagnostic>
3562 (func_reg, fndecl));
3564 break;
3565 case RK_LABEL:
3567 const label_region *label_reg = as_a <const label_region *> (base_reg);
3568 tree label = label_reg->get_label ();
3569 ctxt->warn (make_unique<write_to_const_diagnostic>
3570 (label_reg, label));
3572 break;
3573 case RK_DECL:
3575 const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
3576 tree decl = decl_reg->get_decl ();
3577 /* Warn about writes to const globals.
3578 Don't warn for writes to const locals, and params in particular,
3579 	   since we would warn in push_frame when setting them up (e.g. the
3580 "this" param is "T* const"). */
3581 if (TREE_READONLY (decl)
3582 && is_global_var (decl))
3583 ctxt->warn (make_unique<write_to_const_diagnostic> (dest_reg, decl));
3585 break;
3586 case RK_STRING:
3587 ctxt->warn (make_unique<write_to_string_literal_diagnostic> (dest_reg));
3588 break;
3592 /* Get the capacity of REG in bytes. */
3594 const svalue *
3595 region_model::get_capacity (const region *reg) const
3597 switch (reg->get_kind ())
3599 default:
3600 break;
3601 case RK_DECL:
3603 const decl_region *decl_reg = as_a <const decl_region *> (reg);
3604 tree decl = decl_reg->get_decl ();
3605 if (TREE_CODE (decl) == SSA_NAME)
3607 tree type = TREE_TYPE (decl);
3608 tree size = TYPE_SIZE (type);
3609 return get_rvalue (size, NULL);
3611 else
3613 tree size = decl_init_size (decl, false);
3614 if (size)
3615 return get_rvalue (size, NULL);
3618 break;
3619 case RK_SIZED:
3620 /* Look through sized regions to get at the capacity
3621 of the underlying regions. */
3622 return get_capacity (reg->get_parent_region ());
3625 if (const svalue *recorded = get_dynamic_extents (reg))
3626 return recorded;
3628 return m_mgr->get_or_create_unknown_svalue (sizetype);
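/* For example, for:
     char buf[16];
   the capacity of the decl_region for "buf" is a constant svalue of
   16, whereas for a heap allocation the capacity typically comes from
   the dynamic extents recorded at the allocation site (e.g. the size
   argument passed to malloc).  */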
3631 /* Return the string size, including the 0-terminator, if SVAL is a
3632 constant_svalue holding a string. Otherwise, return an unknown_svalue. */
3634 const svalue *
3635 region_model::get_string_size (const svalue *sval) const
3637 tree cst = sval->maybe_get_constant ();
3638 if (!cst || TREE_CODE (cst) != STRING_CST)
3639 return m_mgr->get_or_create_unknown_svalue (size_type_node);
3641 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
3642 return m_mgr->get_or_create_constant_svalue (out);
3645 /* Return the string size, including the 0-terminator, if REG is a
3646 string_region. Otherwise, return an unknown_svalue. */
3648 const svalue *
3649 region_model::get_string_size (const region *reg) const
3651 const string_region *str_reg = dyn_cast <const string_region *> (reg);
3652 if (!str_reg)
3653 return m_mgr->get_or_create_unknown_svalue (size_type_node);
3655 tree cst = str_reg->get_string_cst ();
3656 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
3657 return m_mgr->get_or_create_constant_svalue (out);
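/* For example, for the string_region of the literal "foo", the
   result is a constant svalue of 4: three characters plus the
   0-terminator, per TREE_STRING_LENGTH.  */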
3660 /* If CTXT is non-NULL, use it to warn about any problems accessing REG,
3661 using DIR to determine if this access is a read or write. */
3663 void
3664 region_model::check_region_access (const region *reg,
3665 enum access_direction dir,
3666 region_model_context *ctxt) const
3668 /* Fail gracefully if CTXT is NULL. */
3669 if (!ctxt)
3670 return;
3672 check_region_for_taint (reg, dir, ctxt);
3673 check_region_bounds (reg, dir, ctxt);
3675 switch (dir)
3677 default:
3678 gcc_unreachable ();
3679 case DIR_READ:
3680 /* Currently a no-op. */
3681 break;
3682 case DIR_WRITE:
3683 check_for_writable_region (reg, ctxt);
3684 break;
3688 /* If CTXT is non-NULL, use it to warn about any problems writing to REG. */
3690 void
3691 region_model::check_region_for_write (const region *dest_reg,
3692 region_model_context *ctxt) const
3694 check_region_access (dest_reg, DIR_WRITE, ctxt);
3697 /* If CTXT is non-NULL, use it to warn about any problems reading from REG. */
3699 void
3700 region_model::check_region_for_read (const region *src_reg,
3701 region_model_context *ctxt) const
3703 check_region_access (src_reg, DIR_READ, ctxt);
3706 /* Concrete subclass of pending_diagnostic for casts of pointers whose
3707    allocated buffer size is not a multiple of the pointee's size.  */
3708 class dubious_allocation_size
3709 : public pending_diagnostic_subclass<dubious_allocation_size>
3711 public:
3712 dubious_allocation_size (const region *lhs, const region *rhs)
3713   : m_lhs (lhs), m_rhs (rhs), m_expr (NULL_TREE), m_allocation_event (NULL)
3716 dubious_allocation_size (const region *lhs, const region *rhs,
3717 tree expr)
3718   : m_lhs (lhs), m_rhs (rhs), m_expr (expr), m_allocation_event (NULL)
3721 const char *get_kind () const final override
3723 return "dubious_allocation_size";
3726 bool operator== (const dubious_allocation_size &other) const
3728 return m_lhs == other.m_lhs && m_rhs == other.m_rhs
3729 && pending_diagnostic::same_tree_p (m_expr, other.m_expr);
3732 int get_controlling_option () const final override
3734 return OPT_Wanalyzer_allocation_size;
3737 bool emit (rich_location *rich_loc) final override
3739 diagnostic_metadata m;
3740 m.add_cwe (131);
3742 return warning_meta (rich_loc, m, get_controlling_option (),
3743 "allocated buffer size is not a multiple"
3744 " of the pointee's size");
3747 label_text
3748 describe_region_creation_event (const evdesc::region_creation &ev) final
3749 override
3751 m_allocation_event = &ev;
3752 if (m_expr)
3754 if (TREE_CODE (m_expr) == INTEGER_CST)
3755 return ev.formatted_print ("allocated %E bytes here", m_expr);
3756 else
3757 return ev.formatted_print ("allocated %qE bytes here", m_expr);
3760 return ev.formatted_print ("allocated here");
3763 label_text describe_final_event (const evdesc::final_event &ev) final
3764 override
3766 tree pointee_type = TREE_TYPE (m_lhs->get_type ());
3767 if (m_allocation_event)
3768       /* The allocation was already described by the
3769 	 region_creation_event, so don't repeat the size here.  */
3770 return ev.formatted_print ("assigned to %qT here;"
3771 " %<sizeof (%T)%> is %qE",
3772 m_lhs->get_type (), pointee_type,
3773 size_in_bytes (pointee_type));
3775 if (m_expr)
3777 if (TREE_CODE (m_expr) == INTEGER_CST)
3778 return ev.formatted_print ("allocated %E bytes and assigned to"
3779 " %qT here; %<sizeof (%T)%> is %qE",
3780 m_expr, m_lhs->get_type (), pointee_type,
3781 size_in_bytes (pointee_type));
3782 else
3783 return ev.formatted_print ("allocated %qE bytes and assigned to"
3784 " %qT here; %<sizeof (%T)%> is %qE",
3785 m_expr, m_lhs->get_type (), pointee_type,
3786 size_in_bytes (pointee_type));
3789 return ev.formatted_print ("allocated and assigned to %qT here;"
3790 " %<sizeof (%T)%> is %qE",
3791 m_lhs->get_type (), pointee_type,
3792 size_in_bytes (pointee_type));
3795 void mark_interesting_stuff (interesting_t *interest) final override
3797 interest->add_region_creation (m_rhs);
3800 private:
3801 const region *m_lhs;
3802 const region *m_rhs;
3803 const tree m_expr;
3804 const evdesc::region_creation *m_allocation_event;
3807 /* Return true if a constant allocation of CST bytes is compatible with
3808    the pointee size POINTEE_SIZE_TREE; return false on dubious sizes.  */
3809 static bool
3810 capacity_compatible_with_type (tree cst, tree pointee_size_tree,
3811 bool is_struct)
3813 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
3814 gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);
3816 unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
3817 unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);
3819 if (is_struct)
3820 return alloc_size == 0 || alloc_size >= pointee_size;
3821 return alloc_size % pointee_size == 0;
3824 static bool
3825 capacity_compatible_with_type (tree cst, tree pointee_size_tree)
3827 return capacity_compatible_with_type (cst, pointee_size_tree, false);
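/* For example, with a pointee size of 4 (e.g. int32_t): 12 bytes is
   compatible (12 % 4 == 0), but 10 bytes is dubious (10 % 4 != 0);
   with IS_STRUCT, any capacity of zero or of at least the struct's
   own size is accepted.  */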
3830 /* Checks whether SVAL could be a multiple of SIZE_CST.
3832 It works by visiting all svalues inside SVAL until it reaches
3833 atomic nodes. From those, it goes back up again and adds each
3834 node that might be a multiple of SIZE_CST to the RESULT_SET. */
3836 class size_visitor : public visitor
3838 public:
3839 size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
3840 : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
3842 m_root_sval->accept (this);
3845 bool get_result ()
3847 return result_set.contains (m_root_sval);
3850 void visit_constant_svalue (const constant_svalue *sval) final override
3852 check_constant (sval->get_constant (), sval);
3855 void visit_unknown_svalue (const unknown_svalue *sval ATTRIBUTE_UNUSED)
3856 final override
3858 result_set.add (sval);
3861 void visit_poisoned_svalue (const poisoned_svalue *sval ATTRIBUTE_UNUSED)
3862 final override
3864 result_set.add (sval);
3867 void visit_unaryop_svalue (const unaryop_svalue *sval) final override
3869 const svalue *arg = sval->get_arg ();
3870 if (result_set.contains (arg))
3871 result_set.add (sval);
3874 void visit_binop_svalue (const binop_svalue *sval) final override
3876 const svalue *arg0 = sval->get_arg0 ();
3877 const svalue *arg1 = sval->get_arg1 ();
3879 if (sval->get_op () == MULT_EXPR)
3881 if (result_set.contains (arg0) || result_set.contains (arg1))
3882 result_set.add (sval);
3884 else
3886 if (result_set.contains (arg0) && result_set.contains (arg1))
3887 result_set.add (sval);
3891 void visit_repeated_svalue (const repeated_svalue *sval) final override
3893 sval->get_inner_svalue ()->accept (this);
3894 if (result_set.contains (sval->get_inner_svalue ()))
3895 result_set.add (sval);
3898 void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
3900 sval->get_arg ()->accept (this);
3901 if (result_set.contains (sval->get_arg ()))
3902 result_set.add (sval);
3905 void visit_widening_svalue (const widening_svalue *sval) final override
3907 const svalue *base = sval->get_base_svalue ();
3908 const svalue *iter = sval->get_iter_svalue ();
3910 if (result_set.contains (base) && result_set.contains (iter))
3911 result_set.add (sval);
3914 void visit_conjured_svalue (const conjured_svalue *sval ATTRIBUTE_UNUSED)
3915 final override
3917 equiv_class_id id (-1);
3918 if (m_cm->get_equiv_class_by_svalue (sval, &id))
3920 if (tree cst = id.get_obj (*m_cm).get_any_constant ())
3921 check_constant (cst, sval);
3922 else
3923 result_set.add (sval);
3927 void visit_asm_output_svalue (const asm_output_svalue *sval ATTRIBUTE_UNUSED)
3928 final override
3930 result_set.add (sval);
3933 void visit_const_fn_result_svalue (const const_fn_result_svalue
3934 *sval ATTRIBUTE_UNUSED) final override
3936 result_set.add (sval);
3939 private:
3940 void check_constant (tree cst, const svalue *sval)
3942 switch (TREE_CODE (cst))
3944 default:
3945 /* Assume all unhandled operands are compatible. */
3946 result_set.add (sval);
3947 break;
3948 case INTEGER_CST:
3949 if (capacity_compatible_with_type (cst, m_size_cst))
3950 result_set.add (sval);
3951 break;
3955 tree m_size_cst;
3956 const svalue *m_root_sval;
3957 constraint_manager *m_cm;
3958 svalue_set result_set; /* Used as a mapping of svalue*->bool. */
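/* A sketch of how the visitor above classifies a typical size expression
   (hypothetical C code):

     void *p = malloc (n * sizeof (int32_t));

   The size is a binop_svalue (MULT_EXPR, n, 4).  With SIZE_CST == 4, the
   constant operand 4 is added to RESULT_SET by check_constant; since a
   MULT_EXPR needs only one operand in the set, the binop (the root) is
   added too, and get_result () returns true.  */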
3961 /* Return true if a struct or union either uses the inheritance pattern,
3962 where the first field is a base struct, or the flexible array member
3963 pattern, where the last field is an array without a specified size. */
3965 static bool
3966 struct_or_union_with_inheritance_p (tree struc)
3968 tree iter = TYPE_FIELDS (struc);
3969 if (iter == NULL_TREE)
3970 return false;
3971 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
3972 return true;
3974 tree last_field = NULL_TREE;
3975 while (iter != NULL_TREE)
3977 last_field = iter;
3978 iter = DECL_CHAIN (iter);
3981 if (last_field != NULL_TREE
3982 && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
3983 return true;
3985 return false;
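/* Two hypothetical types for which the function above returns true:

     struct derived { struct base b; int extra; };  /* inheritance pattern.  */
     struct str { size_t len; char data[]; };       /* flexible array member.  */

   For such types, deliberately over- or under-allocating relative to
   sizeof is common, so the allocation-size check below bails out.  */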
3988 /* Return true if the lhs and rhs of an assignment have different types. */
3990 static bool
3991 is_any_cast_p (const gimple *stmt)
3993 if (const gassign *assign = dyn_cast <const gassign *> (stmt))
3994 return gimple_assign_cast_p (assign)
3995 || !pending_diagnostic::same_tree_p (
3996 TREE_TYPE (gimple_assign_lhs (assign)),
3997 TREE_TYPE (gimple_assign_rhs1 (assign)));
3998 else if (const gcall *call = dyn_cast <const gcall *> (stmt))
4000 tree lhs = gimple_call_lhs (call);
4001 return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
4002 TREE_TYPE (gimple_call_lhs (call)),
4003 gimple_call_return_type (call));
4006 return false;
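/* E.g. the hypothetical gimple assignment

     int32_t *p = (int32_t *) void_buf;

   has an lhs of type "int32_t *" and an rhs of type "void *", so it is
   treated as a cast and is worth checking in check_region_size below.  */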
4009 /* On pointer assignments, check whether the buffer size of
4010 RHS_SVAL is compatible with the type of the LHS_REG.
4011 Use a non-null CTXT to report allocation size warnings. */
4013 void
4014 region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
4015 region_model_context *ctxt) const
4017 if (!ctxt || ctxt->get_stmt () == NULL)
4018 return;
4019 /* Only report warnings on assignments that actually change the type. */
4020 if (!is_any_cast_p (ctxt->get_stmt ()))
4021 return;
4023 const region_svalue *reg_sval = dyn_cast <const region_svalue *> (rhs_sval);
4024 if (!reg_sval)
4025 return;
4027 tree pointer_type = lhs_reg->get_type ();
4028 if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
4029 return;
4031 tree pointee_type = TREE_TYPE (pointer_type);
4032 /* Make sure that the type on the left-hand side actually has a size. */
4033 if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
4034 || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
4035 return;
4037 /* Bail out early on pointers to structs where we cannot
4038 deduce whether the buffer size is compatible. */
4039 bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
4040 if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
4041 return;
4043 tree pointee_size_tree = size_in_bytes (pointee_type);
4044 /* We give up if the type size is not known at compile-time or the
4045 type size is always compatible regardless of the buffer size. */
4046 if (TREE_CODE (pointee_size_tree) != INTEGER_CST
4047 || integer_zerop (pointee_size_tree)
4048 || integer_onep (pointee_size_tree))
4049 return;
4051 const region *rhs_reg = reg_sval->get_pointee ();
4052 const svalue *capacity = get_capacity (rhs_reg);
4053 switch (capacity->get_kind ())
4055 case svalue_kind::SK_CONSTANT:
4057 const constant_svalue *cst_cap_sval
4058 = as_a <const constant_svalue *> (capacity);
4059 tree cst_cap = cst_cap_sval->get_constant ();
4060 if (TREE_CODE (cst_cap) == INTEGER_CST
4061 && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
4062 is_struct))
4063 ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
4064 cst_cap));
4066 break;
4067 default:
4069 if (!is_struct)
4071 size_visitor v (pointee_size_tree, capacity, m_constraints);
4072 if (!v.get_result ())
4074 tree expr = get_representative_tree (capacity);
4075 ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg,
4076 rhs_reg,
4077 expr));
4080 break;
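/* A hedged sketch of the diagnostic this enables (hypothetical C code):

     int32_t *ptr = malloc (10);  /* 10 bytes is not a multiple of 4.  */

   Here the capacity is the constant 10, which fails
   capacity_compatible_with_type for the 4-byte pointee, so a
   dubious_allocation_size warning is reported via CTXT.  */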
4085 /* Set the value of the region given by LHS_REG to the value given
4086 by RHS_SVAL.
4087 Use CTXT to report any warnings associated with writing to LHS_REG. */
4089 void
4090 region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
4091 region_model_context *ctxt)
4093 gcc_assert (lhs_reg);
4094 gcc_assert (rhs_sval);
4096 check_region_size (lhs_reg, rhs_sval, ctxt);
4098 check_region_for_write (lhs_reg, ctxt);
4100 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
4101 ctxt ? ctxt->get_uncertainty () : NULL);
4104 /* Set the value of the region given by LHS to the value given by RHS. */
4106 void
4107 region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
4109 const region *lhs_reg = get_lvalue (lhs, ctxt);
4110 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
4111 gcc_assert (lhs_reg);
4112 gcc_assert (rhs_sval);
4113 set_value (lhs_reg, rhs_sval, ctxt);
4116 /* Remove all bindings overlapping REG within the store. */
4118 void
4119 region_model::clobber_region (const region *reg)
4121 m_store.clobber_region (m_mgr->get_store_manager(), reg);
4124 /* Remove any bindings for REG within the store. */
4126 void
4127 region_model::purge_region (const region *reg)
4129 m_store.purge_region (m_mgr->get_store_manager(), reg);
4132 /* Fill REG with SVAL. */
4134 void
4135 region_model::fill_region (const region *reg, const svalue *sval)
4137 m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
4140 /* Zero-fill REG. */
4142 void
4143 region_model::zero_fill_region (const region *reg)
4145 m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
4148 /* Mark REG as having unknown content. */
4150 void
4151 region_model::mark_region_as_unknown (const region *reg,
4152 uncertainty_t *uncertainty)
4154 m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
4155 uncertainty);
4158 /* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
4159 this model. */
4161 tristate
4162 region_model::eval_condition (const svalue *lhs,
4163 enum tree_code op,
4164 const svalue *rhs) const
4166 gcc_assert (lhs);
4167 gcc_assert (rhs);
4169 /* For now, make no attempt to capture constraints on floating-point
4170 values. */
4171 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
4172 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
4173 return tristate::unknown ();
4175 /* See what we know based on the values. */
4177 /* Unwrap any unmergeable values. */
4178 lhs = lhs->unwrap_any_unmergeable ();
4179 rhs = rhs->unwrap_any_unmergeable ();
4181 if (lhs == rhs)
4183 /* If we have the same svalue, then we have equality
4184 (apart from NaN-handling).
4185 TODO: should this definitely be the case for poisoned values? */
4186 /* Poisoned and unknown values are "unknowable". */
4187 if (lhs->get_kind () == SK_POISONED
4188 || lhs->get_kind () == SK_UNKNOWN)
4189 return tristate::TS_UNKNOWN;
4191 switch (op)
4193 case EQ_EXPR:
4194 case GE_EXPR:
4195 case LE_EXPR:
4196 return tristate::TS_TRUE;
4198 case NE_EXPR:
4199 case GT_EXPR:
4200 case LT_EXPR:
4201 return tristate::TS_FALSE;
4203 default:
4204 /* For other ops, use the logic below. */
4205 break;
4209 /* If we have a pair of region_svalues, compare them. */
4210 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
4211 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
4213 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
4214 if (res.is_known ())
4215 return res;
4216 /* Otherwise, only known through constraints. */
4219 if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
4221 /* If we have a pair of constants, compare them. */
4222 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
4223 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
4224 else
4226 /* When we have one constant, put it on the RHS. */
4227 std::swap (lhs, rhs);
4228 op = swap_tree_comparison (op);
4231 gcc_assert (lhs->get_kind () != SK_CONSTANT);
4233 /* Handle comparison against zero. */
4234 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
4235 if (zerop (cst_rhs->get_constant ()))
4237 if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
4239 /* A region_svalue is a non-NULL pointer, except in certain
4240 special cases (see the comment for region::non_null_p). */
4241 const region *pointee = ptr->get_pointee ();
4242 if (pointee->non_null_p ())
4244 switch (op)
4246 default:
4247 gcc_unreachable ();
4249 case EQ_EXPR:
4250 case GE_EXPR:
4251 case LE_EXPR:
4252 return tristate::TS_FALSE;
4254 case NE_EXPR:
4255 case GT_EXPR:
4256 case LT_EXPR:
4257 return tristate::TS_TRUE;
4261 else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
4263 /* Treat offsets from a non-NULL pointer as being non-NULL. This
4264 isn't strictly true, in that eventually ptr++ will wrap
4265 around and be NULL, but it won't occur in practice and thus
4266 can be used to effectively suppress false positives that we
4267 shouldn't warn for. */
4268 if (binop->get_op () == POINTER_PLUS_EXPR)
4270 tristate lhs_ts = eval_condition (binop->get_arg0 (), op, rhs);
4271 if (lhs_ts.is_known ())
4272 return lhs_ts;
4277 /* Handle rejection of equality for comparisons of the initial values of
4278 "external" values (such as params) with the address of locals. */
4279 if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
4280 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
4282 tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
4283 if (res.is_known ())
4284 return res;
4286 if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
4287 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
4289 tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
4290 if (res.is_known ())
4291 return res;
4294 if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
4295 if (tree rhs_cst = rhs->maybe_get_constant ())
4297 tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
4298 if (res.is_known ())
4299 return res;
4302 /* Handle comparisons between two svalues with more than one operand. */
4303 if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
4305 switch (op)
4307 default:
4308 break;
4309 case EQ_EXPR:
4311 /* TODO: binops can be equal even if they are not structurally
4312 equal in case of commutative operators. */
4313 tristate res = structural_equality (lhs, rhs);
4314 if (res.is_true ())
4315 return res;
4317 break;
4318 case LE_EXPR:
4320 tristate res = structural_equality (lhs, rhs);
4321 if (res.is_true ())
4322 return res;
4324 break;
4325 case GE_EXPR:
4327 tristate res = structural_equality (lhs, rhs);
4328 if (res.is_true ())
4329 return res;
4330 res = symbolic_greater_than (binop, rhs);
4331 if (res.is_true ())
4332 return res;
4334 break;
4335 case GT_EXPR:
4337 tristate res = symbolic_greater_than (binop, rhs);
4338 if (res.is_true ())
4339 return res;
4341 break;
4345 /* Otherwise, try constraints.
4346 Cast to const to ensure we don't change the constraint_manager as we
4347 do this (e.g. by creating equivalence classes). */
4348 const constraint_manager *constraints = m_constraints;
4349 return constraints->eval_condition (lhs, op, rhs);
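/* For example (hypothetically), given

     int local;
     int *p = &local;

   evaluating the condition "p != 0" yields TS_TRUE via the
   pointer-versus-zero handling above, since a region_svalue that points
   to a local is non-NULL (see region::non_null_p).  */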
4352 /* Subroutine of region_model::eval_condition, for rejecting
4353 equality of INIT_VAL(PARM) with &LOCAL. */
4355 tristate
4356 region_model::compare_initial_and_pointer (const initial_svalue *init,
4357 const region_svalue *ptr) const
4359 const region *pointee = ptr->get_pointee ();
4361 /* If we have a pointer to something within a stack frame, it can't be the
4362 initial value of a param. */
4363 if (pointee->maybe_get_frame_region ())
4364 if (init->initial_value_of_param_p ())
4365 return tristate::TS_FALSE;
4367 return tristate::TS_UNKNOWN;
4370 /* Return true if SVAL is definitely positive. */
4372 static bool
4373 is_positive_svalue (const svalue *sval)
4375 if (tree cst = sval->maybe_get_constant ())
4376 return !zerop (cst) && get_range_pos_neg (cst) == 1;
4377 tree type = sval->get_type ();
4378 if (!type)
4379 return false;
4380 /* Consider a binary operation size_t + int. The analyzer wraps the int in
4381 an unaryop_svalue, converting it to a size_t, but in the dynamic execution
4382 the result may be smaller than the first operand (if the int is negative).
4383 Thus, we have to check whether the argument of the unaryop_svalue is also positive. */
4384 if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
4385 return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
4386 && is_positive_svalue (un_op->get_arg ());
4387 return TYPE_UNSIGNED (type);
4390 /* Return TS_TRUE if A is definitely larger than B.
4392 Limitation: does not account for integer overflows and does not try to
4393 return false, so it cannot be used negated. */
4395 tristate
4396 region_model::symbolic_greater_than (const binop_svalue *bin_a,
4397 const svalue *b) const
4399 if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
4401 /* Eliminate the right-hand side of both svalues. */
4402 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
4403 if (bin_a->get_op () == bin_b->get_op ()
4404 && eval_condition (bin_a->get_arg1 (),
4405 GT_EXPR,
4406 bin_b->get_arg1 ()).is_true ()
4407 && eval_condition (bin_a->get_arg0 (),
4408 GE_EXPR,
4409 bin_b->get_arg0 ()).is_true ())
4410 return tristate (tristate::TS_TRUE);
4412 /* Otherwise, try to remove a positive offset or factor from BIN_A. */
4413 if (is_positive_svalue (bin_a->get_arg1 ())
4414 && eval_condition (bin_a->get_arg0 (),
4415 GE_EXPR, b).is_true ())
4416 return tristate (tristate::TS_TRUE);
4418 return tristate::unknown ();
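/* A worked (hypothetical) example of the second case above: to decide
   "a + 13 > b", we note that 13 is a positive offset and check whether
   "a >= b" is already known; if so, TS_TRUE is returned.  If it is not
   known, the result is unknown rather than false, hence the limitation
   noted in the comment above.  */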
4421 /* Return TS_TRUE if A and B are equal structurally.
4423 Structural equality means that A and B are equal if the svalues A and B have
4424 the same nodes at the same positions in the tree and the leaves are equal.
4425 Equality for conjured_svalues and initial_svalues is determined by comparing
4426 the pointers, while constants are compared by value. That behavior is useful
4427 for checking binop_svalues that evaluate to the same concrete value but
4428 might use an operand of a different type with the same constant value.
4430 For example,
4431 binop_svalue (mult_expr,
4432 initial_svalue (‘size_t’, decl_region (..., 'some_var')),
4433 constant_svalue (‘size_t’, 4))
4435 binop_svalue (mult_expr,
4436 initial_svalue (‘size_t’, decl_region (..., 'some_var')),
4437 constant_svalue (‘sizetype’, 4))
4438 are structurally equal. A concrete C code example, where this occurs, can
4439 be found in test7 of out-of-bounds-5.c. */
4441 tristate
4442 region_model::structural_equality (const svalue *a, const svalue *b) const
4444 /* If A and B are referentially equal, they are also structurally equal. */
4445 if (a == b)
4446 return tristate (tristate::TS_TRUE);
4448 switch (a->get_kind ())
4450 default:
4451 return tristate::unknown ();
4452 /* SK_CONJURED and SK_INITIAL are already handled
4453 by the referential equality above. */
4454 case SK_CONSTANT:
4456 tree a_cst = a->maybe_get_constant ();
4457 tree b_cst = b->maybe_get_constant ();
4458 if (a_cst && b_cst)
4459 return tristate (tree_int_cst_equal (a_cst, b_cst));
4461 return tristate (tristate::TS_FALSE);
4462 case SK_UNARYOP:
4464 const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
4465 if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
4466 return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
4467 un_b->get_type ())
4468 && un_a->get_op () == un_b->get_op ()
4469 && structural_equality (un_a->get_arg (),
4470 un_b->get_arg ()));
4472 return tristate (tristate::TS_FALSE);
4473 case SK_BINOP:
4475 const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
4476 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
4477 return tristate (bin_a->get_op () == bin_b->get_op ()
4478 && structural_equality (bin_a->get_arg0 (),
4479 bin_b->get_arg0 ())
4480 && structural_equality (bin_a->get_arg1 (),
4481 bin_b->get_arg1 ()));
4483 return tristate (tristate::TS_FALSE);
4487 /* Handle various constraints of the form:
4488 LHS: ((bool)INNER_LHS INNER_OP INNER_RHS))
4489 OP : == or !=
4490 RHS: zero
4491 and (with a cast):
4492 LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
4493 OP : == or !=
4494 RHS: zero
4495 by adding constraints for INNER_LHS INNER_OP INNER_RHS.
4497 Return true if this function can fully handle the constraint; if
4498 so, add the implied constraint(s) and write true to *OUT if they
4499 are consistent with existing constraints, or write false to *OUT
4500 if they contradict existing constraints.
4502 Return false for cases that this function doesn't know how to handle.
4504 For example, if we're checking a stored conditional, we'll have
4505 something like:
4506 LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
4507 OP : NE_EXPR
4508 RHS: zero
4509 which this function can turn into an add_constraint of:
4510 (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
4512 Similarly, optimized && and || conditionals lead to e.g.
4513 if (p && q)
4514 becoming gimple like this:
4515 _1 = p_6 == 0B;
4516 _2 = q_8 == 0B;
4517 _3 = _1 | _2;
4518 On the "_3 is false" branch we can have constraints of the form:
4519 ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
4520 | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
4521 == 0
4522 which implies that both _1 and _2 are false,
4523 which this function can turn into a pair of add_constraints of
4524 (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
4525 and:
4526 (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */
4528 bool
4529 region_model::add_constraints_from_binop (const svalue *outer_lhs,
4530 enum tree_code outer_op,
4531 const svalue *outer_rhs,
4532 bool *out,
4533 region_model_context *ctxt)
4535 while (const svalue *cast = outer_lhs->maybe_undo_cast ())
4536 outer_lhs = cast;
4537 const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
4538 if (!binop_sval)
4539 return false;
4540 if (!outer_rhs->all_zeroes_p ())
4541 return false;
4543 const svalue *inner_lhs = binop_sval->get_arg0 ();
4544 enum tree_code inner_op = binop_sval->get_op ();
4545 const svalue *inner_rhs = binop_sval->get_arg1 ();
4547 if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
4548 return false;
4550 /* We have either
4551 - "OUTER_LHS != false" (i.e. OUTER is true), or
4552 - "OUTER_LHS == false" (i.e. OUTER is false). */
4553 bool is_true = outer_op == NE_EXPR;
4555 switch (inner_op)
4557 default:
4558 return false;
4560 case EQ_EXPR:
4561 case NE_EXPR:
4563 /* ...and "(inner_lhs OP inner_rhs) == 0"
4564 then (inner_lhs OP inner_rhs) must have the same
4565 logical value as LHS. */
4566 if (!is_true)
4567 inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
4568 *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
4569 return true;
4571 break;
4573 case BIT_AND_EXPR:
4574 if (is_true)
4576 /* ...and "(inner_lhs & inner_rhs) != 0"
4577 then both inner_lhs and inner_rhs must be true. */
4578 const svalue *false_sval
4579 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
4580 bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
4581 bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
4582 *out = sat1 && sat2;
4583 return true;
4585 return false;
4587 case BIT_IOR_EXPR:
4588 if (!is_true)
4590 /* ...and "(inner_lhs | inner_rhs) == 0"
4591 i.e. "(inner_lhs | inner_rhs)" is false
4592 then both inner_lhs and inner_rhs must be false. */
4593 const svalue *false_sval
4594 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
4595 bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
4596 bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
4597 *out = sat1 && sat2;
4598 return true;
4600 return false;
4604 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
4605 If it is consistent with existing constraints, add it, and return true.
4606 Return false if it contradicts existing constraints.
4607 Use CTXT for reporting any diagnostics associated with the accesses. */
4609 bool
4610 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
4611 region_model_context *ctxt)
4613 /* For now, make no attempt to capture constraints on floating-point
4614 values. */
4615 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
4616 return true;
4618 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
4619 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
4621 return add_constraint (lhs_sval, op, rhs_sval, ctxt);
4624 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
4625 If it is consistent with existing constraints, add it, and return true.
4626 Return false if it contradicts existing constraints.
4627 Use CTXT for reporting any diagnostics associated with the accesses. */
4629 bool
4630 region_model::add_constraint (const svalue *lhs,
4631 enum tree_code op,
4632 const svalue *rhs,
4633 region_model_context *ctxt)
4635 tristate t_cond = eval_condition (lhs, op, rhs);
4637 /* If we already have the condition, do nothing. */
4638 if (t_cond.is_true ())
4639 return true;
4641 /* Reject a constraint that would contradict existing knowledge, as
4642 unsatisfiable. */
4643 if (t_cond.is_false ())
4644 return false;
4646 bool out;
4647 if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
4648 return out;
4650 /* Attempt to store the constraint. */
4651 if (!m_constraints->add_constraint (lhs, op, rhs))
4652 return false;
4654 /* Notify the context, if any. This exists so that the state machines
4655 in a program_state can be notified about the condition, and so can
4656 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
4657 when synthesizing constraints as above. */
4658 if (ctxt)
4659 ctxt->on_condition (lhs, op, rhs);
4661 /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
4662 the case where REGION is heap-allocated and thus could be NULL). */
4663 if (tree rhs_cst = rhs->maybe_get_constant ())
4664 if (op == EQ_EXPR && zerop (rhs_cst))
4665 if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
4666 unset_dynamic_extents (region_sval->get_pointee ());
4668 return true;
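/* E.g. a hypothetical sequence of calls: once "x == 5" has been added,
   a later attempt to add "x != 5" finds eval_condition returning
   TS_FALSE and so returns false, marking that execution path as
   infeasible.  */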
4671 /* As above, but when returning false, if OUT is non-NULL, write a
4672 new rejected_constraint to *OUT. */
4674 bool
4675 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
4676 region_model_context *ctxt,
4677 rejected_constraint **out)
4679 bool sat = add_constraint (lhs, op, rhs, ctxt);
4680 if (!sat && out)
4681 *out = new rejected_op_constraint (*this, lhs, op, rhs);
4682 return sat;
4685 /* Determine what is known about the condition "LHS OP RHS" within
4686 this model.
4687 Use CTXT for reporting any diagnostics associated with the accesses. */
4689 tristate
4690 region_model::eval_condition (tree lhs,
4691 enum tree_code op,
4692 tree rhs,
4693 region_model_context *ctxt)
4695 /* For now, make no attempt to model constraints on floating-point
4696 values. */
4697 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
4698 return tristate::unknown ();
4700 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
4703 /* Implementation of region_model::get_representative_path_var.
4704 Attempt to return a path_var that represents SVAL, or return NULL_TREE.
4705 Use VISITED to prevent infinite mutual recursion with the overload for
4706 regions. */
4708 path_var
4709 region_model::get_representative_path_var_1 (const svalue *sval,
4710 svalue_set *visited) const
4712 gcc_assert (sval);
4714 /* Prevent infinite recursion. */
4715 if (visited->contains (sval))
4716 return path_var (NULL_TREE, 0);
4717 visited->add (sval);
4719 /* Handle casts by recursion into get_representative_path_var. */
4720 if (const svalue *cast_sval = sval->maybe_undo_cast ())
4722 path_var result = get_representative_path_var (cast_sval, visited);
4723 tree orig_type = sval->get_type ();
4724 /* If necessary, wrap the result in a cast. */
4725 if (result.m_tree && orig_type)
4726 result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
4727 return result;
4730 auto_vec<path_var> pvs;
4731 m_store.get_representative_path_vars (this, visited, sval, &pvs);
4733 if (tree cst = sval->maybe_get_constant ())
4734 pvs.safe_push (path_var (cst, 0));
4736 /* Handle string literals and various other pointers. */
4737 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
4739 const region *reg = ptr_sval->get_pointee ();
4740 if (path_var pv = get_representative_path_var (reg, visited))
4741 return path_var (build1 (ADDR_EXPR,
4742 sval->get_type (),
4743 pv.m_tree),
4744 pv.m_stack_depth);
4747 /* If we have a sub_svalue, look for ways to represent the parent. */
4748 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
4750 const svalue *parent_sval = sub_sval->get_parent ();
4751 const region *subreg = sub_sval->get_subregion ();
4752 if (path_var parent_pv
4753 = get_representative_path_var (parent_sval, visited))
4754 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
4755 return path_var (build3 (COMPONENT_REF,
4756 sval->get_type (),
4757 parent_pv.m_tree,
4758 field_reg->get_field (),
4759 NULL_TREE),
4760 parent_pv.m_stack_depth);
4763 /* Handle binops. */
4764 if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
4765 if (path_var lhs_pv
4766 = get_representative_path_var (binop_sval->get_arg0 (), visited))
4767 if (path_var rhs_pv
4768 = get_representative_path_var (binop_sval->get_arg1 (), visited))
4769 return path_var (build2 (binop_sval->get_op (),
4770 sval->get_type (),
4771 lhs_pv.m_tree, rhs_pv.m_tree),
4772 lhs_pv.m_stack_depth);
4774 if (pvs.length () < 1)
4775 return path_var (NULL_TREE, 0);
4777 pvs.qsort (readability_comparator);
4778 return pvs[0];
4781 /* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
4782 Use VISITED to prevent infinite mutual recursion with the overload for
4783 regions.
4785 This function defers to get_representative_path_var_1 to do the work;
4786 it adds verification that get_representative_path_var_1 returned a tree
4787 of the correct type. */
4789 path_var
4790 region_model::get_representative_path_var (const svalue *sval,
4791 svalue_set *visited) const
4793 if (sval == NULL)
4794 return path_var (NULL_TREE, 0);
4796 tree orig_type = sval->get_type ();
4798 path_var result = get_representative_path_var_1 (sval, visited);
4800 /* Verify that the result has the same type as SVAL, if any. */
4801 if (result.m_tree && orig_type)
4802 gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
4804 return result;
4807 /* Attempt to return a tree that represents SVAL, or return NULL_TREE.
4809 Strip off any top-level cast, to avoid messages like
4810 double-free of '(void *)ptr'
4811 from analyzer diagnostics. */
4813 tree
4814 region_model::get_representative_tree (const svalue *sval) const
4816 svalue_set visited;
4817 tree expr = get_representative_path_var (sval, &visited).m_tree;
4819 /* Strip off any top-level cast. */
4820 if (expr && TREE_CODE (expr) == NOP_EXPR)
4821 expr = TREE_OPERAND (expr, 0);
4823 return fixup_tree_for_diagnostic (expr);
4826 tree
4827 region_model::get_representative_tree (const region *reg) const
4829 svalue_set visited;
4830 tree expr = get_representative_path_var (reg, &visited).m_tree;
4832 /* Strip off any top-level cast. */
4833 if (expr && TREE_CODE (expr) == NOP_EXPR)
4834 expr = TREE_OPERAND (expr, 0);
4836 return fixup_tree_for_diagnostic (expr);
4839 /* Implementation of region_model::get_representative_path_var.
4841 Attempt to return a path_var that represents REG, or return
4842 the NULL path_var.
4843 For example, a region for a field of a local would be a path_var
4844 wrapping a COMPONENT_REF.
4845 Use VISITED to prevent infinite mutual recursion with the overload for
4846 svalues. */
4848 path_var
4849 region_model::get_representative_path_var_1 (const region *reg,
4850 svalue_set *visited) const
4852 switch (reg->get_kind ())
4854 default:
4855 gcc_unreachable ();
4857 case RK_FRAME:
4858 case RK_GLOBALS:
4859 case RK_CODE:
4860 case RK_HEAP:
4861 case RK_STACK:
4862 case RK_ROOT:
4863 /* Regions that represent memory spaces are not expressible as trees. */
4864 return path_var (NULL_TREE, 0);
4866 case RK_FUNCTION:
4868 const function_region *function_reg
4869 = as_a <const function_region *> (reg);
4870 return path_var (function_reg->get_fndecl (), 0);
4872 case RK_LABEL:
4874 const label_region *label_reg = as_a <const label_region *> (reg);
4875 return path_var (label_reg->get_label (), 0);
4878 case RK_SYMBOLIC:
4880 const symbolic_region *symbolic_reg
4881 = as_a <const symbolic_region *> (reg);
4882 const svalue *pointer = symbolic_reg->get_pointer ();
4883 path_var pointer_pv = get_representative_path_var (pointer, visited);
4884 if (!pointer_pv)
4885 return path_var (NULL_TREE, 0);
4886 tree offset = build_int_cst (pointer->get_type (), 0);
4887 return path_var (build2 (MEM_REF,
4888 reg->get_type (),
4889 pointer_pv.m_tree,
4890 offset),
4891 pointer_pv.m_stack_depth);
4893 case RK_DECL:
4895 const decl_region *decl_reg = as_a <const decl_region *> (reg);
4896 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
4898 case RK_FIELD:
4900 const field_region *field_reg = as_a <const field_region *> (reg);
4901 path_var parent_pv
4902 = get_representative_path_var (reg->get_parent_region (), visited);
4903 if (!parent_pv)
4904 return path_var (NULL_TREE, 0);
4905 return path_var (build3 (COMPONENT_REF,
4906 reg->get_type (),
4907 parent_pv.m_tree,
4908 field_reg->get_field (),
4909 NULL_TREE),
4910 parent_pv.m_stack_depth);
4913 case RK_ELEMENT:
4915 const element_region *element_reg
4916 = as_a <const element_region *> (reg);
4917 path_var parent_pv
4918 = get_representative_path_var (reg->get_parent_region (), visited);
4919 if (!parent_pv)
4920 return path_var (NULL_TREE, 0);
4921 path_var index_pv
4922 = get_representative_path_var (element_reg->get_index (), visited);
4923 if (!index_pv)
4924 return path_var (NULL_TREE, 0);
4925 return path_var (build4 (ARRAY_REF,
4926 reg->get_type (),
4927 parent_pv.m_tree, index_pv.m_tree,
4928 NULL_TREE, NULL_TREE),
4929 parent_pv.m_stack_depth);
4932 case RK_OFFSET:
4934 const offset_region *offset_reg
4935 = as_a <const offset_region *> (reg);
4936 path_var parent_pv
4937 = get_representative_path_var (reg->get_parent_region (), visited);
4938 if (!parent_pv)
4939 return path_var (NULL_TREE, 0);
4940 path_var offset_pv
4941 = get_representative_path_var (offset_reg->get_byte_offset (),
4942 visited);
4943 if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
4944 return path_var (NULL_TREE, 0);
4945 tree addr_parent = build1 (ADDR_EXPR,
4946 build_pointer_type (reg->get_type ()),
4947 parent_pv.m_tree);
4948 return path_var (build2 (MEM_REF,
4949 reg->get_type (),
4950 addr_parent, offset_pv.m_tree),
4951 parent_pv.m_stack_depth);
4954 case RK_SIZED:
4955 return path_var (NULL_TREE, 0);
4957 case RK_CAST:
4959 path_var parent_pv
4960 = get_representative_path_var (reg->get_parent_region (), visited);
4961 if (!parent_pv)
4962 return path_var (NULL_TREE, 0);
4963 return path_var (build1 (NOP_EXPR,
4964 reg->get_type (),
4965 parent_pv.m_tree),
4966 parent_pv.m_stack_depth);
4969 case RK_HEAP_ALLOCATED:
4970 case RK_ALLOCA:
4971 /* No good way to express heap-allocated/alloca regions as trees. */
4972 return path_var (NULL_TREE, 0);
4974 case RK_STRING:
4976 const string_region *string_reg = as_a <const string_region *> (reg);
4977 return path_var (string_reg->get_string_cst (), 0);
4980 case RK_VAR_ARG:
4981 case RK_UNKNOWN:
4982 return path_var (NULL_TREE, 0);
4986 /* Attempt to return a path_var that represents REG, or return
4987 the NULL path_var.
4988 For example, a region for a field of a local would be a path_var
4989 wrapping a COMPONENT_REF.
4990 Use VISITED to prevent infinite mutual recursion with the overload for
4991 svalues.
4993 This function defers to get_representative_path_var_1 to do the work;
4994 it adds verification that get_representative_path_var_1 returned a tree
4995 of the correct type. */
4997 path_var
4998 region_model::get_representative_path_var (const region *reg,
4999 svalue_set *visited) const
5001 path_var result = get_representative_path_var_1 (reg, visited);
5003 /* Verify that the result has the same type as REG, if any. */
5004 if (result.m_tree && reg->get_type ())
5005 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
5007 return result;
5010 /* Update this model for any phis in SNODE, assuming we came from
5011 LAST_CFG_SUPEREDGE. */
5013 void
5014 region_model::update_for_phis (const supernode *snode,
5015 const cfg_superedge *last_cfg_superedge,
5016 region_model_context *ctxt)
5018 gcc_assert (last_cfg_superedge);
5020 /* Copy this state and pass it to handle_phi so that all of the phi stmts
5021 are effectively handled simultaneously. */
5022 const region_model old_state (*this);
5024 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
5025 !gsi_end_p (gpi); gsi_next (&gpi))
5027 gphi *phi = gpi.phi ();
5029 tree src = last_cfg_superedge->get_phi_arg (phi);
5030 tree lhs = gimple_phi_result (phi);
5032 /* Update next_state based on phi and old_state. */
5033 handle_phi (phi, lhs, src, old_state, ctxt);
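/* Using the OLD_STATE copy gives the phis gimple's parallel-assignment
   semantics.  Conceptually (a hypothetical sketch), a swap such as

     tmp = x; x = y; y = tmp;

   can produce phis whose arguments must all be read from the pre-phi
   state; evaluating them against a partially-updated model could let
   one phi's new value be observed by another.  */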
5037 /* Attempt to update this model for taking EDGE (where the last statement
5038 was LAST_STMT), returning true if the edge can be taken, false
5039 otherwise.
5040 When returning false, if OUT is non-NULL, write a new rejected_constraint
5041 to it.
5043 For CFG superedges where LAST_STMT is a conditional or a switch
5044 statement, attempt to add the relevant conditions for EDGE to this
5045 model, returning true if they are feasible, or false if they are
5046 impossible.
5048 For call superedges, push frame information and store arguments
5049 into parameters.
5051 For return superedges, pop frame information and store return
5052 values into any lhs.
5054 Rejection of call/return superedges happens elsewhere, in
5055 program_point::on_edge (i.e. based on program point, rather
5056 than program state). */
5058 bool
5059 region_model::maybe_update_for_edge (const superedge &edge,
5060 const gimple *last_stmt,
5061 region_model_context *ctxt,
5062 rejected_constraint **out)
5064 /* Handle frame updates for interprocedural edges. */
5065 switch (edge.m_kind)
5067 default:
5068 break;
5070 case SUPEREDGE_CALL:
5072 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
5073 update_for_call_superedge (*call_edge, ctxt);
5075 break;
5077 case SUPEREDGE_RETURN:
5079 const return_superedge *return_edge
5080 = as_a <const return_superedge *> (&edge);
5081 update_for_return_superedge (*return_edge, ctxt);
5083 break;
5085 case SUPEREDGE_INTRAPROCEDURAL_CALL:
5086 /* This is a no-op for call summaries; we should already
5087 have handled the effect of the call summary at the call stmt. */
5088 break;
5091 if (last_stmt == NULL)
5092 return true;
5094 /* Apply any constraints for conditionals/switch statements. */
5096 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
5098 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
5099 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
5102 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
5104 const switch_cfg_superedge *switch_sedge
5105 = as_a <const switch_cfg_superedge *> (&edge);
5106 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
5107 ctxt, out);
5110 /* Apply any constraints due to an exception being thrown. */
5111 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
5112 if (cfg_sedge->get_flags () & EDGE_EH)
5113 return apply_constraints_for_exception (last_stmt, ctxt, out);
5115 return true;
5118 /* Push a new frame_region on to the stack region.
5119 Populate the frame_region with child regions for the function call's
5120 parameters, using values from the arguments at the callsite in the
5121 caller's frame. */
5123 void
5124 region_model::update_for_gcall (const gcall *call_stmt,
5125 region_model_context *ctxt,
5126 function *callee)
5128 /* Build a vec of argument svalues, using the current top
5129 frame for resolving tree expressions. */
5130 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
5132 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
5134 tree arg = gimple_call_arg (call_stmt, i);
5135 arg_svals.quick_push (get_rvalue (arg, ctxt));
5138 if (!callee)
5140 /* Get the function * from the gcall. */
5141 tree fn_decl = get_fndecl_for_call (call_stmt, ctxt);
5142 callee = DECL_STRUCT_FUNCTION (fn_decl);
5145 push_frame (callee, &arg_svals, ctxt);
5148 /* Pop the top-most frame_region from the stack, and copy the return
5149 region's values (if any) into the region for the lvalue of the LHS of
5150 the call (if any). */
5152 void
5153 region_model::update_for_return_gcall (const gcall *call_stmt,
5154 region_model_context *ctxt)
5156 /* Get the lvalue for the result of the call, passing it to pop_frame,
5157 so that pop_frame can determine the region with respect to the
5158 *caller* frame. */
5159 tree lhs = gimple_call_lhs (call_stmt);
5160 pop_frame (lhs, NULL, ctxt);
5163 /* Extract calling information from the superedge and update the model for the
5164 call. */
5166 void
5167 region_model::update_for_call_superedge (const call_superedge &call_edge,
5168 region_model_context *ctxt)
5170 const gcall *call_stmt = call_edge.get_call_stmt ();
5171 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
5174 /* Extract calling information from the return superedge and update the model
5175 for the returning call. */
5177 void
5178 region_model::update_for_return_superedge (const return_superedge &return_edge,
5179 region_model_context *ctxt)
5181 const gcall *call_stmt = return_edge.get_call_stmt ();
5182 update_for_return_gcall (call_stmt, ctxt);
5185 /* Attempt to use R to replay SUMMARY into this object.
5186 Return true if it is possible. */
5188 bool
5189 region_model::replay_call_summary (call_summary_replay &r,
5190 const region_model &summary)
5192 gcc_assert (summary.get_stack_depth () == 1);
5194 m_store.replay_call_summary (r, summary.m_store);
5196 if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
5197 return false;
5199 for (auto kv : summary.m_dynamic_extents)
5201 const region *summary_reg = kv.first;
5202 const region *caller_reg = r.convert_region_from_summary (summary_reg);
5203 if (!caller_reg)
5204 continue;
5205 const svalue *summary_sval = kv.second;
5206 const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
5207 if (!caller_sval)
5208 continue;
5209 m_dynamic_extents.put (caller_reg, caller_sval);
5212 return true;
5215 /* Given a true or false edge guarded by conditional statement COND_STMT,
5216 determine appropriate constraints for the edge to be taken.
5218 If they are feasible, add the constraints and return true.
5220 Return false if the constraints contradict existing knowledge
5221 (and so the edge should not be taken).
5222 When returning false, if OUT is non-NULL, write a new rejected_constraint
5223 to it. */
5225 bool
5226 region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
5227 const gcond *cond_stmt,
5228 region_model_context *ctxt,
5229 rejected_constraint **out)
5231 ::edge cfg_edge = sedge.get_cfg_edge ();
5232 gcc_assert (cfg_edge != NULL);
5233 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
5235 enum tree_code op = gimple_cond_code (cond_stmt);
5236 tree lhs = gimple_cond_lhs (cond_stmt);
5237 tree rhs = gimple_cond_rhs (cond_stmt);
5238 if (cfg_edge->flags & EDGE_FALSE_VALUE)
5239 op = invert_tree_comparison (op, false /* honor_nans */);
5240 return add_constraint (lhs, op, rhs, ctxt, out);
5243 /* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
5244 for the edge to be taken.
5246 If they are feasible, add the constraints and return true.
5248 Return false if the constraints contradict existing knowledge
5249 (and so the edge should not be taken).
5250 When returning false, if OUT is non-NULL, write a new rejected_constraint
5251 to it. */
5253 bool
5254 region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
5255 const gswitch *switch_stmt,
5256 region_model_context *ctxt,
5257 rejected_constraint **out)
5259 bounded_ranges_manager *ranges_mgr = get_range_manager ();
5260 const bounded_ranges *all_cases_ranges
5261 = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
5262 tree index = gimple_switch_index (switch_stmt);
5263 const svalue *index_sval = get_rvalue (index, ctxt);
5264 bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
5265 if (!sat && out)
5266 *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
5267 if (sat && ctxt && !all_cases_ranges->empty_p ())
5268 ctxt->on_bounded_ranges (*index_sval, *all_cases_ranges);
5269 return sat;
5272 /* Apply any constraints due to an exception being thrown at LAST_STMT.
5274 If they are feasible, add the constraints and return true.
5276 Return false if the constraints contradict existing knowledge
5277 (and so the edge should not be taken).
5278 When returning false, if OUT is non-NULL, write a new rejected_constraint
5279 to it. */
5281 bool
5282 region_model::apply_constraints_for_exception (const gimple *last_stmt,
5283 region_model_context *ctxt,
5284 rejected_constraint **out)
5286 gcc_assert (last_stmt);
5287 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
5288 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
5289 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
5290 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
5292 /* We have an exception thrown from operator new.
5293 Add a constraint that the result was NULL, to avoid a false
5294 leak report due to the result being lost when following
5295 the EH edge. */
5296 if (tree lhs = gimple_call_lhs (call))
5297 return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
5298 return true;
5300 return true;
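/* E.g. for the hypothetical C++ fragment

     int *p = new int;   /* following the EH edge: "operator new" threw.  */

   the constraint "p == NULL" is added on the exception path, so the
   (lost) allocation is not wrongly reported as leaking there.  */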
5303 /* For use with push_frame when handling a top-level call within the analysis.
5304 PARAM has a defined but unknown initial value.
5305 Anything it points to has escaped, since the calling context "knows"
5306 the pointer, and thus calls to unknown functions could read/write into
5307 the region. */
5309 void
5310 region_model::on_top_level_param (tree param,
5311 region_model_context *ctxt)
5313 if (POINTER_TYPE_P (TREE_TYPE (param)))
5315 const region *param_reg = get_lvalue (param, ctxt);
5316 const svalue *init_ptr_sval
5317 = m_mgr->get_or_create_initial_value (param_reg);
5318 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
5319 m_store.mark_as_escaped (pointee_reg);
5323 /* Update this region_model to reflect pushing a frame onto the stack
5324 for a call to FUN.
5326 If ARG_SVALS is non-NULL, use it to populate the parameters
5327 in the new frame.
5328 Otherwise, the params have their initial_svalues.
5330 Return the frame_region for the new frame. */
5332 const region *
5333 region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
5334 region_model_context *ctxt)
5336 m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
5337 if (arg_svals)
5339 /* Arguments supplied from a caller frame. */
5340 tree fndecl = fun->decl;
5341 unsigned idx = 0;
5342 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
5343 iter_parm = DECL_CHAIN (iter_parm), ++idx)
5345 /* If there's a mismatching declaration, the call stmt might
5346 not have enough args. Handle this case by leaving the
5347 rest of the params as uninitialized. */
5348 if (idx >= arg_svals->length ())
5349 break;
5350 tree parm_lval = iter_parm;
5351 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
5352 parm_lval = parm_default_ssa;
5353 const region *parm_reg = get_lvalue (parm_lval, ctxt);
5354 const svalue *arg_sval = (*arg_svals)[idx];
5355 set_value (parm_reg, arg_sval, ctxt);
5358 /* Handle any variadic args. */
5359 unsigned va_arg_idx = 0;
5360 for (; idx < arg_svals->length (); idx++, va_arg_idx++)
5362 const svalue *arg_sval = (*arg_svals)[idx];
5363 const region *var_arg_reg
5364 = m_mgr->get_var_arg_region (m_current_frame,
5365 va_arg_idx);
5366 set_value (var_arg_reg, arg_sval, ctxt);
5369 else
5371 /* Otherwise we have a top-level call within the analysis. The params
5372 have defined but unknown initial values.
5373 Anything they point to has escaped. */
5374 tree fndecl = fun->decl;
5375 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
5376 iter_parm = DECL_CHAIN (iter_parm))
5378 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
5379 on_top_level_param (parm_default_ssa, ctxt);
5380 else
5381 on_top_level_param (iter_parm, ctxt);
5385 return m_current_frame;
5388 /* Get the function of the top-most frame in this region_model's stack.
5389 There must be such a frame. */
5391 function *
5392 region_model::get_current_function () const
5394 const frame_region *frame = get_current_frame ();
5395 gcc_assert (frame);
5396 return frame->get_function ();
5399 /* Pop the topmost frame_region from this region_model's stack;
5401 If RESULT_LVALUE is non-null, copy any return value from the frame
5402 into the corresponding region (evaluated with respect to the *caller*
5403 frame, rather than the called frame).
5404 If OUT_RESULT is non-null, copy any return value from the frame
5405 into *OUT_RESULT.
5407 Purge the frame region and all its descendent regions.
5408 Convert any pointers that point into such regions into
5409 POISON_KIND_POPPED_STACK svalues. */
5411 void
5412 region_model::pop_frame (tree result_lvalue,
5413 const svalue **out_result,
5414 region_model_context *ctxt)
5416 gcc_assert (m_current_frame);
5418 /* Evaluate the result, within the callee frame. */
5419 const frame_region *frame_reg = m_current_frame;
5420 tree fndecl = m_current_frame->get_function ()->decl;
5421 tree result = DECL_RESULT (fndecl);
5422 const svalue *retval = NULL;
5423 if (result && TREE_TYPE (result) != void_type_node)
5425 retval = get_rvalue (result, ctxt);
5426 if (out_result)
5427 *out_result = retval;
5430 /* Pop the frame. */
5431 m_current_frame = m_current_frame->get_calling_frame ();
5433 if (result_lvalue && retval)
5435 /* Compute result_dst_reg using RESULT_LVALUE *after* popping
5436 the frame, but before poisoning pointers into the old frame. */
5437 const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
5438 set_value (result_dst_reg, retval, ctxt);
5441 unbind_region_and_descendents (frame_reg, POISON_KIND_POPPED_STACK);
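/* A hypothetical example of the poisoning performed above:

     int *callee (void) { int local; return &local; }
     ...
     int *q = callee ();

   Popping the frame for "callee" first copies the returned pointer into
   "q", then replaces any pointer into the dead frame with a
   POISON_KIND_POPPED_STACK svalue, so later dereferences of "q" can be
   diagnosed as uses of a stale stack address.  */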
5444 /* Get the number of frames in this region_model's stack. */
5446 int
5447 region_model::get_stack_depth () const
5449 const frame_region *frame = get_current_frame ();
5450 if (frame)
5451 return frame->get_stack_depth ();
5452 else
5453 return 0;
5456 /* Get the frame_region with the given index within the stack.
5457 The frame_region must exist. */
5459 const frame_region *
5460 region_model::get_frame_at_index (int index) const
5462 const frame_region *frame = get_current_frame ();
5463 gcc_assert (frame);
5464 gcc_assert (index >= 0);
5465 gcc_assert (index <= frame->get_index ());
5466 while (index != frame->get_index ())
5468 frame = frame->get_calling_frame ();
5469 gcc_assert (frame);
5471 return frame;
5474 /* Unbind svalues for any regions in REG and below.
5475 Find any pointers to such regions; convert them to
5476 poisoned values of kind PKIND.
5477 Also purge any dynamic extents. */
5479 void
5480 region_model::unbind_region_and_descendents (const region *reg,
5481 enum poison_kind pkind)
5483 /* Gather a set of base regions to be unbound. */
5484 hash_set<const region *> base_regs;
5485 for (store::cluster_map_t::iterator iter = m_store.begin ();
5486 iter != m_store.end (); ++iter)
5488 const region *iter_base_reg = (*iter).first;
5489 if (iter_base_reg->descendent_of_p (reg))
5490 base_regs.add (iter_base_reg);
5492 for (hash_set<const region *>::iterator iter = base_regs.begin ();
5493 iter != base_regs.end (); ++iter)
5494 m_store.purge_cluster (*iter);
5496 /* Find any pointers to REG or its descendents; convert to poisoned. */
5497 poison_any_pointers_to_descendents (reg, pkind);
5499 /* Purge dynamic extents of any base regions in REG and below
5500 (e.g. VLAs and alloca stack regions). */
5501 for (auto iter : m_dynamic_extents)
5503 const region *iter_reg = iter.first;
5504 if (iter_reg->descendent_of_p (reg))
5505 unset_dynamic_extents (iter_reg);
5509 /* Implementation of BindingVisitor.
5510 Update the bound svalues for regions below REG to use poisoned
5511 values instead. */
5513 struct bad_pointer_finder
5515 bad_pointer_finder (const region *reg, enum poison_kind pkind,
5516 region_model_manager *mgr)
5517 : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
5520 void on_binding (const binding_key *, const svalue *&sval)
5522 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
5524 const region *ptr_dst = ptr_sval->get_pointee ();
5525 /* Poison ptrs to descendents of REG, but not to REG itself,
5526 otherwise double-free detection doesn't work (since sm-state
5527 for "free" is stored on the original ptr svalue). */
5528 if (ptr_dst->descendent_of_p (m_reg)
5529 && ptr_dst != m_reg)
5531 sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
5532 sval->get_type ());
5533 ++m_count;
5538 const region *m_reg;
5539 enum poison_kind m_pkind;
5540 region_model_manager *const m_mgr;
5541 int m_count;
5544 /* Find any pointers to REG or its descendents; convert them to
5545 poisoned values of kind PKIND.
5546 Return the number of pointers that were poisoned. */
5548 int
5549 region_model::poison_any_pointers_to_descendents (const region *reg,
5550 enum poison_kind pkind)
5552 bad_pointer_finder bv (reg, pkind, m_mgr);
5553 m_store.for_each_binding (bv);
5554 return bv.m_count;
5557 /* Attempt to merge THIS with OTHER_MODEL, writing the result
5558 to OUT_MODEL. Use POINT to distinguish values created as a
5559 result of merging. */
5561 bool
5562 region_model::can_merge_with_p (const region_model &other_model,
5563 const program_point &point,
5564 region_model *out_model,
5565 const extrinsic_state *ext_state,
5566 const program_state *state_a,
5567 const program_state *state_b) const
5569 gcc_assert (out_model);
5570 gcc_assert (m_mgr == other_model.m_mgr);
5571 gcc_assert (m_mgr == out_model->m_mgr);
5573 if (m_current_frame != other_model.m_current_frame)
5574 return false;
5575 out_model->m_current_frame = m_current_frame;
5577 model_merger m (this, &other_model, point, out_model,
5578 ext_state, state_a, state_b);
5580 if (!store::can_merge_p (&m_store, &other_model.m_store,
5581 &out_model->m_store, m_mgr->get_store_manager (),
5582 &m))
5583 return false;
5585 if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
5586 &out_model->m_dynamic_extents))
5587 return false;
5589 /* Merge constraints. */
5590 constraint_manager::merge (*m_constraints,
5591 *other_model.m_constraints,
5592 out_model->m_constraints);
5594 return true;
5597 /* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
5598 otherwise. */
5600 tree
5601 region_model::get_fndecl_for_call (const gcall *call,
5602 region_model_context *ctxt)
5604 tree fn_ptr = gimple_call_fn (call);
5605 if (fn_ptr == NULL_TREE)
5606 return NULL_TREE;
5607 const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
5608 if (const region_svalue *fn_ptr_ptr
5609 = fn_ptr_sval->dyn_cast_region_svalue ())
5611 const region *reg = fn_ptr_ptr->get_pointee ();
5612 if (const function_region *fn_reg = reg->dyn_cast_function_region ())
5614 tree fn_decl = fn_reg->get_fndecl ();
5615 cgraph_node *node = cgraph_node::get (fn_decl);
5616 if (!node)
5617 return NULL_TREE;
5618 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
5619 if (ultimate_node)
5620 return ultimate_node->decl;
5624 return NULL_TREE;
5627 /* Would be much simpler to use a lambda here, if it were supported. */
5629 struct append_regions_cb_data
5631 const region_model *model;
5632 auto_vec<const decl_region *> *out;
5635 /* Populate *OUT with all decl_regions in the current
5636 frame that have clusters within the store. */
5638 void
5639 region_model::
5640 get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
5642 append_regions_cb_data data;
5643 data.model = this;
5644 data.out = out;
5645 m_store.for_each_cluster (append_regions_cb, &data);
5648 /* Implementation detail of get_regions_for_current_frame. */
5650 void
5651 region_model::append_regions_cb (const region *base_reg,
5652 append_regions_cb_data *cb_data)
5654 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
5655 return;
5656 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
5657 cb_data->out->safe_push (decl_reg);
5661 /* Abstract class for diagnostics related to the use of
5662 floating-point arithmetic where precision is needed. */
5664 class imprecise_floating_point_arithmetic : public pending_diagnostic
5666 public:
5667 int get_controlling_option () const final override
5669 return OPT_Wanalyzer_imprecise_fp_arithmetic;
5673 /* Concrete diagnostic to complain about uses of floating-point arithmetic
5674 in the size argument of malloc etc. */
5676 class float_as_size_arg : public imprecise_floating_point_arithmetic
5678 public:
5679 float_as_size_arg (tree arg) : m_arg (arg)
5682 const char *get_kind () const final override
5684 return "float_as_size_arg_diagnostic";
5687 bool subclass_equal_p (const pending_diagnostic &other) const final override
5689 return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
5692 bool emit (rich_location *rich_loc) final override
5694 diagnostic_metadata m;
5695 bool warned = warning_meta (rich_loc, m, get_controlling_option (),
5696 "use of floating-point arithmetic here might"
5697 " yield unexpected results");
5698 if (warned)
5699 inform (rich_loc->get_loc (), "only use operands of an integer type"
5700 " inside the size argument");
5701 return warned;
5704 label_text describe_final_event (const evdesc::final_event &ev) final
5705 override
5707 if (m_arg)
5708 return ev.formatted_print ("operand %qE is of type %qT",
5709 m_arg, TREE_TYPE (m_arg));
5710 return ev.formatted_print ("at least one operand of the size argument is"
5711 " of a floating-point type");
5714 private:
5715 tree m_arg;
5718 /* Visitor to find uses of floating-point variables/constants in an svalue. */
5720 class contains_floating_point_visitor : public visitor
5722 public:
5723 contains_floating_point_visitor (const svalue *root_sval) : m_result (NULL)
5725 root_sval->accept (this);
5728 const svalue *get_svalue_to_report ()
5730 return m_result;
5733 void visit_constant_svalue (const constant_svalue *sval) final override
5735 /* At the point the analyzer runs, constant integer operands in a floating
5736 point expression have already been implicitly converted to floating-point.
5737 Thus, we prefer to report non-constants so that the diagnostic
5738 always reports a floating-point operand. */
5739 tree type = sval->get_type ();
5740 if (type && FLOAT_TYPE_P (type) && !m_result)
5741 m_result = sval;
5744 void visit_conjured_svalue (const conjured_svalue *sval) final override
5746 tree type = sval->get_type ();
5747 if (type && FLOAT_TYPE_P (type))
5748 m_result = sval;
5751 void visit_initial_svalue (const initial_svalue *sval) final override
5753 tree type = sval->get_type ();
5754 if (type && FLOAT_TYPE_P (type))
5755 m_result = sval;
5758 private:
5759 /* Non-null if at least one floating-point operand was found. */
5760 const svalue *m_result;
5763 /* May complain about uses of floating-point operands in SIZE_IN_BYTES. */
5765 void
5766 region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
5767 region_model_context *ctxt) const
5769 gcc_assert (ctxt);
5771 contains_floating_point_visitor v (size_in_bytes);
5772 if (const svalue *float_sval = v.get_svalue_to_report ())
5774 tree diag_arg = get_representative_tree (float_sval);
5775 ctxt->warn (make_unique<float_as_size_arg> (diag_arg));
5779 /* Return a new region describing a heap-allocated block of memory.
5780 Use CTXT to complain about tainted sizes. */
5782 const region *
5783 region_model::create_region_for_heap_alloc (const svalue *size_in_bytes,
5784 region_model_context *ctxt)
5786 const region *reg = m_mgr->create_region_for_heap_alloc ();
5787 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
5788 set_dynamic_extents (reg, size_in_bytes, ctxt);
5789 return reg;
5792 /* Return a new region describing a block of memory allocated within the
5793 current frame.
5794 Use CTXT to complain about tainted sizes. */
5796 const region *
5797 region_model::create_region_for_alloca (const svalue *size_in_bytes,
5798 region_model_context *ctxt)
5800 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
5801 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
5802 set_dynamic_extents (reg, size_in_bytes, ctxt);
5803 return reg;
5806 /* Record that the size of REG is SIZE_IN_BYTES.
5807 Use CTXT to complain about tainted sizes. */
5809 void
5810 region_model::set_dynamic_extents (const region *reg,
5811 const svalue *size_in_bytes,
5812 region_model_context *ctxt)
5814 assert_compat_types (size_in_bytes->get_type (), size_type_node);
5815 if (ctxt)
5817 check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
5818 ctxt);
5819 check_dynamic_size_for_floats (size_in_bytes, ctxt);
5821 m_dynamic_extents.put (reg, size_in_bytes);
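/* Putting the functions above together, a heap allocation such as
   "p = malloc (n);" is modeled roughly as follows (a sketch, eliding how
   the returned pointer gets bound):

     const svalue *size_sval = ...;  /* svalue for "n" as a size_t.  */
     const region *new_reg
       = model->create_region_for_heap_alloc (size_sval, ctxt);

   create_region_for_heap_alloc calls set_dynamic_extents, which in turn
   runs the taint and floating-point checks on the size.  The size is only
   recorded when its type is compatible with size_type_node; otherwise the
   new region simply has no dynamic extent.  */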
5824 /* Get the recorded size of REG in bytes, or NULL if no dynamic size was
5825 recorded. */
5827 const svalue *
5828 region_model::get_dynamic_extents (const region *reg) const
5830 if (const svalue * const *slot = m_dynamic_extents.get (reg))
5831 return *slot;
5832 return NULL;
5835 /* Unset any recorded dynamic size of REG. */
5837 void
5838 region_model::unset_dynamic_extents (const region *reg)
5840 m_dynamic_extents.remove (reg);
5843 /* Information about the layout of a RECORD_TYPE, capturing it as a vector
5844 of items, where each item is either a field or padding. */
5846 class record_layout
5848 public:
5849 /* An item within a record; either a field, or padding after a field. */
5850 struct item
5852 public:
5853 item (const bit_range &br,
5854 tree field,
5855 bool is_padding)
5856 : m_bit_range (br),
5857 m_field (field),
5858 m_is_padding (is_padding)
5862 bit_offset_t get_start_bit_offset () const
5864 return m_bit_range.get_start_bit_offset ();
5866 bit_offset_t get_next_bit_offset () const
5868 return m_bit_range.get_next_bit_offset ();
5871 bool contains_p (bit_offset_t offset) const
5873 return m_bit_range.contains_p (offset);
5876 void dump_to_pp (pretty_printer *pp) const
5878 if (m_is_padding)
5879 pp_printf (pp, "padding after %qD", m_field);
5880 else
5881 pp_printf (pp, "%qD", m_field);
5882 pp_string (pp, ", ");
5883 m_bit_range.dump_to_pp (pp);
5886 bit_range m_bit_range;
5887 tree m_field;
5888 bool m_is_padding;
5891 record_layout (tree record_type)
5893 gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);
5895 for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
5896 iter = DECL_CHAIN (iter))
5898 if (TREE_CODE (iter) == FIELD_DECL)
5900 int iter_field_offset = int_bit_position (iter);
5901 bit_size_t size_in_bits;
5902 if (!int_size_in_bits (TREE_TYPE (iter), &size_in_bits))
5903 size_in_bits = 0;
5905 maybe_pad_to (iter_field_offset);
5907 /* Add field. */
5908 m_items.safe_push (item (bit_range (iter_field_offset,
5909 size_in_bits),
5910 iter, false));
5914 /* Add any trailing padding. */
5915 bit_size_t size_in_bits;
5916 if (int_size_in_bits (record_type, &size_in_bits))
5917 maybe_pad_to (size_in_bits);
5920 void dump_to_pp (pretty_printer *pp) const
5922 unsigned i;
5923 item *it;
5924 FOR_EACH_VEC_ELT (m_items, i, it)
5926 it->dump_to_pp (pp);
5927 pp_newline (pp);
5931 DEBUG_FUNCTION void dump () const
5933 pretty_printer pp;
5934 pp_format_decoder (&pp) = default_tree_printer;
5935 pp.buffer->stream = stderr;
5936 dump_to_pp (&pp);
5937 pp_flush (&pp);
5940 const record_layout::item *get_item_at (bit_offset_t offset) const
5942 unsigned i;
5943 item *it;
5944 FOR_EACH_VEC_ELT (m_items, i, it)
5945 if (it->contains_p (offset))
5946 return it;
5947 return NULL;
5950 private:
5951 /* Subroutine of ctor. Add a padding item up to NEXT_OFFSET if necessary. */
5953 void maybe_pad_to (bit_offset_t next_offset)
5955 if (m_items.length () > 0)
5957 const item &last_item = m_items[m_items.length () - 1];
5958 bit_offset_t offset_after_last_item
5959 = last_item.get_next_bit_offset ();
5960 if (next_offset > offset_after_last_item)
5962 bit_size_t padding_size
5963 = next_offset - offset_after_last_item;
5964 m_items.safe_push (item (bit_range (offset_after_last_item,
5965 padding_size),
5966 last_item.m_field, true));
5971 auto_vec<item> m_items;
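/* A worked example of the items computed above.  Assuming an ABI with
   8-bit chars, 32-bit ints, and 32-bit alignment for int, the record:

     struct s { char c; int i; };

   would be captured as the items:

     c, bits 0-7
     padding after c, bits 8-31
     i, bits 32-63

   matching the maybe_pad_to logic, which inserts a padding item whenever
   the next field starts beyond the end of the previous item.  */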
5974 /* A subclass of pending_diagnostic for complaining about uninitialized data
5975 being copied across a trust boundary to an untrusted output
5976 (e.g. copy_to_user infoleaks in the Linux kernel). */
5978 class exposure_through_uninit_copy
5979 : public pending_diagnostic_subclass<exposure_through_uninit_copy>
5981 public:
5982 exposure_through_uninit_copy (const region *src_region,
5983 const region *dest_region,
5984 const svalue *copied_sval)
5985 : m_src_region (src_region),
5986 m_dest_region (dest_region),
5987 m_copied_sval (copied_sval)
5989 gcc_assert (m_copied_sval->get_kind () == SK_POISONED
5990 || m_copied_sval->get_kind () == SK_COMPOUND);
5993 const char *get_kind () const final override
5995 return "exposure_through_uninit_copy";
5998 bool operator== (const exposure_through_uninit_copy &other) const
6000 return (m_src_region == other.m_src_region
6001 && m_dest_region == other.m_dest_region
6002 && m_copied_sval == other.m_copied_sval);
6005 int get_controlling_option () const final override
6007 return OPT_Wanalyzer_exposure_through_uninit_copy;
6010 bool emit (rich_location *rich_loc) final override
6012 diagnostic_metadata m;
6013 /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor. */
6014 m.add_cwe (200);
6015 enum memory_space mem_space = get_src_memory_space ();
6016 bool warned;
6017 switch (mem_space)
6019 default:
6020 warned = warning_meta
6021 (rich_loc, m, get_controlling_option (),
6022 "potential exposure of sensitive information"
6023 " by copying uninitialized data across trust boundary");
6024 break;
6025 case MEMSPACE_STACK:
6026 warned = warning_meta
6027 (rich_loc, m, get_controlling_option (),
6028 "potential exposure of sensitive information"
6029 " by copying uninitialized data from stack across trust boundary");
6030 break;
6031 case MEMSPACE_HEAP:
6032 warned = warning_meta
6033 (rich_loc, m, get_controlling_option (),
6034 "potential exposure of sensitive information"
6035 " by copying uninitialized data from heap across trust boundary");
6036 break;
6038 if (warned)
6040 location_t loc = rich_loc->get_loc ();
6041 inform_number_of_uninit_bits (loc);
6042 complain_about_uninit_ranges (loc);
6044 if (mem_space == MEMSPACE_STACK)
6045 maybe_emit_fixit_hint ();
6047 return warned;
6050 label_text describe_final_event (const evdesc::final_event &) final override
6052 enum memory_space mem_space = get_src_memory_space ();
6053 switch (mem_space)
6055 default:
6056 return label_text::borrow ("uninitialized data copied here");
6058 case MEMSPACE_STACK:
6059 return label_text::borrow ("uninitialized data copied from stack here");
6061 case MEMSPACE_HEAP:
6062 return label_text::borrow ("uninitialized data copied from heap here");
6066 void mark_interesting_stuff (interesting_t *interest) final override
6068 if (m_src_region)
6069 interest->add_region_creation (m_src_region);
6072 private:
6073 enum memory_space get_src_memory_space () const
6075 return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
6078 bit_size_t calc_num_uninit_bits () const
6080 switch (m_copied_sval->get_kind ())
6082 default:
6083 gcc_unreachable ();
6084 break;
6085 case SK_POISONED:
6087 const poisoned_svalue *poisoned_sval
6088 = as_a <const poisoned_svalue *> (m_copied_sval);
6089 gcc_assert (poisoned_sval->get_poison_kind () == POISON_KIND_UNINIT);
6091 /* Give up if we don't have type information. */
6092 if (m_copied_sval->get_type () == NULL_TREE)
6093 return 0;
6095 bit_size_t size_in_bits;
6096 if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
6097 return size_in_bits;
6099 /* Give up if we can't get the size of the type. */
6100 return 0;
6102 break;
6103 case SK_COMPOUND:
6105 const compound_svalue *compound_sval
6106 = as_a <const compound_svalue *> (m_copied_sval);
6107 bit_size_t result = 0;
6108 /* Find keys for uninit svals. */
6109 for (auto iter : *compound_sval)
6111 const svalue *sval = iter.second;
6112 if (const poisoned_svalue *psval
6113 = sval->dyn_cast_poisoned_svalue ())
6114 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
6116 const binding_key *key = iter.first;
6117 const concrete_binding *ckey
6118 = key->dyn_cast_concrete_binding ();
6119 gcc_assert (ckey);
6120 result += ckey->get_size_in_bits ();
6123 return result;
6128 void inform_number_of_uninit_bits (location_t loc) const
6130 bit_size_t num_uninit_bits = calc_num_uninit_bits ();
6131 if (num_uninit_bits <= 0)
6132 return;
6133 if (num_uninit_bits % BITS_PER_UNIT == 0)
6135 /* Express in bytes. */
6136 byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT;
6137 if (num_uninit_bytes == 1)
6138 inform (loc, "1 byte is uninitialized");
6139 else
6140 inform (loc,
6141 "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
6143 else
6145 /* Express in bits. */
6146 if (num_uninit_bits == 1)
6147 inform (loc, "1 bit is uninitialized");
6148 else
6149 inform (loc,
6150 "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
6154 void complain_about_uninit_ranges (location_t loc) const
6156 if (const compound_svalue *compound_sval
6157 = m_copied_sval->dyn_cast_compound_svalue ())
6159 /* Find keys for uninit svals. */
6160 auto_vec<const concrete_binding *> uninit_keys;
6161 for (auto iter : *compound_sval)
6163 const svalue *sval = iter.second;
6164 if (const poisoned_svalue *psval
6165 = sval->dyn_cast_poisoned_svalue ())
6166 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
6168 const binding_key *key = iter.first;
6169 const concrete_binding *ckey
6170 = key->dyn_cast_concrete_binding ();
6171 gcc_assert (ckey);
6172 uninit_keys.safe_push (ckey);
6175 /* Complain about them in sorted order. */
6176 uninit_keys.qsort (concrete_binding::cmp_ptr_ptr);
6178 std::unique_ptr<record_layout> layout;
6180 tree type = m_copied_sval->get_type ();
6181 if (type && TREE_CODE (type) == RECORD_TYPE)
6183 // (std::make_unique is C++14)
6184 layout = std::unique_ptr<record_layout> (new record_layout (type));
6186 if (0)
6187 layout->dump ();
6190 unsigned i;
6191 const concrete_binding *ckey;
6192 FOR_EACH_VEC_ELT (uninit_keys, i, ckey)
6194 bit_offset_t start_bit = ckey->get_start_bit_offset ();
6195 bit_offset_t next_bit = ckey->get_next_bit_offset ();
6196 complain_about_uninit_range (loc, start_bit, next_bit,
6197 layout.get ());
6202 void complain_about_uninit_range (location_t loc,
6203 bit_offset_t start_bit,
6204 bit_offset_t next_bit,
6205 const record_layout *layout) const
6207 if (layout)
6209 while (start_bit < next_bit)
6211 if (const record_layout::item *item
6212 = layout->get_item_at (start_bit))
6214 gcc_assert (start_bit >= item->get_start_bit_offset ());
6215 gcc_assert (start_bit < item->get_next_bit_offset ());
6216 if (item->get_start_bit_offset () == start_bit
6217 && item->get_next_bit_offset () <= next_bit)
6218 complain_about_fully_uninit_item (*item);
6219 else
6220 complain_about_partially_uninit_item (*item);
6221 start_bit = item->get_next_bit_offset ();
6222 continue;
6224 else
6225 break;
6229 if (start_bit >= next_bit)
6230 return;
6232 if (start_bit % 8 == 0 && next_bit % 8 == 0)
6234 /* Express in bytes. */
6235 byte_offset_t start_byte = start_bit / 8;
6236 byte_offset_t last_byte = (next_bit / 8) - 1;
6237 if (last_byte == start_byte)
6238 inform (loc,
6239 "byte %wu is uninitialized",
6240 start_byte.to_uhwi ());
6241 else
6242 inform (loc,
6243 "bytes %wu - %wu are uninitialized",
6244 start_byte.to_uhwi (),
6245 last_byte.to_uhwi ());
6247 else
6249 /* Express in bits. */
6250 bit_offset_t last_bit = next_bit - 1;
6251 if (last_bit == start_bit)
6252 inform (loc,
6253 "bit %wu is uninitialized",
6254 start_bit.to_uhwi ());
6255 else
6256 inform (loc,
6257 "bits %wu - %wu are uninitialized",
6258 start_bit.to_uhwi (),
6259 last_bit.to_uhwi ());
6263 static void
6264 complain_about_fully_uninit_item (const record_layout::item &item)
6266 tree field = item.m_field;
6267 bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
6268 if (item.m_is_padding)
6270 if (num_bits % 8 == 0)
6272 /* Express in bytes. */
6273 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
6274 if (num_bytes == 1)
6275 inform (DECL_SOURCE_LOCATION (field),
6276 "padding after field %qD is uninitialized (1 byte)",
6277 field);
6278 else
6279 inform (DECL_SOURCE_LOCATION (field),
6280 "padding after field %qD is uninitialized (%wu bytes)",
6281 field, num_bytes.to_uhwi ());
6283 else
6285 /* Express in bits. */
6286 if (num_bits == 1)
6287 inform (DECL_SOURCE_LOCATION (field),
6288 "padding after field %qD is uninitialized (1 bit)",
6289 field);
6290 else
6291 inform (DECL_SOURCE_LOCATION (field),
6292 "padding after field %qD is uninitialized (%wu bits)",
6293 field, num_bits.to_uhwi ());
6296 else
6298 if (num_bits % 8 == 0)
6300 /* Express in bytes. */
6301 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
6302 if (num_bytes == 1)
6303 inform (DECL_SOURCE_LOCATION (field),
6304 "field %qD is uninitialized (1 byte)", field);
6305 else
6306 inform (DECL_SOURCE_LOCATION (field),
6307 "field %qD is uninitialized (%wu bytes)",
6308 field, num_bytes.to_uhwi ());
6310 else
6312 /* Express in bits. */
6313 if (num_bits == 1)
6314 inform (DECL_SOURCE_LOCATION (field),
6315 "field %qD is uninitialized (1 bit)", field);
6316 else
6317 inform (DECL_SOURCE_LOCATION (field),
6318 "field %qD is uninitialized (%wu bits)",
6319 field, num_bits.to_uhwi ());
6324 static void
6325 complain_about_partially_uninit_item (const record_layout::item &item)
6327 tree field = item.m_field;
6328 if (item.m_is_padding)
6329 inform (DECL_SOURCE_LOCATION (field),
6330 "padding after field %qD is partially uninitialized",
6331 field);
6332 else
6333 inform (DECL_SOURCE_LOCATION (field),
6334 "field %qD is partially uninitialized",
6335 field);
6336 /* TODO: ideally we'd describe what parts are uninitialized. */
6339 void maybe_emit_fixit_hint () const
6341 if (tree decl = m_src_region->maybe_get_decl ())
6343 gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl));
6344 hint_richloc.add_fixit_insert_after (" = {0}");
6345 inform (&hint_richloc,
6346 "suggest forcing zero-initialization by"
6347 " providing a %<{0}%> initializer");
6351 private:
6352 const region *m_src_region;
6353 const region *m_dest_region;
6354 const svalue *m_copied_sval;
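/* A hypothetical example of the kind of infoleak the above diagnostic is
   designed for, in the style of a copy_to_user call in an OS kernel
   (names are illustrative):

     struct reply { char code; int val; };  /* padding after "code".  */

     void handle_ioctl (void __user *dst)
     {
       struct reply r;
       r.val = 42;  /* "code" and the padding are never written.  */
       copy_to_user (dst, &r, sizeof (r));
     }

   Here the uninitialized field and padding bytes of the on-stack "r" are
   copied across the kernel/user trust boundary; for the stack case the
   fix-it hint would suggest writing "struct reply r = {0};".  */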
6357 /* Return true if any part of SVAL is uninitialized. */
6359 static bool
6360 contains_uninit_p (const svalue *sval)
6362 struct uninit_finder : public visitor
6364 public:
6365 uninit_finder () : m_found_uninit (false) {}
6366 void visit_poisoned_svalue (const poisoned_svalue *sval)
6368 if (sval->get_poison_kind () == POISON_KIND_UNINIT)
6369 m_found_uninit = true;
6371 bool m_found_uninit;
6374 uninit_finder v;
6375 sval->accept (&v);
6377 return v.m_found_uninit;
6380 /* Function for use by plugins when simulating writing data through a
6381 pointer to an "untrusted" region DST_REG (and thus crossing a security
6382 boundary), such as copying data to user space in an OS kernel.
6384 Check that COPIED_SVAL is fully initialized. If not, complain about
6385 an infoleak to CTXT.
6387 SRC_REG can be NULL; if non-NULL it is used as a hint in the diagnostic
6388 as to where COPIED_SVAL came from. */
6390 void
6391 region_model::maybe_complain_about_infoleak (const region *dst_reg,
6392 const svalue *copied_sval,
6393 const region *src_reg,
6394 region_model_context *ctxt)
6396 /* Check for exposure. */
6397 if (contains_uninit_p (copied_sval))
6398 ctxt->warn (make_unique<exposure_through_uninit_copy> (src_reg,
6399 dst_reg,
6400 copied_sval));
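/* Usage sketch for such a plugin (hypothetical; how COPIED_SVAL is
   obtained from the source region is an assumption):

     const svalue *copied_sval = model->get_store_value (src_reg, ctxt);
     model->maybe_complain_about_infoleak (dst_reg, copied_sval,
					   src_reg, ctxt);  */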
6403 /* Set errno to a positive symbolic int, as if some error has occurred. */
6405 void
6406 region_model::set_errno (const call_details &cd)
6408 const region *errno_reg = m_mgr->get_errno_region ();
6409 conjured_purge p (this, cd.get_ctxt ());
6410 const svalue *new_errno_sval
6411 = m_mgr->get_or_create_conjured_svalue (integer_type_node,
6412 cd.get_call_stmt (),
6413 errno_reg, p);
6414 const svalue *zero
6415 = m_mgr->get_or_create_int_cst (integer_type_node, 0);
6416 add_constraint (new_errno_sval, GT_EXPR, zero, cd.get_ctxt ());
6417 set_value (errno_reg, new_errno_sval, cd.get_ctxt ());
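/* A sketch of the intended usage: a handler simulating a failing libc
   call might do something like the following (hypothetical handler):

     ... bind a failure return value via CD ...
     cd.get_model ()->set_errno (cd);

   leaving errno as a fresh conjured value constrained only to be
   positive, rather than committing to any specific error code.  */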
6420 /* class noop_region_model_context : public region_model_context. */
6422 void
6423 noop_region_model_context::add_note (std::unique_ptr<pending_note>)
6427 void
6428 noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
6432 void
6433 noop_region_model_context::terminate_path ()
6437 /* struct model_merger. */
6439 /* Dump a multiline representation of this merger to PP. */
6441 void
6442 model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
6444 pp_string (pp, "model A:");
6445 pp_newline (pp);
6446 m_model_a->dump_to_pp (pp, simple, true);
6447 pp_newline (pp);
6449 pp_string (pp, "model B:");
6450 pp_newline (pp);
6451 m_model_b->dump_to_pp (pp, simple, true);
6452 pp_newline (pp);
6454 pp_string (pp, "merged model:");
6455 pp_newline (pp);
6456 m_merged_model->dump_to_pp (pp, simple, true);
6457 pp_newline (pp);
6460 /* Dump a multiline representation of this merger to FILE. */
6462 void
6463 model_merger::dump (FILE *fp, bool simple) const
6465 pretty_printer pp;
6466 pp_format_decoder (&pp) = default_tree_printer;
6467 pp_show_color (&pp) = pp_show_color (global_dc->printer);
6468 pp.buffer->stream = fp;
6469 dump_to_pp (&pp, simple);
6470 pp_flush (&pp);
6473 /* Dump a multiline representation of this merger to stderr. */
6475 DEBUG_FUNCTION void
6476 model_merger::dump (bool simple) const
6478 dump (stderr, simple);
6481 /* Return true if it's OK to merge SVAL with other svalues. */
6483 bool
6484 model_merger::mergeable_svalue_p (const svalue *sval) const
6486 if (m_ext_state)
6488 /* Reject merging svalues that have non-purgeable sm-state,
6489 to avoid falsely reporting memory leaks by merging them
6490 with something else. For example, given a local var "p",
6491 reject the merger of a:
6492 store_a mapping "p" to a malloc-ed ptr
6493 with:
6494 store_b mapping "p" to a NULL ptr. */
6495 if (m_state_a)
6496 if (!m_state_a->can_purge_p (*m_ext_state, sval))
6497 return false;
6498 if (m_state_b)
6499 if (!m_state_b->can_purge_p (*m_ext_state, sval))
6500 return false;
6502 return true;
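/* Illustrating the comment above with a sketch of the user code involved:

     void *p;
     if (flag)
       p = malloc (16);  /* state A: "p" has non-purgeable sm-state.  */
     else
       p = NULL;         /* state B: "p" is a NULL constant.  */

   At the join point the malloc-ed svalue must not be merged away into
   something unknown, since the sm-state (and hence any leak report for
   the allocation) would be lost.  */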
6505 } // namespace ana
6507 /* Dump RMODEL fully to stderr (i.e. without summarization). */
6509 DEBUG_FUNCTION void
6510 debug (const region_model &rmodel)
6512 rmodel.dump (false);
6515 /* class rejected_op_constraint : public rejected_constraint. */
6517 void
6518 rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
6520 region_model m (m_model);
6521 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
6522 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
6523 lhs_sval->dump_to_pp (pp, true);
6524 pp_printf (pp, " %s ", op_symbol_code (m_op));
6525 rhs_sval->dump_to_pp (pp, true);
6528 /* class rejected_ranges_constraint : public rejected_constraint. */
6530 void
6531 rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
6533 region_model m (m_model);
6534 const svalue *sval = m.get_rvalue (m_expr, NULL);
6535 sval->dump_to_pp (pp, true);
6536 pp_string (pp, " in ");
6537 m_ranges->dump_to_pp (pp, true);
6540 /* class engine. */
6542 /* engine's ctor. */
6544 engine::engine (const supergraph *sg, logger *logger)
6545 : m_sg (sg), m_mgr (logger)
6549 /* Dump the managed objects by class to LOGGER, and the per-class totals. */
6551 void
6552 engine::log_stats (logger *logger) const
6554 m_mgr.log_stats (logger, true);
6557 namespace ana {
6559 #if CHECKING_P
6561 namespace selftest {
6563 /* Build a constant tree of the given type from STR. */
6565 static tree
6566 build_real_cst_from_string (tree type, const char *str)
6568 REAL_VALUE_TYPE real;
6569 real_from_string (&real, str);
6570 return build_real (type, real);
6573 /* Append various "interesting" constants to OUT (e.g. NaN). */
6575 static void
6576 append_interesting_constants (auto_vec<tree> *out)
6578 out->safe_push (build_int_cst (integer_type_node, 0));
6579 out->safe_push (build_int_cst (integer_type_node, 42));
6580 out->safe_push (build_int_cst (unsigned_type_node, 0));
6581 out->safe_push (build_int_cst (unsigned_type_node, 42));
6582 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
6583 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
6584 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
6585 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
6586 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
6587 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
6588 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
6589 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
6592 /* Verify that tree_cmp is a well-behaved comparator for qsort, even
6593 if the underlying constants aren't comparable. */
6595 static void
6596 test_tree_cmp_on_constants ()
6598 auto_vec<tree> csts;
6599 append_interesting_constants (&csts);
6601 /* Try sorting every triple. */
6602 const unsigned num = csts.length ();
6603 for (unsigned i = 0; i < num; i++)
6604 for (unsigned j = 0; j < num; j++)
6605 for (unsigned k = 0; k < num; k++)
6607 auto_vec<tree> v (3);
6608 v.quick_push (csts[i]);
6609 v.quick_push (csts[j]);
6610 v.quick_push (csts[k]);
6611 v.qsort (tree_cmp);
6615 /* Implementation detail of the ASSERT_CONDITION_* macros. */
6617 void
6618 assert_condition (const location &loc,
6619 region_model &model,
6620 const svalue *lhs, tree_code op, const svalue *rhs,
6621 tristate expected)
6623 tristate actual = model.eval_condition (lhs, op, rhs);
6624 ASSERT_EQ_AT (loc, actual, expected);
6627 /* Implementation detail of the ASSERT_CONDITION_* macros. */
6629 void
6630 assert_condition (const location &loc,
6631 region_model &model,
6632 tree lhs, tree_code op, tree rhs,
6633 tristate expected)
6635 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
6636 ASSERT_EQ_AT (loc, actual, expected);
6639 /* Implementation detail of ASSERT_DUMP_TREE_EQ. */
6641 static void
6642 assert_dump_tree_eq (const location &loc, tree t, const char *expected)
6644 auto_fix_quotes sentinel;
6645 pretty_printer pp;
6646 pp_format_decoder (&pp) = default_tree_printer;
6647 dump_tree (&pp, t);
6648 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
6651 /* Assert that dump_tree (T) is EXPECTED. */
6653 #define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
6654 SELFTEST_BEGIN_STMT \
6655 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
6656 SELFTEST_END_STMT
6658 /* Implementation detail of ASSERT_DUMP_EQ. */
6660 static void
6661 assert_dump_eq (const location &loc,
6662 const region_model &model,
6663 bool summarize,
6664 const char *expected)
6666 auto_fix_quotes sentinel;
6667 pretty_printer pp;
6668 pp_format_decoder (&pp) = default_tree_printer;
6670 model.dump_to_pp (&pp, summarize, true);
6671 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
6674 /* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
6676 #define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
6677 SELFTEST_BEGIN_STMT \
6678 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
6679 SELFTEST_END_STMT
6681 /* Smoketest for region_model::dump_to_pp. */
6683 static void
6684 test_dump ()
6686 region_model_manager mgr;
6687 region_model model (&mgr);
6689 ASSERT_DUMP_EQ (model, false,
6690 "stack depth: 0\n"
6691 "m_called_unknown_fn: FALSE\n"
6692 "constraint_manager:\n"
6693 " equiv classes:\n"
6694 " constraints:\n");
6695 ASSERT_DUMP_EQ (model, true,
6696 "stack depth: 0\n"
6697 "m_called_unknown_fn: FALSE\n"
6698 "constraint_manager:\n"
6699 " equiv classes:\n"
6700 " constraints:\n");
6703 /* Helper function for selftests. Create a struct or union type named NAME,
6704 with the fields given by the FIELD_DECLS in FIELDS.
6705 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
6706 create a UNION_TYPE. */
6708 static tree
6709 make_test_compound_type (const char *name, bool is_struct,
6710 const auto_vec<tree> *fields)
6712 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
6713 TYPE_NAME (t) = get_identifier (name);
6714 TYPE_SIZE (t) = 0;
6716 tree fieldlist = NULL;
6717 int i;
6718 tree field;
6719 FOR_EACH_VEC_ELT (*fields, i, field)
6721 gcc_assert (TREE_CODE (field) == FIELD_DECL);
6722 DECL_CONTEXT (field) = t;
6723 fieldlist = chainon (field, fieldlist);
6725 fieldlist = nreverse (fieldlist);
6726 TYPE_FIELDS (t) = fieldlist;
6728 layout_type (t);
6729 return t;
6732 /* Selftest fixture for creating the type "struct coord {int x; int y; };". */
6734 struct coord_test
6736 coord_test ()
6738 auto_vec<tree> fields;
6739 m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
6740 get_identifier ("x"), integer_type_node);
6741 fields.safe_push (m_x_field);
6742 m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
6743 get_identifier ("y"), integer_type_node);
6744 fields.safe_push (m_y_field);
6745 m_coord_type = make_test_compound_type ("coord", true, &fields);
6748 tree m_x_field;
6749 tree m_y_field;
6750 tree m_coord_type;
6753 /* Verify usage of a struct. */
6755 static void
6756 test_struct ()
6758 coord_test ct;
6760 tree c = build_global_decl ("c", ct.m_coord_type);
6761 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6762 c, ct.m_x_field, NULL_TREE);
6763 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6764 c, ct.m_y_field, NULL_TREE);
6766 tree int_17 = build_int_cst (integer_type_node, 17);
6767 tree int_m3 = build_int_cst (integer_type_node, -3);
6769 region_model_manager mgr;
6770 region_model model (&mgr);
6771 model.set_value (c_x, int_17, NULL);
6772 model.set_value (c_y, int_m3, NULL);
6774 /* Verify get_offset for "c.x". */
6776 const region *c_x_reg = model.get_lvalue (c_x, NULL);
6777 region_offset offset = c_x_reg->get_offset (&mgr);
6778 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
6779 ASSERT_EQ (offset.get_bit_offset (), 0);
6782 /* Verify get_offset for "c.y". */
6784 const region *c_y_reg = model.get_lvalue (c_y, NULL);
6785 region_offset offset = c_y_reg->get_offset (&mgr);
6786 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
6787 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
6791 /* Verify usage of an array element. */
6793 static void
6794 test_array_1 ()
6796 tree tlen = size_int (10);
6797 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
6799 tree a = build_global_decl ("a", arr_type);
6801 region_model_manager mgr;
6802 region_model model (&mgr);
6803 tree int_0 = build_int_cst (integer_type_node, 0);
6804 tree a_0 = build4 (ARRAY_REF, char_type_node,
6805 a, int_0, NULL_TREE, NULL_TREE);
6806 tree char_A = build_int_cst (char_type_node, 'A');
6807 model.set_value (a_0, char_A, NULL);
6810 /* Verify that region_model::get_representative_tree works as expected. */
6812 static void
6813 test_get_representative_tree ()
6815 region_model_manager mgr;
6817 /* STRING_CST. */
6819 tree string_cst = build_string (4, "foo");
6820 region_model m (&mgr);
6821 const svalue *str_sval = m.get_rvalue (string_cst, NULL);
6822 tree rep = m.get_representative_tree (str_sval);
6823 ASSERT_EQ (rep, string_cst);
6826 /* String literal. */
6828 tree string_cst_ptr = build_string_literal (4, "foo");
6829 region_model m (&mgr);
6830 const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
6831 tree rep = m.get_representative_tree (str_sval);
6832 ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
6835 /* Value of an element within an array. */
6837 tree tlen = size_int (10);
6838 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
6839 tree a = build_global_decl ("a", arr_type);
6840 placeholder_svalue test_sval (char_type_node, "test value");
6842 /* Value of a[3]. */
6844 test_region_model_context ctxt;
6845 region_model model (&mgr);
6846 tree int_3 = build_int_cst (integer_type_node, 3);
6847 tree a_3 = build4 (ARRAY_REF, char_type_node,
6848 a, int_3, NULL_TREE, NULL_TREE);
6849 const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
6850 model.set_value (a_3_reg, &test_sval, &ctxt);
6851 tree rep = model.get_representative_tree (&test_sval);
6852 ASSERT_DUMP_TREE_EQ (rep, "a[3]");
6855 /* Value of a[0]. */
6857 test_region_model_context ctxt;
6858 region_model model (&mgr);
6859 tree idx = build_int_cst (integer_type_node, 0);
6860 tree a_0 = build4 (ARRAY_REF, char_type_node,
6861 a, idx, NULL_TREE, NULL_TREE);
6862 const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
6863 model.set_value (a_0_reg, &test_sval, &ctxt);
6864 tree rep = model.get_representative_tree (&test_sval);
6865 ASSERT_DUMP_TREE_EQ (rep, "a[0]");
6869 /* Value of a field within a struct. */
6871 coord_test ct;
6873 tree c = build_global_decl ("c", ct.m_coord_type);
6874 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6875 c, ct.m_x_field, NULL_TREE);
6876 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6877 c, ct.m_y_field, NULL_TREE);
6879 test_region_model_context ctxt;
6881 /* Value of initial field. */
6883 region_model m (&mgr);
6884 const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
6885 placeholder_svalue test_sval_x (integer_type_node, "test x val");
6886 m.set_value (c_x_reg, &test_sval_x, &ctxt);
6887 tree rep = m.get_representative_tree (&test_sval_x);
6888 ASSERT_DUMP_TREE_EQ (rep, "c.x");
6891 /* Value of non-initial field. */
6893 region_model m (&mgr);
6894 const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
6895 placeholder_svalue test_sval_y (integer_type_node, "test y val");
6896 m.set_value (c_y_reg, &test_sval_y, &ctxt);
6897 tree rep = m.get_representative_tree (&test_sval_y);
6898 ASSERT_DUMP_TREE_EQ (rep, "c.y");
6903 /* Verify that calling region_model::get_rvalue repeatedly on the same
6904 tree constant retrieves the same svalue *. */
6906 static void
6907 test_unique_constants ()
6909 tree int_0 = build_int_cst (integer_type_node, 0);
6910 tree int_42 = build_int_cst (integer_type_node, 42);
6912 test_region_model_context ctxt;
6913 region_model_manager mgr;
6914 region_model model (&mgr);
6915 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
6916 ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
6917 model.get_rvalue (int_42, &ctxt));
6918 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
6919 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
6921 /* A "(const int)42" will be a different tree from "(int)42)"... */
6922 tree const_int_type_node
6923 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
6924 tree const_int_42 = build_int_cst (const_int_type_node, 42);
6925 ASSERT_NE (int_42, const_int_42);
6926 /* It should have a different const_svalue. */
6927 const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
6928 const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
6929 ASSERT_NE (int_42_sval, const_int_42_sval);
6930 /* But they should compare as equal. */
6931 ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
6932 ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
6935 /* Verify that each type gets its own singleton unknown_svalue within a
6936 region_model_manager, and that NULL_TREE gets its own singleton. */
6938 static void
6939 test_unique_unknowns ()
6941 region_model_manager mgr;
6942 const svalue *unknown_int
6943 = mgr.get_or_create_unknown_svalue (integer_type_node);
6944 /* Repeated calls with the same type should get the same "unknown"
6945 svalue. */
6946 const svalue *unknown_int_2
6947 = mgr.get_or_create_unknown_svalue (integer_type_node);
6948 ASSERT_EQ (unknown_int, unknown_int_2);
6950 /* Different types (or the NULL type) should have different
6951 unknown_svalues. */
6952 const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
6953 ASSERT_NE (unknown_NULL_type, unknown_int);
6955 /* Repeated calls with NULL for the type should get the same "unknown"
6956 svalue. */
6957 const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
6958 ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
6961 /* Verify that initial_svalue are handled as expected. */
6963 static void
6964 test_initial_svalue_folding ()
6966 region_model_manager mgr;
6967 tree x = build_global_decl ("x", integer_type_node);
6968 tree y = build_global_decl ("y", integer_type_node);
6970 test_region_model_context ctxt;
6971 region_model model (&mgr);
6972 const svalue *x_init = model.get_rvalue (x, &ctxt);
6973 const svalue *y_init = model.get_rvalue (y, &ctxt);
6974 ASSERT_NE (x_init, y_init);
6975 const region *x_reg = model.get_lvalue (x, &ctxt);
6976 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
6980 /* Verify that unary ops are folded as expected. */
6982 static void
6983 test_unaryop_svalue_folding ()
6985 region_model_manager mgr;
6986 tree x = build_global_decl ("x", integer_type_node);
6987 tree y = build_global_decl ("y", integer_type_node);
6989 test_region_model_context ctxt;
6990 region_model model (&mgr);
6991 const svalue *x_init = model.get_rvalue (x, &ctxt);
6992 const svalue *y_init = model.get_rvalue (y, &ctxt);
6993 const region *x_reg = model.get_lvalue (x, &ctxt);
6994 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
6996 /* "(int)x" -> "x". */
6997 ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));
6999 /* "(void *)x" -> something other than "x". */
7000 ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));
7002 /* "!(x == y)" -> "x != y". */
7003 ASSERT_EQ (mgr.get_or_create_unaryop
7004 (boolean_type_node, TRUTH_NOT_EXPR,
7005 mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
7006 x_init, y_init)),
7007 mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
7008 x_init, y_init));
7009 /* "!(x > y)" -> "x <= y". */
7010 ASSERT_EQ (mgr.get_or_create_unaryop
7011 (boolean_type_node, TRUTH_NOT_EXPR,
7012 mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
7013 x_init, y_init)),
7014 mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
7015 x_init, y_init));
7018 /* Verify that binops on constant svalues are folded. */
7020 static void
7021 test_binop_svalue_folding ()
7023 #define NUM_CSTS 10
7024 tree cst_int[NUM_CSTS];
7025 region_model_manager mgr;
7026 const svalue *cst_sval[NUM_CSTS];
7027 for (int i = 0; i < NUM_CSTS; i++)
7029 cst_int[i] = build_int_cst (integer_type_node, i);
7030 cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
7031 ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
7032 ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
7035 for (int i = 0; i < NUM_CSTS; i++)
7036 for (int j = 0; j < NUM_CSTS; j++)
7038 if (i != j)
7039 ASSERT_NE (cst_sval[i], cst_sval[j]);
7040 if (i + j < NUM_CSTS)
7042 const svalue *sum
7043 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7044 cst_sval[i], cst_sval[j]);
7045 ASSERT_EQ (sum, cst_sval[i + j]);
7047 if (i - j >= 0)
7049 const svalue *difference
7050 = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
7051 cst_sval[i], cst_sval[j]);
7052 ASSERT_EQ (difference, cst_sval[i - j]);
7054 if (i * j < NUM_CSTS)
7056 const svalue *product
7057 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7058 cst_sval[i], cst_sval[j]);
7059 ASSERT_EQ (product, cst_sval[i * j]);
7061 const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
7062 cst_sval[i], cst_sval[j]);
7063 ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
7064 const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
7065 cst_sval[i], cst_sval[j]);
7066 ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
7067 // etc
7070 tree x = build_global_decl ("x", integer_type_node);
7072 test_region_model_context ctxt;
7073 region_model model (&mgr);
7074 const svalue *x_init = model.get_rvalue (x, &ctxt);
7076 /* PLUS_EXPR folding. */
7077 const svalue *x_init_plus_zero
7078 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7079 x_init, cst_sval[0]);
7080 ASSERT_EQ (x_init_plus_zero, x_init);
7081 const svalue *zero_plus_x_init
7082 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7083 cst_sval[0], x_init);
7084 ASSERT_EQ (zero_plus_x_init, x_init);
7086 /* MULT_EXPR folding. */
7087 const svalue *x_init_times_zero
7088 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7089 x_init, cst_sval[0]);
7090 ASSERT_EQ (x_init_times_zero, cst_sval[0]);
7091 const svalue *zero_times_x_init
7092 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7093 cst_sval[0], x_init);
7094 ASSERT_EQ (zero_times_x_init, cst_sval[0]);
7096 const svalue *x_init_times_one
7097 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7098 x_init, cst_sval[1]);
7099 ASSERT_EQ (x_init_times_one, x_init);
7100 const svalue *one_times_x_init
7101 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7102 cst_sval[1], x_init);
7103 ASSERT_EQ (one_times_x_init, x_init);
7105 // etc
7106 // TODO: do we want to use the match-and-simplify DSL for this?
7108 /* Verify that binops put any constants on the RHS. */
7109 const svalue *four_times_x_init
7110 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7111 cst_sval[4], x_init);
7112 const svalue *x_init_times_four
7113 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7114 x_init, cst_sval[4]);
7115 ASSERT_EQ (four_times_x_init, x_init_times_four);
7116 const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
7117 ASSERT_EQ (binop->get_op (), MULT_EXPR);
7118 ASSERT_EQ (binop->get_arg0 (), x_init);
7119 ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
7121 /* Verify that ((x + 1) + 1) == (x + 2). */
7122 const svalue *x_init_plus_one
7123 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7124 x_init, cst_sval[1]);
7125 const svalue *x_init_plus_two
7126 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7127 x_init, cst_sval[2]);
7128 const svalue *x_init_plus_one_plus_one
7129 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7130 x_init_plus_one, cst_sval[1]);
7131 ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
7133 /* Verify various binops on booleans. */
7135 const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
7136 const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
7137 const svalue *sval_unknown
7138 = mgr.get_or_create_unknown_svalue (boolean_type_node);
7139 const placeholder_svalue sval_placeholder (boolean_type_node, "v");
7140 for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
7142 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7143 sval_true, sval_unknown),
7144 sval_true);
7145 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7146 sval_false, sval_unknown),
7147 sval_unknown);
7148 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7149 sval_false, &sval_placeholder),
7150 &sval_placeholder);
7152 for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
7154 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7155 sval_false, sval_unknown),
7156 sval_false);
7157 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7158 sval_true, sval_unknown),
7159 sval_unknown);
7160 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7161 sval_true, &sval_placeholder),
7162 &sval_placeholder);
7167 /* Verify that sub_svalues are folded as expected. */
7169 static void
7170 test_sub_svalue_folding ()
7172 coord_test ct;
7173 tree c = build_global_decl ("c", ct.m_coord_type);
7174 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
7175 c, ct.m_x_field, NULL_TREE);
7177 region_model_manager mgr;
7178 region_model model (&mgr);
7179 test_region_model_context ctxt;
7180 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
7182 /* Verify that sub_svalue of "unknown" simply
7183 yields an unknown. */
7185 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
7186 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
7187 unknown, c_x_reg);
7188 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
7189 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
7192 /* Get BIT within VAL as a symbolic value within MGR. */
7194 static const svalue *
7195 get_bit (region_model_manager *mgr,
7196 bit_offset_t bit,
7197 unsigned HOST_WIDE_INT val)
7199 const svalue *inner_svalue
7200 = mgr->get_or_create_int_cst (unsigned_type_node, val);
7201 return mgr->get_or_create_bits_within (boolean_type_node,
7202 bit_range (bit, 1),
7203 inner_svalue);
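/* For example (as exercised by the selftest below),
   get_bit (&mgr, 15, 0x8000) extracts bit_range (15, 1) from the constant
   0x8000 and should fold to the boolean constant 1, whereas
   get_bit (&mgr, 0, 0x8000) should fold to 0.  */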
7206 /* Verify that bits_within_svalues are folded as expected. */
7208 static void
7209 test_bits_within_svalue_folding ()
7211 region_model_manager mgr;
7213 const svalue *zero = mgr.get_or_create_int_cst (boolean_type_node, 0);
7214 const svalue *one = mgr.get_or_create_int_cst (boolean_type_node, 1);
7217 const unsigned val = 0x0000;
7218 for (unsigned bit = 0; bit < 16; bit++)
7219 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
7223 const unsigned val = 0x0001;
7224 ASSERT_EQ (get_bit (&mgr, 0, val), one);
7225 for (unsigned bit = 1; bit < 16; bit++)
7226 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
7230 const unsigned val = 0x8000;
7231 for (unsigned bit = 0; bit < 15; bit++)
7232 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
7233 ASSERT_EQ (get_bit (&mgr, 15, val), one);
7237 const unsigned val = 0xFFFF;
7238 for (unsigned bit = 0; bit < 16; bit++)
7239 ASSERT_EQ (get_bit (&mgr, bit, val), one);
7243 /* Test that region::descendent_of_p works as expected. */
7245 static void
7246 test_descendent_of_p ()
7248 region_model_manager mgr;
7249 const region *stack = mgr.get_stack_region ();
7250 const region *heap = mgr.get_heap_region ();
7251 const region *code = mgr.get_code_region ();
7252 const region *globals = mgr.get_globals_region ();
7254 /* descendent_of_p should return true when used on the region itself. */
7255 ASSERT_TRUE (stack->descendent_of_p (stack));
7256 ASSERT_FALSE (stack->descendent_of_p (heap));
7257 ASSERT_FALSE (stack->descendent_of_p (code));
7258 ASSERT_FALSE (stack->descendent_of_p (globals));
7260 tree x = build_global_decl ("x", integer_type_node);
7261 const region *x_reg = mgr.get_region_for_global (x);
7262 ASSERT_TRUE (x_reg->descendent_of_p (globals));
7264 /* A cast_region should be a descendent of the original region. */
7265 const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
7266 ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
7269 /* Verify that bit_range_region works as expected. */
7271 static void
7272 test_bit_range_regions ()
7274 tree x = build_global_decl ("x", integer_type_node);
7275 region_model_manager mgr;
7276 const region *x_reg = mgr.get_region_for_global (x);
7277 const region *byte0
7278 = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
7279 const region *byte1
7280 = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
7281 ASSERT_TRUE (byte0->descendent_of_p (x_reg));
7282 ASSERT_TRUE (byte1->descendent_of_p (x_reg));
7283 ASSERT_NE (byte0, byte1);
7286 /* Verify that simple assignments work as expected. */
7288 static void
7289 test_assignment ()
7291 tree int_0 = build_int_cst (integer_type_node, 0);
7292 tree x = build_global_decl ("x", integer_type_node);
7293 tree y = build_global_decl ("y", integer_type_node);
7295 /* "x == 0", then use of y, then "y = 0;". */
7296 region_model_manager mgr;
7297 region_model model (&mgr);
7298 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
7299 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
7300 model.set_value (model.get_lvalue (y, NULL),
7301 model.get_rvalue (int_0, NULL),
7302 NULL);
7303 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
7304 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
7307 /* Verify that compound assignments work as expected. */
7309 static void
7310 test_compound_assignment ()
7312 coord_test ct;
7314 tree c = build_global_decl ("c", ct.m_coord_type);
7315 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
7316 c, ct.m_x_field, NULL_TREE);
7317 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
7318 c, ct.m_y_field, NULL_TREE);
7319 tree d = build_global_decl ("d", ct.m_coord_type);
7320 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
7321 d, ct.m_x_field, NULL_TREE);
7322 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
7323 d, ct.m_y_field, NULL_TREE);
7325 tree int_17 = build_int_cst (integer_type_node, 17);
7326 tree int_m3 = build_int_cst (integer_type_node, -3);
7328 region_model_manager mgr;
7329 region_model model (&mgr);
7330 model.set_value (c_x, int_17, NULL);
7331 model.set_value (c_y, int_m3, NULL);
7333 /* Copy c to d. */
7334 const svalue *sval = model.get_rvalue (c, NULL);
7335 model.set_value (model.get_lvalue (d, NULL), sval, NULL);
7337 /* Check that the fields have the same svalues. */
7338 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
7339 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
7342 /* Verify the details of pushing and popping stack frames. */
7344 static void
7345 test_stack_frames ()
7347 tree int_42 = build_int_cst (integer_type_node, 42);
7348 tree int_10 = build_int_cst (integer_type_node, 10);
7349 tree int_5 = build_int_cst (integer_type_node, 5);
7350 tree int_0 = build_int_cst (integer_type_node, 0);
7352 auto_vec <tree> param_types;
7353 tree parent_fndecl = make_fndecl (integer_type_node,
7354 "parent_fn",
7355 param_types);
7356 allocate_struct_function (parent_fndecl, true);
7358 tree child_fndecl = make_fndecl (integer_type_node,
7359 "child_fn",
7360 param_types);
7361 allocate_struct_function (child_fndecl, true);
7363 /* "a" and "b" in the parent frame. */
7364 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7365 get_identifier ("a"),
7366 integer_type_node);
7367 DECL_CONTEXT (a) = parent_fndecl;
7368 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7369 get_identifier ("b"),
7370 integer_type_node);
7371 DECL_CONTEXT (b) = parent_fndecl;
7372 /* "x" and "y" in a child frame. */
7373 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7374 get_identifier ("x"),
7375 integer_type_node);
7376 DECL_CONTEXT (x) = child_fndecl;
7377 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7378 get_identifier ("y"),
7379 integer_type_node);
7380 DECL_CONTEXT (y) = child_fndecl;
7382 /* "p" global. */
7383 tree p = build_global_decl ("p", ptr_type_node);
7385 /* "q" global. */
7386 tree q = build_global_decl ("q", ptr_type_node);
7388 region_model_manager mgr;
7389 test_region_model_context ctxt;
7390 region_model model (&mgr);
7392 /* Push stack frame for "parent_fn". */
7393 const region *parent_frame_reg
7394 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
7395 NULL, &ctxt);
7396 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
7397 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
7398 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
7399 model.set_value (a_in_parent_reg,
7400 model.get_rvalue (int_42, &ctxt),
7401 &ctxt);
7402 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
7404 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
7405 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
7406 tristate (tristate::TS_TRUE));
7408 /* Push stack frame for "child_fn". */
7409 const region *child_frame_reg
7410 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
7411 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
7412 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
7413 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
7414 model.set_value (x_in_child_reg,
7415 model.get_rvalue (int_0, &ctxt),
7416 &ctxt);
7417 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
7419 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
7420 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
7421 tristate (tristate::TS_TRUE));
7423 /* Point a global pointer at a local in the child frame: p = &x. */
7424 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
7425 model.set_value (p_in_globals_reg,
7426 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
7427 &ctxt);
7428 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
7430 /* Point another global pointer at p: q = &p. */
7431 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
7432 model.set_value (q_in_globals_reg,
7433 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
7434 &ctxt);
7436 /* Test region::descendent_of_p. */
7437 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
7438 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
7439 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
7441 /* Pop the "child_fn" frame from the stack. */
7442 model.pop_frame (NULL, NULL, &ctxt);
7443 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
7444 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
7446 /* Verify that p (which was pointing at the local "x" in the popped
7447 frame) has been poisoned. */
7448 const svalue *new_p_sval = model.get_rvalue (p, NULL);
7449 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
7450 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
7451 POISON_KIND_POPPED_STACK);
7453 /* Verify that q still points to p, in spite of the region
7454 renumbering. */
7455 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
7456 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
7457 ASSERT_EQ (new_q_sval->maybe_get_region (),
7458 model.get_lvalue (p, &ctxt));
7460 /* Verify that top of stack has been updated. */
7461 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
7463 /* Verify locals in parent frame. */
7464 /* Verify "a" still has its value. */
7465 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
7466 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
7467 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
7468 int_42);
7469 /* Verify "b" still has its constraint. */
7470 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
7471 tristate (tristate::TS_TRUE));
7474 /* Verify that get_representative_path_var works as expected, that
7475 we can map from regions to parms and back within a recursive call
7476 stack. */
7478 static void
7479 test_get_representative_path_var ()
7481 auto_vec <tree> param_types;
7482 tree fndecl = make_fndecl (integer_type_node,
7483 "factorial",
7484 param_types);
7485 allocate_struct_function (fndecl, true);
7487 /* Parm "n". */
7488 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7489 get_identifier ("n"),
7490 integer_type_node);
7491 DECL_CONTEXT (n) = fndecl;
7493 region_model_manager mgr;
7494 test_region_model_context ctxt;
7495 region_model model (&mgr);
7497 /* Push 5 stack frames for "factorial", each with a param. */
7498 auto_vec<const region *> parm_regs;
7499 auto_vec<const svalue *> parm_svals;
7500 for (int depth = 0; depth < 5; depth++)
7502 const region *frame_n_reg
7503 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
7504 const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
7505 parm_regs.safe_push (parm_n_reg);
7507 ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
7508 const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
7509 parm_svals.safe_push (sval_n);
7512 /* Verify that we can recognize that the regions are the parms,
7513 at every depth. */
7514 for (int depth = 0; depth < 5; depth++)
7517 svalue_set visited;
7518 ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
7519 &visited),
7520 path_var (n, depth + 1));
7522 /* ...and that we can lookup lvalues for locals for all frames,
7523 not just the top. */
7524 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
7525 parm_regs[depth]);
7526 /* ...and that we can locate the svalues. */
7528 svalue_set visited;
7529 ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
7530 &visited),
7531 path_var (n, depth + 1));
7536 /* Ensure that region_model::operator== works as expected. */
7538 static void
7539 test_equality_1 ()
7541 tree int_42 = build_int_cst (integer_type_node, 42);
7542 tree int_17 = build_int_cst (integer_type_node, 17);
7544 /* Verify that "empty" region_model instances are equal to each other. */
7545 region_model_manager mgr;
7546 region_model model0 (&mgr);
7547 region_model model1 (&mgr);
7548 ASSERT_EQ (model0, model1);
7550 /* Verify that setting state in model1 makes the models non-equal. */
7551 tree x = build_global_decl ("x", integer_type_node);
7552 model0.set_value (x, int_42, NULL);
7553 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
7554 ASSERT_NE (model0, model1);
7556 /* Verify the copy-ctor. */
7557 region_model model2 (model0);
7558 ASSERT_EQ (model0, model2);
7559 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
7560 ASSERT_NE (model1, model2);
7562 /* Verify that models obtained from copy-ctor are independently editable
7563 w/o affecting the original model. */
7564 model2.set_value (x, int_17, NULL);
7565 ASSERT_NE (model0, model2);
7566 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
7567 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
7570 /* Verify that region models for
7571 x = 42; y = 113;
7573 y = 113; x = 42;
7574 are equal. */
7576 static void
7577 test_canonicalization_2 ()
7579 tree int_42 = build_int_cst (integer_type_node, 42);
7580 tree int_113 = build_int_cst (integer_type_node, 113);
7581 tree x = build_global_decl ("x", integer_type_node);
7582 tree y = build_global_decl ("y", integer_type_node);
7584 region_model_manager mgr;
7585 region_model model0 (&mgr);
7586 model0.set_value (model0.get_lvalue (x, NULL),
7587 model0.get_rvalue (int_42, NULL),
7588 NULL);
7589 model0.set_value (model0.get_lvalue (y, NULL),
7590 model0.get_rvalue (int_113, NULL),
7591 NULL);
7593 region_model model1 (&mgr);
7594 model1.set_value (model1.get_lvalue (y, NULL),
7595 model1.get_rvalue (int_113, NULL),
7596 NULL);
7597 model1.set_value (model1.get_lvalue (x, NULL),
7598 model1.get_rvalue (int_42, NULL),
7599 NULL);
7601 ASSERT_EQ (model0, model1);
7604 /* Verify that constraints for
7605 x > 3 && y > 42
7607 y > 42 && x > 3
7608 are equal after canonicalization. */
7610 static void
7611 test_canonicalization_3 ()
7613 tree int_3 = build_int_cst (integer_type_node, 3);
7614 tree int_42 = build_int_cst (integer_type_node, 42);
7615 tree x = build_global_decl ("x", integer_type_node);
7616 tree y = build_global_decl ("y", integer_type_node);
7618 region_model_manager mgr;
7619 region_model model0 (&mgr);
7620 model0.add_constraint (x, GT_EXPR, int_3, NULL);
7621 model0.add_constraint (y, GT_EXPR, int_42, NULL);
7623 region_model model1 (&mgr);
7624 model1.add_constraint (y, GT_EXPR, int_42, NULL);
7625 model1.add_constraint (x, GT_EXPR, int_3, NULL);
7627 model0.canonicalize ();
7628 model1.canonicalize ();
7629 ASSERT_EQ (model0, model1);
7632 /* Verify that we can canonicalize a model containing NaN and other real
7633 constants. */
7635 static void
7636 test_canonicalization_4 ()
7638 auto_vec<tree> csts;
7639 append_interesting_constants (&csts);
7641 region_model_manager mgr;
7642 region_model model (&mgr);
7644 for (tree cst : csts)
7645 model.get_rvalue (cst, NULL);
7647 model.canonicalize ();
7650 /* Assert that two region_model instances with values VAL_A and VAL_B
7651 for EXPR are mergeable.
7652 Write the merged model to *OUT_MERGED_MODEL,
7653 and the merged svalue ptr to *OUT_MERGED_SVALUE.
7654 If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
7655 for that region_model. */
7657 static void
7658 assert_region_models_merge (tree expr, tree val_a, tree val_b,
7659 region_model *out_merged_model,
7660 const svalue **out_merged_svalue)
7662 region_model_manager *mgr = out_merged_model->get_manager ();
7663 program_point point (program_point::origin (*mgr));
7664 test_region_model_context ctxt;
7665 region_model model0 (mgr);
7666 region_model model1 (mgr);
7667 if (val_a)
7668 model0.set_value (model0.get_lvalue (expr, &ctxt),
7669 model0.get_rvalue (val_a, &ctxt),
7670 &ctxt);
7671 if (val_b)
7672 model1.set_value (model1.get_lvalue (expr, &ctxt),
7673 model1.get_rvalue (val_b, &ctxt),
7674 &ctxt);
7676 /* They should be mergeable. */
7677 ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
7678 *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
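/* For illustration, a minimal sketch of using the helper above
   (illustrative only; not registered in the test-runner below):
   merging two models that both bind a global to the same constant
   should yield that constant in the merged model.  */

static void ATTRIBUTE_UNUSED
sketch_assert_region_models_merge ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree g = build_global_decl ("g", integer_type_node);

  region_model_manager mgr;
  region_model merged (&mgr);
  const svalue *merged_sval;
  assert_region_models_merge (g, int_42, int_42, &merged, &merged_sval);

  /* Both inputs agreed on the value, so the merged model should
     retain it as a constant.  */
  ASSERT_EQ (merged_sval->maybe_get_constant (), int_42);
}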
7681 /* Verify that we can merge region_model instances. */
7683 static void
7684 test_state_merging ()
7686 tree int_42 = build_int_cst (integer_type_node, 42);
7687 tree int_113 = build_int_cst (integer_type_node, 113);
7688 tree x = build_global_decl ("x", integer_type_node);
7689 tree y = build_global_decl ("y", integer_type_node);
7690 tree z = build_global_decl ("z", integer_type_node);
7691 tree p = build_global_decl ("p", ptr_type_node);
7693 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
7694 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
7696 auto_vec <tree> param_types;
7697 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
7698 allocate_struct_function (test_fndecl, true);
7700 /* Param "a". */
7701 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7702 get_identifier ("a"),
7703 integer_type_node);
7704 DECL_CONTEXT (a) = test_fndecl;
7705 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
7707 /* Param "q", a pointer. */
7708 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7709 get_identifier ("q"),
7710 ptr_type_node);
7711 DECL_CONTEXT (q) = test_fndecl;
7713 region_model_manager mgr;
7714 program_point point (program_point::origin (mgr));
7717 region_model model0 (&mgr);
7718 region_model model1 (&mgr);
7719 region_model merged (&mgr);
7720 /* Verify empty models can be merged. */
7721 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7722 ASSERT_EQ (model0, merged);
7725 /* Verify that we can merge two contradictory constraints on the
7726 value for a global. */
7727 /* TODO: verify that the merged model doesn't have a value for
7728 the global. */
7730 region_model model0 (&mgr);
7731 region_model model1 (&mgr);
7732 region_model merged (&mgr);
7733 test_region_model_context ctxt;
7734 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7735 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
7736 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7737 ASSERT_NE (model0, merged);
7738 ASSERT_NE (model1, merged);
7741 /* Verify handling of a PARM_DECL. */
7743 test_region_model_context ctxt;
7744 region_model model0 (&mgr);
7745 region_model model1 (&mgr);
7746 ASSERT_EQ (model0.get_stack_depth (), 0);
7747 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
7748 ASSERT_EQ (model0.get_stack_depth (), 1);
7749 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
7751 placeholder_svalue test_sval (integer_type_node, "test sval");
7752 model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
7753 model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
7754 ASSERT_EQ (model0, model1);
7756 /* They should be mergeable, and the result should be the same. */
7757 region_model merged (&mgr);
7758 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7759 ASSERT_EQ (model0, merged);
7760 /* In particular, "a" should have the placeholder value. */
7761 ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
7764 /* Verify handling of a global. */
7766 test_region_model_context ctxt;
7767 region_model model0 (&mgr);
7768 region_model model1 (&mgr);
7770 placeholder_svalue test_sval (integer_type_node, "test sval");
7771 model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
7772 model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
7773 ASSERT_EQ (model0, model1);
7775 /* They should be mergeable, and the result should be the same. */
7776 region_model merged (&mgr);
7777 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7778 ASSERT_EQ (model0, merged);
7779 /* In particular, "x" should have the placeholder value. */
7780 ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
7783 /* Use global-handling to verify various combinations of values. */
7785 /* Two equal constant values. */
7787 region_model merged (&mgr);
7788 const svalue *merged_x_sval;
7789 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
7791 /* In particular, there should be a constant value for "x". */
7792 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
7793 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
7794 int_42);
7797 /* Two non-equal constant values. */
7799 region_model merged (&mgr);
7800 const svalue *merged_x_sval;
7801 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
7803 /* In particular, there should be a "widening" value for "x". */
7804 ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
7807 /* Initial and constant. */
7809 region_model merged (&mgr);
7810 const svalue *merged_x_sval;
7811 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
7813 /* In particular, there should be an unknown value for "x". */
7814 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7817 /* Constant and initial. */
7819 region_model merged (&mgr);
7820 const svalue *merged_x_sval;
7821 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
7823 /* In particular, there should be an unknown value for "x". */
7824 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7827 /* Unknown and constant. */
7828 // TODO
7830 /* Pointers: NULL and NULL. */
7831 // TODO
7833 /* Pointers: NULL and non-NULL. */
7834 // TODO
7836 /* Pointers: non-NULL and non-NULL: ptr to a local. */
7838 region_model model0 (&mgr);
7839 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7840 model0.set_value (model0.get_lvalue (p, NULL),
7841 model0.get_rvalue (addr_of_a, NULL), NULL);
7843 region_model model1 (model0);
7844 ASSERT_EQ (model0, model1);
7846 /* They should be mergeable, and the result should be the same. */
7847 region_model merged (&mgr);
7848 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7849 ASSERT_EQ (model0, merged);
7852 /* Pointers: non-NULL and non-NULL: ptr to a global. */
7854 region_model merged (&mgr);
7855 /* p == &y in both input models. */
7856 const svalue *merged_p_sval;
7857 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
7858 &merged_p_sval);
7860 /* We should get p == &y in the merged model. */
7861 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
7862 const region_svalue *merged_p_ptr
7863 = merged_p_sval->dyn_cast_region_svalue ();
7864 const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
7865 ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
7868 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
7870 region_model merged (&mgr);
7871 /* x == &y vs x == &z in the input models; these are actually casts
7872 of the ptrs to "int". */
7873 const svalue *merged_x_sval;
7874 // TODO:
7875 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
7876 &merged_x_sval);
7878 /* We should get x == unknown in the merged model. */
7879 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7882 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
7884 test_region_model_context ctxt;
7885 region_model model0 (&mgr);
7886 tree size = build_int_cst (size_type_node, 1024);
7887 const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
7888 const region *new_reg
7889 = model0.create_region_for_heap_alloc (size_sval, &ctxt);
7890 const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
7891 model0.set_value (model0.get_lvalue (p, &ctxt),
7892 ptr_sval, &ctxt);
7894 region_model model1 (model0);
7896 ASSERT_EQ (model0, model1);
7898 region_model merged (&mgr);
7899 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7901 /* The merged model ought to be identical. */
7902 ASSERT_EQ (model0, merged);
7905 /* Two regions sharing the same placeholder svalue should continue sharing
7906 it after self-merger. */
7908 test_region_model_context ctxt;
7909 region_model model0 (&mgr);
7910 placeholder_svalue placeholder_sval (integer_type_node, "test");
7911 model0.set_value (model0.get_lvalue (x, &ctxt),
7912 &placeholder_sval, &ctxt);
7913 model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
7914 region_model model1 (model0);
7916 /* They should be mergeable, and the result should be the same. */
7917 region_model merged (&mgr);
7918 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7919 ASSERT_EQ (model0, merged);
7921 /* In particular, we should have x == y. */
7922 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
7923 tristate (tristate::TS_TRUE));
7927 region_model model0 (&mgr);
7928 region_model model1 (&mgr);
7929 test_region_model_context ctxt;
7930 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7931 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
7932 region_model merged (&mgr);
7933 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7937 region_model model0 (&mgr);
7938 region_model model1 (&mgr);
7939 test_region_model_context ctxt;
7940 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7941 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
7942 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
7943 region_model merged (&mgr);
7944 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7947 // TODO: what can't we merge? need at least one such test
7949 /* TODO: various things
7950 - heap regions
7951 - value merging:
7952 - every combination, but in particular
7953 - pairs of regions
7954 */
7956 /* Views. */
7958 test_region_model_context ctxt;
7959 region_model model0 (&mgr);
7961 const region *x_reg = model0.get_lvalue (x, &ctxt);
7962 const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
7963 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
7965 region_model model1 (model0);
7966 ASSERT_EQ (model1, model0);
7968 /* They should be mergeable, and the result should be the same. */
7969 region_model merged (&mgr);
7970 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7973 /* Verify that we can merge a model in which a local in an older stack
7974 frame points to a local in a more recent stack frame. */
7976 region_model model0 (&mgr);
7977 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7978 const region *q_in_first_frame = model0.get_lvalue (q, NULL);
7980 /* Push a second frame. */
7981 const region *reg_2nd_frame
7982 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7984 /* Have a pointer in the older frame point to a local in the
7985 more recent frame. */
7986 const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
7987 model0.set_value (q_in_first_frame, sval_ptr, NULL);
7989 /* Verify that it's pointing at the newer frame. */
7990 const region *reg_pointee = sval_ptr->maybe_get_region ();
7991 ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
7993 model0.canonicalize ();
7995 region_model model1 (model0);
7996 ASSERT_EQ (model0, model1);
7998 /* They should be mergeable, and the result should be the same
7999 (after canonicalization, at least). */
8000 region_model merged (&mgr);
8001 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
8002 merged.canonicalize ();
8003 ASSERT_EQ (model0, merged);
8006 /* Verify that we can merge a model in which a local points to a global. */
8008 region_model model0 (&mgr);
8009 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
8010 model0.set_value (model0.get_lvalue (q, NULL),
8011 model0.get_rvalue (addr_of_y, NULL), NULL);
8013 region_model model1 (model0);
8014 ASSERT_EQ (model0, model1);
8016 /* They should be mergeable, and the result should be the same
8017 (after canonicalization, at least). */
8018 region_model merged (&mgr);
8019 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
8020 ASSERT_EQ (model0, merged);
8024 /* Verify that constraints are correctly merged when merging region_model
8025 instances. */
8027 static void
8028 test_constraint_merging ()
8030 tree int_0 = build_int_cst (integer_type_node, 0);
8031 tree int_5 = build_int_cst (integer_type_node, 5);
8032 tree x = build_global_decl ("x", integer_type_node);
8033 tree y = build_global_decl ("y", integer_type_node);
8034 tree z = build_global_decl ("z", integer_type_node);
8035 tree n = build_global_decl ("n", integer_type_node);
8037 region_model_manager mgr;
8038 test_region_model_context ctxt;
8040 /* model0: 0 <= (x == y) < n. */
8041 region_model model0 (&mgr);
8042 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
8043 model0.add_constraint (x, GE_EXPR, int_0, NULL);
8044 model0.add_constraint (x, LT_EXPR, n, NULL);
8046 /* model1: z != 5 && (0 <= x < n). */
8047 region_model model1 (&mgr);
8048 model1.add_constraint (z, NE_EXPR, int_5, NULL);
8049 model1.add_constraint (x, GE_EXPR, int_0, NULL);
8050 model1.add_constraint (x, LT_EXPR, n, NULL);
8052 /* They should be mergeable; the merged constraints should
8053 be: (0 <= x < n). */
8054 program_point point (program_point::origin (mgr));
8055 region_model merged (&mgr);
8056 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
8058 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
8059 tristate (tristate::TS_TRUE));
8060 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
8061 tristate (tristate::TS_TRUE));
8063 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
8064 tristate (tristate::TS_UNKNOWN));
8065 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
8066 tristate (tristate::TS_UNKNOWN));
8069 /* Verify that widening_svalue::eval_condition_without_cm works as
8070 expected. */
8072 static void
8073 test_widening_constraints ()
8075 region_model_manager mgr;
8076 function_point point (program_point::origin (mgr).get_function_point ());
8077 tree int_0 = build_int_cst (integer_type_node, 0);
8078 tree int_m1 = build_int_cst (integer_type_node, -1);
8079 tree int_1 = build_int_cst (integer_type_node, 1);
8080 tree int_256 = build_int_cst (integer_type_node, 256);
8081 test_region_model_context ctxt;
8082 const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
8083 const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
8084 const svalue *w_zero_then_one_sval
8085 = mgr.get_or_create_widening_svalue (integer_type_node, point,
8086 int_0_sval, int_1_sval);
8087 const widening_svalue *w_zero_then_one
8088 = w_zero_then_one_sval->dyn_cast_widening_svalue ();
8089 ASSERT_EQ (w_zero_then_one->get_direction (),
8090 widening_svalue::DIR_ASCENDING);
8091 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
8092 tristate::TS_FALSE);
8093 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
8094 tristate::TS_FALSE);
8095 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
8096 tristate::TS_UNKNOWN);
8097 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
8098 tristate::TS_UNKNOWN);
8100 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
8101 tristate::TS_FALSE);
8102 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
8103 tristate::TS_UNKNOWN);
8104 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
8105 tristate::TS_UNKNOWN);
8106 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
8107 tristate::TS_UNKNOWN);
8109 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
8110 tristate::TS_TRUE);
8111 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
8112 tristate::TS_UNKNOWN);
8113 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
8114 tristate::TS_UNKNOWN);
8115 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
8116 tristate::TS_UNKNOWN);
8118 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
8119 tristate::TS_TRUE);
8120 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
8121 tristate::TS_TRUE);
8122 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
8123 tristate::TS_UNKNOWN);
8124 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
8125 tristate::TS_UNKNOWN);
8127 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
8128 tristate::TS_FALSE);
8129 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
8130 tristate::TS_UNKNOWN);
8131 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
8132 tristate::TS_UNKNOWN);
8133 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
8134 tristate::TS_UNKNOWN);
8136 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
8137 tristate::TS_TRUE);
8138 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
8139 tristate::TS_UNKNOWN);
8140 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
8141 tristate::TS_UNKNOWN);
8142 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
8143 tristate::TS_UNKNOWN);
8146 /* Verify merging constraints for states simulating successive iterations
8147 of a loop.
8148 Simulate:
8149 for (i = 0; i < 256; i++)
8150 [...body...]
8151 i.e. this gimple:
8152 i_15 = 0;
8153 goto <bb 4>;
8155 <bb 4> :
8156 i_11 = PHI <i_15(2), i_23(3)>
8157 if (i_11 <= 255)
8158 goto <bb 3>;
8159 else
8160 goto [AFTER LOOP]
8162 <bb 3> :
8163 [LOOP BODY]
8164 i_23 = i_11 + 1;
8166 and thus these ops (and resultant states):
8167 i_11 = PHI()
8168 {i_11: 0}
8169 add_constraint (i_11 <= 255) [for the true edge]
8170 {i_11: 0} [constraint was a no-op]
8171 i_23 = i_11 + 1;
8172 {i_23: 1}
8173 i_11 = PHI()
8174 {i_11: WIDENED (at phi, 0, 1)}
8175 add_constraint (i_11 <= 255) [for the true edge]
8176 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
8177 i_23 = i_11 + 1;
8178 {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
8179 i_11 = PHI(); merge with state at phi above
8180 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
8181 [changing meaning of "WIDENED" here]
8182 if (i_11 <= 255)
8183 T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
8184 F: {i_11: 256}
8185 */
8187 static void
8188 test_iteration_1 ()
8190 region_model_manager mgr;
8191 program_point point (program_point::origin (mgr));
8193 tree int_0 = build_int_cst (integer_type_node, 0);
8194 tree int_1 = build_int_cst (integer_type_node, 1);
8195 tree int_256 = build_int_cst (integer_type_node, 256);
8196 tree int_257 = build_int_cst (integer_type_node, 257);
8197 tree i = build_global_decl ("i", integer_type_node);
8199 test_region_model_context ctxt;
8201 /* model0: i: 0. */
8202 region_model model0 (&mgr);
8203 model0.set_value (i, int_0, &ctxt);
8205 /* model1: i: 1. */
8206 region_model model1 (&mgr);
8207 model1.set_value (i, int_1, &ctxt);
8209 /* Should merge "i" to a widened value. */
8210 region_model model2 (&mgr);
8211 ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
8212 const svalue *merged_i = model2.get_rvalue (i, &ctxt);
8213 ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
8214 const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
8215 ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);
8217 /* Add constraint: i < 256. */
8218 model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
8219 ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
8220 tristate (tristate::TS_TRUE));
8221 ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
8222 tristate (tristate::TS_TRUE));
8224 /* Try merging with the initial state. */
8225 region_model model3 (&mgr);
8226 ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
8227 /* Merging the merged value with the initial value should be idempotent,
8228 so that the analysis converges. */
8229 ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
8230 /* Merger of 0 and a widening value with constraint < CST
8231 should retain the constraint, even though it was implicit
8232 for the 0 case. */
8233 ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
8234 tristate (tristate::TS_TRUE));
8235 /* ...and we should have equality: the analysis should have converged. */
8236 ASSERT_EQ (model3, model2);
8238 /* "i_23 = i_11 + 1;" */
8239 region_model model4 (model3);
8240 ASSERT_EQ (model4, model2);
8241 model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
8242 const svalue *plus_one = model4.get_rvalue (i, &ctxt);
8243 ASSERT_EQ (plus_one->get_kind (), SK_BINOP);
8245 /* Try merging with the "i: 1" state. */
8246 region_model model5 (&mgr);
8247 ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
8248 ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
8249 ASSERT_EQ (model5, model4);
8251 /* "i_11 = PHI();" merge with state at phi above.
8252 For i, we should have a merger of WIDENING with WIDENING + 1,
8253 and this should be WIDENING again. */
8254 region_model model6 (&mgr);
8255 ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
8256 const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
8257 ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);
8259 ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
8262 /* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
8263 all cast pointers to that region are also known to be non-NULL. */
8265 static void
8266 test_malloc_constraints ()
8268 region_model_manager mgr;
8269 region_model model (&mgr);
8270 tree p = build_global_decl ("p", ptr_type_node);
8271 tree char_star = build_pointer_type (char_type_node);
8272 tree q = build_global_decl ("q", char_star);
8273 tree null_ptr = build_int_cst (ptr_type_node, 0);
8275 const svalue *size_in_bytes
8276 = mgr.get_or_create_unknown_svalue (size_type_node);
8277 const region *reg = model.create_region_for_heap_alloc (size_in_bytes, NULL);
8278 const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
8279 model.set_value (model.get_lvalue (p, NULL), sval, NULL);
8280 model.set_value (q, p, NULL);
8282 ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
8283 ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
8284 ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
8285 ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);
8287 model.add_constraint (p, NE_EXPR, null_ptr, NULL);
8289 ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
8290 ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
8291 ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
8292 ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
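/* For reference, the test above simulates roughly this C code
   (a sketch; the allocation size is modeled here as unknown):

     void *p = malloc (n);
     char *q = (char *)p;
     if (p != NULL)
       ... here both "p" and "q" are known to be non-NULL,
           since they point to the same heap-allocated region ...  */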
8295 /* Smoketest of getting and setting the value of a variable. */
8297 static void
8298 test_var ()
8300 /* "int i;" */
8301 tree i = build_global_decl ("i", integer_type_node);
8303 tree int_17 = build_int_cst (integer_type_node, 17);
8304 tree int_m3 = build_int_cst (integer_type_node, -3);
8306 region_model_manager mgr;
8307 region_model model (&mgr);
8309 const region *i_reg = model.get_lvalue (i, NULL);
8310 ASSERT_EQ (i_reg->get_kind (), RK_DECL);
8312 /* Reading "i" should give a symbolic "initial value". */
8313 const svalue *sval_init = model.get_rvalue (i, NULL);
8314 ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
8315 ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
8316 /* ...and doing it again should give the same "initial value". */
8317 ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);
8319 /* "i = 17;". */
8320 model.set_value (i, int_17, NULL);
8321 ASSERT_EQ (model.get_rvalue (i, NULL),
8322 model.get_rvalue (int_17, NULL));
8324 /* "i = -3;". */
8325 model.set_value (i, int_m3, NULL);
8326 ASSERT_EQ (model.get_rvalue (i, NULL),
8327 model.get_rvalue (int_m3, NULL));
8329 /* Verify get_offset for "i". */
8331 region_offset offset = i_reg->get_offset (&mgr);
8332 ASSERT_EQ (offset.get_base_region (), i_reg);
8333 ASSERT_EQ (offset.get_bit_offset (), 0);
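/* Test getting and setting the values of elements within an array,
   via both constant and symbolic indices.  */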
8337 static void
8338 test_array_2 ()
8340 /* "int arr[10];" */
8341 tree tlen = size_int (10);
8342 tree arr_type
8343 = build_array_type (integer_type_node, build_index_type (tlen));
8344 tree arr = build_global_decl ("arr", arr_type);
8346 /* "int i;" */
8347 tree i = build_global_decl ("i", integer_type_node);
8349 tree int_0 = build_int_cst (integer_type_node, 0);
8350 tree int_1 = build_int_cst (integer_type_node, 1);
8352 tree arr_0 = build4 (ARRAY_REF, integer_type_node,
8353 arr, int_0, NULL_TREE, NULL_TREE);
8354 tree arr_1 = build4 (ARRAY_REF, integer_type_node,
8355 arr, int_1, NULL_TREE, NULL_TREE);
8356 tree arr_i = build4 (ARRAY_REF, integer_type_node,
8357 arr, i, NULL_TREE, NULL_TREE);
8359 tree int_17 = build_int_cst (integer_type_node, 17);
8360 tree int_42 = build_int_cst (integer_type_node, 42);
8361 tree int_m3 = build_int_cst (integer_type_node, -3);
8363 region_model_manager mgr;
8364 region_model model (&mgr);
8365 /* "arr[0] = 17;". */
8366 model.set_value (arr_0, int_17, NULL);
8367 /* "arr[1] = -3;". */
8368 model.set_value (arr_1, int_m3, NULL);
8370 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
8371 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));
8373 /* Overwrite a pre-existing binding: "arr[1] = 42;". */
8374 model.set_value (arr_1, int_42, NULL);
8375 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));
8377 /* Verify get_offset for "arr[0]". */
8379 const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
8380 region_offset offset = arr_0_reg->get_offset (&mgr);
8381 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
8382 ASSERT_EQ (offset.get_bit_offset (), 0);
8385 /* Verify get_offset for "arr[1]". */
8387 const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
8388 region_offset offset = arr_1_reg->get_offset (&mgr);
8389 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
8390 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
8393 /* Verify get_offset for "arr[i]". */
8395 const region *arr_i_reg = model.get_lvalue (arr_i, NULL);
8396 region_offset offset = arr_i_reg->get_offset (&mgr);
8397 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
8398 ASSERT_EQ (offset.get_symbolic_byte_offset ()->get_kind (), SK_BINOP);
8401 /* "arr[i] = i;" - this should remove the earlier bindings. */
8402 model.set_value (arr_i, i, NULL);
8403 ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
8404 ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);
8406 /* "arr[0] = 17;" - this should remove the arr[i] binding. */
8407 model.set_value (arr_0, int_17, NULL);
8408 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
8409 ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
8412 /* Smoketest of dereferencing a pointer via MEM_REF. */
8414 static void
8415 test_mem_ref ()
8417 /*
8418 x = 17;
8419 p = &x;
8420 */
8422 tree x = build_global_decl ("x", integer_type_node);
8423 tree int_star = build_pointer_type (integer_type_node);
8424 tree p = build_global_decl ("p", int_star);
8426 tree int_17 = build_int_cst (integer_type_node, 17);
8427 tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
8428 tree offset_0 = build_int_cst (integer_type_node, 0);
8429 tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);
8431 region_model_manager mgr;
8432 region_model model (&mgr);
8434 /* "x = 17;". */
8435 model.set_value (x, int_17, NULL);
8437 /* "p = &x;". */
8438 model.set_value (p, addr_of_x, NULL);
8440 const svalue *sval = model.get_rvalue (star_p, NULL);
8441 ASSERT_EQ (sval->maybe_get_constant (), int_17);
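/* A follow-on sketch (illustrative only; not registered in the
   test-runner below): a write through the dereferenced pointer
   should be visible when reading the pointed-to variable, since
   "p" points at "x".  */

static void ATTRIBUTE_UNUSED
sketch_write_through_mem_ref ()
{
  tree x = build_global_decl ("x", integer_type_node);
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);

  region_model_manager mgr;
  region_model model (&mgr);

  /* "p = &x;".  */
  model.set_value (p, addr_of_x, NULL);

  /* "*p = 17;" - write through the pointer.  */
  model.set_value (star_p, int_17, NULL);

  /* Reading "x" directly should see the value written via "*p".  */
  ASSERT_EQ (model.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
}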
8444 /* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
8445 Analogous to this code:
8446 void test_6 (int a[10])
8447 {
8448 __analyzer_eval (a[3] == 42); [should be UNKNOWN]
8449 a[3] = 42;
8450 __analyzer_eval (a[3] == 42); [should be TRUE]
8451 }
8452 from data-model-1.c, which looks like this at the gimple level:
8453 # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
8454 int *_1 = a_10(D) + 12; # POINTER_PLUS_EXPR
8455 int _2 = *_1; # MEM_REF
8456 _Bool _3 = _2 == 42;
8457 int _4 = (int) _3;
8458 __analyzer_eval (_4);
8460 # a[3] = 42;
8461 int *_5 = a_10(D) + 12; # POINTER_PLUS_EXPR
8462 *_5 = 42; # MEM_REF
8464 # __analyzer_eval (a[3] == 42); [should be TRUE]
8465 int *_6 = a_10(D) + 12; # POINTER_PLUS_EXPR
8466 int _7 = *_6; # MEM_REF
8467 _Bool _8 = _7 == 42;
8468 int _9 = (int) _8;
8469 __analyzer_eval (_9); */
8471 static void
8472 test_POINTER_PLUS_EXPR_then_MEM_REF ()
8474 tree int_star = build_pointer_type (integer_type_node);
8475 tree a = build_global_decl ("a", int_star);
8476 tree offset_12 = build_int_cst (size_type_node, 12);
8477 tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
8478 tree offset_0 = build_int_cst (integer_type_node, 0);
8479 tree mem_ref = build2 (MEM_REF, integer_type_node,
8480 pointer_plus_expr, offset_0);
8481 region_model_manager mgr;
8482 region_model m (&mgr);
8484 tree int_42 = build_int_cst (integer_type_node, 42);
8485 m.set_value (mem_ref, int_42, NULL);
8486 ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
8489 /* Verify that malloc works. */
8491 static void
8492 test_malloc ()
8494 tree int_star = build_pointer_type (integer_type_node);
8495 tree p = build_global_decl ("p", int_star);
8496 tree n = build_global_decl ("n", integer_type_node);
8497 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
8498 n, build_int_cst (size_type_node, 4));
8500 region_model_manager mgr;
8501 test_region_model_context ctxt;
8502 region_model model (&mgr);
8504 /* "p = malloc (n * 4);". */
8505 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
8506 const region *reg = model.create_region_for_heap_alloc (size_sval, &ctxt);
8507 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
8508 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
8509 ASSERT_EQ (model.get_capacity (reg), size_sval);
8512 /* Verify that alloca works. */
8514 static void
8515 test_alloca ()
8517 auto_vec <tree> param_types;
8518 tree fndecl = make_fndecl (integer_type_node,
8519 "test_fn",
8520 param_types);
8521 allocate_struct_function (fndecl, true);
8524 tree int_star = build_pointer_type (integer_type_node);
8525 tree p = build_global_decl ("p", int_star);
8526 tree n = build_global_decl ("n", integer_type_node);
8527 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
8528 n, build_int_cst (size_type_node, 4));
8530 region_model_manager mgr;
8531 test_region_model_context ctxt;
8532 region_model model (&mgr);
8534 /* Push stack frame. */
8535 const region *frame_reg
8536 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
8537 NULL, &ctxt);
8538 /* "p = alloca (n * 4);". */
8539 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
8540 const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
8541 ASSERT_EQ (reg->get_parent_region (), frame_reg);
8542 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
8543 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
8544 ASSERT_EQ (model.get_capacity (reg), size_sval);
8546 /* Verify that the pointers to the alloca region are replaced by
8547 poisoned values when the frame is popped. */
8548 model.pop_frame (NULL, NULL, &ctxt);
8549 ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
8552 /* Verify that svalue::involves_p works. */
8554 static void
8555 test_involves_p ()
8557 region_model_manager mgr;
8558 tree int_star = build_pointer_type (integer_type_node);
8559 tree p = build_global_decl ("p", int_star);
8560 tree q = build_global_decl ("q", int_star);
8562 test_region_model_context ctxt;
8563 region_model model (&mgr);
8564 const svalue *p_init = model.get_rvalue (p, &ctxt);
8565 const svalue *q_init = model.get_rvalue (q, &ctxt);
8567 ASSERT_TRUE (p_init->involves_p (p_init));
8568 ASSERT_FALSE (p_init->involves_p (q_init));
8570 const region *star_p_reg = mgr.get_symbolic_region (p_init);
8571 const region *star_q_reg = mgr.get_symbolic_region (q_init);
8573 const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
8574 const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);
8576 ASSERT_TRUE (init_star_p->involves_p (p_init));
8577 ASSERT_FALSE (p_init->involves_p (init_star_p));
8578 ASSERT_FALSE (init_star_p->involves_p (q_init));
8579 ASSERT_TRUE (init_star_q->involves_p (q_init));
8580 ASSERT_FALSE (init_star_q->involves_p (p_init));
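/* A follow-on sketch (illustrative only; not registered below),
   assuming binop svalues recurse into their operands for involves_p:
   the svalue for "i + 1" should involve the initial value of "i".  */

static void ATTRIBUTE_UNUSED
sketch_involves_p_binop ()
{
  region_model_manager mgr;
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree i = build_global_decl ("i", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *i_init = model.get_rvalue (i, &ctxt);

  /* "i = i + 1;" gives a binop svalue over i's initial value.  */
  model.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1),
		   &ctxt);
  const svalue *i_plus_one = model.get_rvalue (i, &ctxt);
  ASSERT_EQ (i_plus_one->get_kind (), SK_BINOP);
  ASSERT_TRUE (i_plus_one->involves_p (i_init));
}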
8583 /* Run all of the selftests within this file. */
8585 void
8586 analyzer_region_model_cc_tests ()
8588 test_tree_cmp_on_constants ();
8589 test_dump ();
8590 test_struct ();
8591 test_array_1 ();
8592 test_get_representative_tree ();
8593 test_unique_constants ();
8594 test_unique_unknowns ();
8595 test_initial_svalue_folding ();
8596 test_unaryop_svalue_folding ();
8597 test_binop_svalue_folding ();
8598 test_sub_svalue_folding ();
8599 test_bits_within_svalue_folding ();
8600 test_descendent_of_p ();
8601 test_bit_range_regions ();
8602 test_assignment ();
8603 test_compound_assignment ();
8604 test_stack_frames ();
8605 test_get_representative_path_var ();
8606 test_equality_1 ();
8607 test_canonicalization_2 ();
8608 test_canonicalization_3 ();
8609 test_canonicalization_4 ();
8610 test_state_merging ();
8611 test_constraint_merging ();
8612 test_widening_constraints ();
8613 test_iteration_1 ();
8614 test_malloc_constraints ();
8615 test_var ();
8616 test_array_2 ();
8617 test_mem_ref ();
8618 test_POINTER_PLUS_EXPR_then_MEM_REF ();
8619 test_malloc ();
8620 test_alloca ();
8621 test_involves_p ();
8624 } // namespace selftest
8626 #endif /* CHECKING_P */
8628 } // namespace ana
8630 #endif /* #if ENABLE_ANALYZER */