analyzer: fixes to side-effects for built-in functions [PR107565]
[official-gcc.git] / gcc / analyzer / region-model.cc
blob 2187aecbe9127fc6d29dab78f91ffb8a16b573e7
/* Classes for modeling the state of memory.
   Copyright (C) 2019-2023 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 #include "config.h"
22 #define INCLUDE_MEMORY
23 #include "system.h"
24 #include "coretypes.h"
25 #include "make-unique.h"
26 #include "tree.h"
27 #include "function.h"
28 #include "basic-block.h"
29 #include "gimple.h"
30 #include "gimple-iterator.h"
31 #include "diagnostic-core.h"
32 #include "graphviz.h"
33 #include "options.h"
34 #include "cgraph.h"
35 #include "tree-dfa.h"
36 #include "stringpool.h"
37 #include "convert.h"
38 #include "target.h"
39 #include "fold-const.h"
40 #include "tree-pretty-print.h"
41 #include "diagnostic-color.h"
42 #include "diagnostic-metadata.h"
43 #include "bitmap.h"
44 #include "selftest.h"
45 #include "analyzer/analyzer.h"
46 #include "analyzer/analyzer-logging.h"
47 #include "ordered-hash-map.h"
48 #include "options.h"
49 #include "cgraph.h"
50 #include "cfg.h"
51 #include "analyzer/supergraph.h"
52 #include "sbitmap.h"
53 #include "analyzer/call-string.h"
54 #include "analyzer/program-point.h"
55 #include "analyzer/store.h"
56 #include "analyzer/region-model.h"
57 #include "analyzer/constraint-manager.h"
58 #include "diagnostic-event-id.h"
59 #include "analyzer/sm.h"
60 #include "diagnostic-event-id.h"
61 #include "analyzer/sm.h"
62 #include "analyzer/pending-diagnostic.h"
63 #include "analyzer/region-model-reachability.h"
64 #include "analyzer/analyzer-selftests.h"
65 #include "analyzer/program-state.h"
66 #include "analyzer/call-summary.h"
67 #include "stor-layout.h"
68 #include "attribs.h"
69 #include "tree-object-size.h"
70 #include "gimple-ssa.h"
71 #include "tree-phinodes.h"
72 #include "tree-ssa-operands.h"
73 #include "ssa-iterators.h"
74 #include "calls.h"
75 #include "is-a.h"
76 #include "gcc-rich-location.h"
77 #include "analyzer/checker-event.h"
78 #include "analyzer/checker-path.h"
79 #include "analyzer/feasible-graph.h"
#if ENABLE_ANALYZER

namespace ana {

/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  */

void
dump_tree (pretty_printer *pp, tree t)
{
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}

/* Dump T to PP in language-independent form in quotes, for
   debugging/logging/dumping purposes.  */

void
dump_quoted_tree (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_tree (pp, t);
  pp_end_quote (pp, pp_show_color (pp));
}

/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
   calls within other pp_printf calls.

   default_tree_printer handles 'T' and some other codes by calling
     dump_generic_node (pp, t, 0, TDF_SLIM, 0);
   dump_generic_node calls pp_printf in various places, leading to
   garbled output.

   Ideally pp_printf could be made to be reentrant, but in the meantime
   this function provides a workaround.  */

void
print_quoted_type (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}
/* class region_to_value_map.  */

/* Assignment operator for region_to_value_map.  */

region_to_value_map &
region_to_value_map::operator= (const region_to_value_map &other)
{
  m_hash_map.empty ();
  for (auto iter : other.m_hash_map)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      m_hash_map.put (reg, sval);
    }
  return *this;
}

/* Equality operator for region_to_value_map.  */

bool
region_to_value_map::operator== (const region_to_value_map &other) const
{
  if (m_hash_map.elements () != other.m_hash_map.elements ())
    return false;

  for (auto iter : *this)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      const svalue * const *other_slot = other.get (reg);
      if (other_slot == NULL)
	return false;
      if (sval != *other_slot)
	return false;
    }

  return true;
}

/* Dump this object to PP.  */

void
region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
				 bool multiline) const
{
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (i > 0)
	pp_string (pp, ", ");
      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");
}

/* Dump this object to stderr.  */

DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* Attempt to merge THIS with OTHER, writing the result
   to OUT.

   For now, write (region, value) mappings that are in common between THIS
   and OTHER to OUT, effectively taking the intersection.

   Reject merger of different values.  */

bool
region_to_value_map::can_merge_with_p (const region_to_value_map &other,
				       region_to_value_map *out) const
{
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      const svalue * const * other_slot = other.get (iter_reg);
      if (other_slot)
	{
	  if (iter_sval == *other_slot)
	    out->put (iter_reg, iter_sval);
	  else
	    return false;
	}
    }
  return true;
}
/* Purge any state involving SVAL.  */

void
region_to_value_map::purge_state_involving (const svalue *sval)
{
  auto_vec<const region *> to_purge;
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
	to_purge.safe_push (iter_reg);
    }
  for (auto iter : to_purge)
    m_hash_map.remove (iter);
}
/* class region_model.  */

/* Ctor for region_model: construct an "empty" model.  */

region_model::region_model (region_model_manager *mgr)
: m_mgr (mgr), m_store (), m_current_frame (NULL),
  m_dynamic_extents ()
{
  m_constraints = new constraint_manager (mgr);
}

/* region_model's copy ctor.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_dynamic_extents (other.m_dynamic_extents)
{
}

/* region_model's dtor.  */

region_model::~region_model ()
{
  delete m_constraints;
}

/* region_model's assignment operator.  */

region_model &
region_model::operator= (const region_model &other)
{
  /* m_mgr is const.  */
  gcc_assert (m_mgr == other.m_mgr);

  m_store = other.m_store;

  delete m_constraints;
  m_constraints = new constraint_manager (*other.m_constraints);

  m_current_frame = other.m_current_frame;

  m_dynamic_extents = other.m_dynamic_extents;

  return *this;
}

/* Equality operator for region_model.

   Amongst other things this directly compares the stores and the constraint
   managers, so for this to be meaningful both this and OTHER should
   have been canonicalized.  */

bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  if (m_dynamic_extents != other.m_dynamic_extents)
    return false;

  gcc_checking_assert (hash () == other.hash ());

  return true;
}

/* Generate a hash value for this region_model.  */

hashval_t
region_model::hash () const
{
  hashval_t result = m_store.hash ();
  result ^= m_constraints->hash ();
  return result;
}

/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
			  bool multiline) const
{
  /* Dump stack.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
	pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
		      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}

/* Dump a representation of this model to FILE.  */

void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  dump (stderr, simple, true);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::debug () const
{
  dump (true);
}

/* Assert that this object is valid.  */

void
region_model::validate () const
{
  m_store.validate ();
}

/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}

/* Return true if this region_model is in canonical form.  */

bool
region_model::canonicalized_p () const
{
  region_model copy (*this);
  copy.canonicalize ();
  return *this == copy;
}

/* See the comment for store::loop_replay_fixup.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}
/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values.  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
			     const region *src_region,
			     tree check_expr)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region),
    m_check_expr (check_expr)
  {}

  const char *get_kind () const final override { return "poisoned_value_diagnostic"; }

  bool use_of_uninit_p () const final override
  {
    return m_pkind == POISON_KIND_UNINIT;
  }

  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
	    && m_pkind == other.m_pkind
	    && m_src_region == other.m_src_region);
  }

  int get_controlling_option () const final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return OPT_Wanalyzer_use_of_uninitialized_value;
      case POISON_KIND_FREED:
	return OPT_Wanalyzer_use_after_free;
      case POISON_KIND_POPPED_STACK:
	return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
      }
  }

  bool terminate_path_p () const final override { return true; }

  bool emit (rich_location *rich_loc) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	{
	  diagnostic_metadata m;
	  m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
	  return warning_meta (rich_loc, m, get_controlling_option (),
			       "use of uninitialized value %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_FREED:
	{
	  diagnostic_metadata m;
	  m.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return warning_meta (rich_loc, m, get_controlling_option (),
			       "use after %<free%> of %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_POPPED_STACK:
	{
	  /* TODO: which CWE?  */
	  return warning_at
	    (rich_loc, get_controlling_option (),
	     "dereferencing pointer %qE to within stale stack frame",
	     m_expr);
	}
	break;
      }
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return ev.formatted_print ("use of uninitialized value %qE here",
				   m_expr);
      case POISON_KIND_FREED:
	return ev.formatted_print ("use after %<free%> of %qE here",
				   m_expr);
      case POISON_KIND_POPPED_STACK:
	return ev.formatted_print
	  ("dereferencing pointer %qE to within stale stack frame",
	   m_expr);
      }
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

  /* Attempt to suppress false positives.
     Reject paths where the value of the underlying region isn't poisoned.
     This can happen due to state merging when exploring the exploded graph,
     where the more precise analysis during feasibility analysis finds that
     the region is in fact valid.
     To do this we need to get the value from the fgraph.  Unfortunately
     we can't simply query the state of m_src_region (from the enode),
     since it might be a different region in the fnode state (e.g. with
     heap-allocated regions, the numbering could be different).
     Hence we access m_check_expr, if available.  */

  bool check_valid_fpath_p (const feasible_node &fnode,
			    const gimple *emission_stmt)
    const final override
  {
    if (!m_check_expr)
      return true;

    /* We've reached the enode, but not necessarily the right function_point.
       Try to get the state at the correct stmt.  */
    region_model emission_model (fnode.get_model ().get_manager ());
    if (!fnode.get_state_at_stmt (emission_stmt, &emission_model))
      /* Couldn't get state; accept this diagnostic.  */
      return true;

    const svalue *fsval = emission_model.get_rvalue (m_check_expr, NULL);
    /* Check to see if the expr is also poisoned in FNODE (and in the
       same way).  */
    const poisoned_svalue * fspval = fsval->dyn_cast_poisoned_svalue ();
    if (!fspval)
      return false;
    if (fspval->get_poison_kind () != m_pkind)
      return false;
    return true;
  }

private:
  tree m_expr;
  enum poison_kind m_pkind;
  const region *m_src_region;
  tree m_check_expr;
};
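/* Editorial example (not in the original source): minimal C snippets this
   diagnostic is intended to catch under -fanalyzer:

     int test_uninit (void)
     {
       int i;
       return i;
     }

   is expected to trigger -Wanalyzer-use-of-uninitialized-value (CWE-457),
   and:

     #include <stdlib.h>
     int test_uaf (int *p)
     {
       free (p);
       return *p;
     }

   is expected to trigger -Wanalyzer-use-after-free (CWE-416).  */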
/* A subclass of pending_diagnostic for complaining about shifts
   by negative counts.  */

class shift_count_negative_diagnostic
: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
{
public:
  shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
  : m_assign (assign), m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_negative_diagnostic";
  }

  bool operator== (const shift_count_negative_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_negative;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "shift by negative count (%qE)", m_count_cst);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by negative amount here (%qE)",
			       m_count_cst);
  }

private:
  const gassign *m_assign;
  tree m_count_cst;
};
/* A subclass of pending_diagnostic for complaining about shifts
   by counts >= the width of the operand type.  */

class shift_count_overflow_diagnostic
: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
{
public:
  shift_count_overflow_diagnostic (const gassign *assign,
				   int operand_precision,
				   tree count_cst)
  : m_assign (assign), m_operand_precision (operand_precision),
    m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_overflow_diagnostic";
  }

  bool operator== (const shift_count_overflow_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && m_operand_precision == other.m_operand_precision
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_overflow;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "shift by count (%qE) >= precision of type (%qi)",
		       m_count_cst, m_operand_precision);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by count %qE here", m_count_cst);
  }

private:
  const gassign *m_assign;
  int m_operand_precision;
  tree m_count_cst;
};
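/* Editorial example (not in the original source): C code expected to
   trigger these two diagnostics under -fanalyzer, following INT34-C:

     int test_negative (int x) { return x << -1; }

   should trigger -Wanalyzer-shift-count-negative, and (with 32-bit int):

     int test_overflow (int x) { return x << 32; }

   should trigger -Wanalyzer-shift-count-overflow, since the count equals
   the precision of the operand type.  */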
/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL.  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
				  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      return NULL;

    case POINTER_PLUS_EXPR:
      {
	/* e.g. "_1 = a_10(D) + 12;" */
	tree ptr = rhs1;
	tree offset = gimple_assign_rhs2 (assign);

	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	/* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
	   is an integer of type sizetype".  */
	offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					ptr_sval, offset_sval);
	return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
	/* e.g. "_1 = p_2(D) - q_3(D);".  */
	tree rhs2 = gimple_assign_rhs2 (assign);
	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	// TODO: perhaps fold to zero if they're known to be equal?

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    /* Assignments of the form
	 set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL:/* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary ops.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
	return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (TREE_TYPE (lhs) == boolean_type_node)
	  {
	    /* Consider constraints between svalues.  */
	    tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
	    if (t.is_known ())
	      return m_mgr->get_or_create_constant_svalue
		(t.is_true () ? boolean_true_node : boolean_false_node);
	  }

	/* Otherwise, generate a symbolic binary op.  */
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
	/* Binary ops.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
	  {
	    /* "INT34-C. Do not shift an expression by a negative number of
	       bits or by greater than or equal to the number of bits that
	       exist in the operand."  */
	    if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
	      if (TREE_CODE (rhs2_cst) == INTEGER_CST)
		{
		  if (tree_int_cst_sgn (rhs2_cst) < 0)
		    ctxt->warn
		      (make_unique<shift_count_negative_diagnostic>
			 (assign, rhs2_cst));
		  else if (compare_tree_int (rhs2_cst,
					     TYPE_PRECISION (TREE_TYPE (rhs1)))
			   >= 0)
		    ctxt->warn
		      (make_unique<shift_count_overflow_diagnostic>
			 (assign,
			  int (TYPE_PRECISION (TREE_TYPE (rhs1))),
			  rhs2_cst));
		}
	  }

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}
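/* Editorial note (not in the original source): for boolean-typed
   comparisons the code above consults the constraint manager before
   falling back to a symbolic binop, so that in analyzed code roughly
   like:

     if (x == 42)
       {
	 _Bool flag = (x == 42);
	 ...
       }

   the second comparison may be folded to the constant true on the path
   where the constraint "x == 42" already holds, rather than being
   modelled symbolically.  */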
/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   of the form:
     ((A OR-IF B) OR-IF C)
   and:
     ((A AND-IF B) AND-IF C)
   where evaluating B is redundant, but could involve simple accesses of
   uninitialized locals.

   When optimization is turned on the FE can immediately fold compound
   conditionals.  Specifically, c_parser_condition parses this condition:
     ((A OR-IF B) OR-IF C)
   and calls c_fully_fold on the condition.
   Within c_fully_fold, fold_truth_andor is called, which bails when
   optimization is off, but if any optimization is turned on can convert the
     ((A OR-IF B) OR-IF C)
   into:
     ((A OR B) OR_IF C)
   for sufficiently simple B
   i.e. the inner OR-IF becomes an OR.
   At gimplification time the inner OR becomes BIT_IOR_EXPR (in
   gimplify_expr), giving this for the inner condition:
     tmp = A | B;
     if (tmp)
   thus effectively synthesizing a redundant access of B when optimization
   is turned on, when compared to:
     if (A) goto L1; else goto L4;
     L1: if (B) goto L2; else goto L4;
     L2: if (C) goto L3; else goto L4;
   for the unoptimized case.

   Return true if CTXT appears to be handling such a short-circuitable stmt,
   such as the def-stmt for B for the:
     tmp = A | B;
   case above, for the case where A is true and thus B would have been
   short-circuited without optimization, using MODEL for the value of A.  */

static bool
within_short_circuited_stmt_p (const region_model *model,
			       const gassign *assign_stmt)
{
  /* We must have an assignment to a temporary of _Bool type.  */
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_TYPE (lhs) != boolean_type_node)
    return false;
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  if (SSA_NAME_VAR (lhs) != NULL_TREE)
    return false;

  /* The temporary bool must be used exactly once: as the second arg of
     a BIT_IOR_EXPR or BIT_AND_EXPR.  */
  use_operand_p use_op;
  gimple *use_stmt;
  if (!single_imm_use (lhs, &use_op, &use_stmt))
    return false;
  const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
  if (!use_assign)
    return false;
  enum tree_code op = gimple_assign_rhs_code (use_assign);
  if (!(op == BIT_IOR_EXPR || op == BIT_AND_EXPR))
    return false;
  if (!(gimple_assign_rhs1 (use_assign) != lhs
	&& gimple_assign_rhs2 (use_assign) == lhs))
    return false;

  /* The first arg of the bitwise stmt must have a known value in MODEL
     that implies that the value of the second arg doesn't matter, i.e.
     1 for bitwise or, 0 for bitwise and.  */
  tree other_arg = gimple_assign_rhs1 (use_assign);
  /* Use a NULL ctxt here to avoid generating warnings.  */
  const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
  tree other_arg_cst = other_arg_sval->maybe_get_constant ();
  if (!other_arg_cst)
    return false;
  switch (op)
    {
    default:
      gcc_unreachable ();
    case BIT_IOR_EXPR:
      if (zerop (other_arg_cst))
	return false;
      break;
    case BIT_AND_EXPR:
      if (!zerop (other_arg_cst))
	return false;
      break;
    }

  /* All tests passed.  We appear to be in a stmt that generates a boolean
     temporary with a value that won't matter.  */
  return true;
}
/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   seen with -ftrivial-auto-var-init=.

   -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.

   If the address of the var is taken, gimplification will give us
   something like:

     _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
     len = _1;

   The result of DEFERRED_INIT will be an uninit value; we don't
   want to emit a false positive for "len = _1;"

   Return true if ASSIGN_STMT is such a stmt.  */

static bool
due_to_ifn_deferred_init_p (const gassign *assign_stmt)
{
  /* We must have an assignment to a decl from an SSA name that's the
     result of a IFN_DEFERRED_INIT call.  */
  if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
    return false;
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_CODE (lhs) != VAR_DECL)
    return false;
  tree rhs = gimple_assign_rhs1 (assign_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
  const gcall *call = dyn_cast <const gcall *> (def_stmt);
  if (!call)
    return false;
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return true;
  return false;
}
/* Check for SVAL being poisoned, adding a warning to CTXT.
   Return SVAL, or, if a warning is added, another value, to avoid
   repeatedly complaining about the same poisoned value in followup code.
   SRC_REGION is a hint about where SVAL came from, and can be NULL.  */

const svalue *
region_model::check_for_poison (const svalue *sval,
				tree expr,
				const region *src_region,
				region_model_context *ctxt) const
{
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();

      /* Ignore uninitialized uses of empty types; there's nothing
	 to initialize.  */
      if (pkind == POISON_KIND_UNINIT
	  && sval->get_type ()
	  && is_empty_type (sval->get_type ()))
	return sval;

      if (pkind == POISON_KIND_UNINIT)
	if (const gimple *curr_stmt = ctxt->get_stmt ())
	  if (const gassign *assign_stmt
		= dyn_cast <const gassign *> (curr_stmt))
	    {
	      /* Special case to avoid certain false positives.  */
	      if (within_short_circuited_stmt_p (this, assign_stmt))
		return sval;

	      /* Special case to avoid false positive on
		 -ftrivial-auto-var-init=.  */
	      if (due_to_ifn_deferred_init_p (assign_stmt))
		return sval;
	    }

      /* If we have an SSA name for a temporary, we don't want to print
	 '<unknown>'.
	 Poisoned values are shared by type, and so we can't reconstruct
	 the tree other than via the def stmts, using
	 fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      if (src_region == NULL && pkind == POISON_KIND_UNINIT)
	src_region = get_region_for_poisoned_expr (expr);

      /* Can we reliably get the poisoned value from "expr"?
	 This is for use by poisoned_value_diagnostic::check_valid_fpath_p.
	 Unfortunately, we might not have a reliable value for EXPR.
	 Hence we only query its value now, and only use it if we get the
	 poisoned value back again.  */
      tree check_expr = expr;
      const svalue *foo_sval = get_rvalue (expr, NULL);
      if (foo_sval == sval)
	check_expr = expr;
      else
	check_expr = NULL;
      if (ctxt->warn (make_unique<poisoned_value_diagnostic> (diag_arg,
							      pkind,
							      src_region,
							      check_expr)))
	{
	  /* We only want to report use of a poisoned value at the first
	     place it gets used; return an unknown value to avoid generating
	     a chain of followup warnings.  */
	  sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
	}

      return sval;
    }

  return sval;
}
/* Attempt to get a region for describing EXPR, the source of region of
   a poisoned_svalue for use in a poisoned_value_diagnostic.
   Return NULL if there is no good region to use.  */

const region *
region_model::get_region_for_poisoned_expr (tree expr) const
{
  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree decl = SSA_NAME_VAR (expr);
      if (decl && DECL_P (decl))
	expr = decl;
      else
	return NULL;
    }
  return get_lvalue (expr, NULL);
}
/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      check_for_poison (sval, expr, NULL, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
	if (0)
	  sorry_at (assign->location, "unhandled assignment op: %qs",
		    get_tree_code_name (op));
	const svalue *unknown_sval
	  = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
	set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
	if (TREE_CLOBBER_P (rhs1))
	  {
	    /* e.g. "x ={v} {CLOBBER};"  */
	    clobber_region (lhs_reg);
	  }
	else
	  {
	    /* Any CONSTRUCTOR that survives to this point is either
	       just a zero-init of everything, or a vector.  */
	    if (!CONSTRUCTOR_NO_CLEARING (rhs1))
	      zero_fill_region (lhs_reg);
	    unsigned ix;
	    tree index;
	    tree val;
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
	      {
		gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
		if (!index)
		  index = build_int_cst (integer_type_node, ix);
		gcc_assert (TREE_CODE (index) == INTEGER_CST);
		const svalue *index_sval
		  = m_mgr->get_or_create_constant_svalue (index);
		gcc_assert (index_sval);
		const region *sub_reg
		  = m_mgr->get_element_region (lhs_reg,
					       TREE_TYPE (val),
					       index_sval);
		const svalue *val_sval = get_rvalue (val, ctxt);
		set_value (sub_reg, val_sval, ctxt);
	      }
	  }
      }
      break;

    case STRING_CST:
      {
	/* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	m_store.set_value (m_mgr->get_store_manager (), lhs_reg, rhs_sval,
			   ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}
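/* Editorial example (not in the original source): user code reaching the
   CONSTRUCTOR case above.  For:

     int buf[4] = { 0 };

   the surviving CONSTRUCTOR has no elements and CONSTRUCTOR_NO_CLEARING is
   false, so the whole of "buf" is zero-filled; a vector constructor such
   as:

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si v = { 1, 2, 3, 4 };

   additionally binds each element region to its value.  */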
/* Handle the pre-sm-state part of STMT, modifying this object in-place.
   Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
   side effects.  */

void
region_model::on_stmt_pre (const gimple *stmt,
			   bool *out_unknown_side_effects,
			   region_model_context *ctxt)
{
  switch (gimple_code (stmt))
    {
    default:
      /* No-op for now.  */
      break;

    case GIMPLE_ASSIGN:
      {
	const gassign *assign = as_a <const gassign *> (stmt);
	on_assignment (assign, ctxt);
      }
      break;

    case GIMPLE_ASM:
      {
	const gasm *asm_stmt = as_a <const gasm *> (stmt);
	on_asm_stmt (asm_stmt, ctxt);
      }
      break;

    case GIMPLE_CALL:
      {
	/* Track whether we have a gcall to a function that's not recognized
	   by anything, for which we don't have a function body, or for which
	   we don't know the fndecl.  */
	const gcall *call = as_a <const gcall *> (stmt);
	*out_unknown_side_effects = on_call_pre (call, ctxt);
      }
      break;

    case GIMPLE_RETURN:
      {
	const greturn *return_ = as_a <const greturn *> (stmt);
	on_return (return_, ctxt);
      }
      break;
    }
}
/* Ensure that all arguments at the call described by CD are checked
   for poisoned values, by calling get_rvalue on each argument.  */

void
region_model::check_call_args (const call_details &cd) const
{
  for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
    cd.get_arg_svalue (arg_idx);
}
/* Return true if CD is known to be a call to a function with
   __attribute__((const)).  */

static bool
const_fn_p (const call_details &cd)
{
  tree fndecl = cd.get_fndecl_for_call ();
  if (!fndecl)
    return false;
  gcc_assert (DECL_P (fndecl));
  return TREE_READONLY (fndecl);
}
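/* Editorial example (not in the original source): a declaration that
   const_fn_p is expected to match, since __attribute__((const)) sets
   TREE_READONLY on the fndecl:

     extern int square (int) __attribute__ ((const));

   Repeated calls with the same argument svalues can then share a
   const_fn_result_svalue (see maybe_get_const_fn_result below).  */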
/* If this CD is known to be a call to a function with
   __attribute__((const)), attempt to get a const_fn_result_svalue
   based on the arguments, or return NULL otherwise.  */

static const svalue *
maybe_get_const_fn_result (const call_details &cd)
{
  if (!const_fn_p (cd))
    return NULL;

  unsigned num_args = cd.num_args ();
  if (num_args > const_fn_result_svalue::MAX_INPUTS)
    /* Too many arguments.  */
    return NULL;

  auto_vec<const svalue *> inputs (num_args);
  for (unsigned arg_idx = 0; arg_idx < num_args; arg_idx++)
    {
      const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
      if (!arg_sval->can_have_associated_state_p ())
	return NULL;
      inputs.quick_push (arg_sval);
    }

  region_model_manager *mgr = cd.get_manager ();
  const svalue *sval
    = mgr->get_or_create_const_fn_result_svalue (cd.get_lhs_type (),
						 cd.get_fndecl_for_call (),
						 inputs);
  return sval;
}
/* Update this model for an outcome of a call that returns a specific
   integer constant.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_int_cst_return (const call_details &cd,
					 int retval,
					 bool unmergeable)
{
  if (!cd.get_lhs_type ())
    return;
  if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
    return;
  const svalue *result
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
  if (unmergeable)
    result = m_mgr->get_or_create_unmergeable (result);
  set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
}

/* Update this model for an outcome of a call that returns zero.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_zero_return (const call_details &cd,
				      bool unmergeable)
{
  update_for_int_cst_return (cd, 0, unmergeable);
}

/* Update this model for an outcome of a call that returns non-zero.  */

void
region_model::update_for_nonzero_return (const call_details &cd)
{
  if (!cd.get_lhs_type ())
    return;
  if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
    return;
  const svalue *zero
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
  const svalue *result
    = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
  add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
}
/* Subroutine of region_model::maybe_get_copy_bounds.
   The Linux kernel commonly uses
     min_t([unsigned] long, VAR, sizeof(T));
   to set an upper bound on the size of a copy_to_user.
   Attempt to simplify such sizes by trying to get the upper bound as a
   constant.
   Return the simplified svalue if possible, or NULL otherwise.  */

static const svalue *
maybe_simplify_upper_bound (const svalue *num_bytes_sval,
			    region_model_manager *mgr)
{
  tree type = num_bytes_sval->get_type ();
  while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
    num_bytes_sval = raw;
  if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
    if (binop_sval->get_op () == MIN_EXPR)
      if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
	{
	  return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
	  /* TODO: we might want to also capture the constraint
	     when recording the diagnostic, or note that we're using
	     the upper bound.  */
	}
  return NULL;
}
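/* Editorial example (not in the original source): the kernel idiom being
   targeted.  In code such as (assuming the kernel's min_t macro):

     n = min_t(unsigned long, count, sizeof(buf));
     if (copy_to_user(user_ptr, buf, n))
       return -EFAULT;

   the size svalue for n is a MIN_EXPR binop whose second operand is the
   constant sizeof(buf); the code above returns that constant, cast back
   to the original type, as the upper bound on the copy size.  */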
/* Attempt to get an upper bound for the size of a copy when simulating a
   copy function.

   NUM_BYTES_SVAL is the symbolic value for the size of the copy.
   Use it if it's constant, otherwise try to simplify it.  Failing
   that, use the size of SRC_REG if constant.

   Return a symbolic value for an upper limit on the number of bytes
   copied, or NULL if no such value could be determined.  */

const svalue *
region_model::maybe_get_copy_bounds (const region *src_reg,
				     const svalue *num_bytes_sval)
{
  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  if (const svalue *simplified
	= maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
    num_bytes_sval = simplified;

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* For now, try just guessing the size as the capacity of the
     base region of the src.
     This is a hack; we might get too large a value.  */
  const region *src_base_reg = src_reg->get_base_region ();
  num_bytes_sval = get_capacity (src_base_reg);

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* Non-constant: give up.  */
  return NULL;
}
/* Get any known_function for FNDECL for call CD.

   The call must match all assumptions made by the known_function (such as
   e.g. "argument 1's type must be a pointer type").

   Return NULL if no known_function is found, or it does not match the
   assumption(s).  */

const known_function *
region_model::get_known_function (tree fndecl, const call_details &cd) const
{
  known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
  return known_fn_mgr->get_match (fndecl, cd);
}

/* Get any known_function for IFN, or NULL.  */

const known_function *
region_model::get_known_function (enum internal_fn ifn) const
{
  known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
  return known_fn_mgr->get_internal_fn (ifn);
}
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the first half.

   Updates to the region_model that should be made *before* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_post.

   Return true if the function call has unknown side effects (it wasn't
   recognized and we don't have a body for it, or are unable to tell which
   fndecl it is).  */

bool
region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
{
  call_details cd (call, this, ctxt);

  /* Special-case for IFN_DEFERRED_INIT.
     We want to report uninitialized variables with -fanalyzer (treating
     -ftrivial-auto-var-init= as purely a mitigation feature).
     Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
     lhs of the call, so that it is still uninitialized from the point of
     view of the analyzer.  */
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return false; /* No side effects.  */

  /* Get svalues for all of the arguments at the callsite, to ensure that we
     complain about any uninitialized arguments.  This might lead to
     duplicates if any of the handling below also looks up the svalues,
     but the deduplication code should deal with that.  */
  if (ctxt)
    check_call_args (cd);

  tree callee_fndecl = get_fndecl_for_call (call, ctxt);

  /* Some of the cases below update the lhs of the call based on the
     return value, but not all.  Provide a default value, which may
     get overwritten below.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const region *lhs_region = get_lvalue (lhs, ctxt);
      const svalue *sval = maybe_get_const_fn_result (cd);
      if (!sval)
	{
	  if (callee_fndecl
	      && lookup_attribute ("malloc", DECL_ATTRIBUTES (callee_fndecl)))
	    {
	      const region *new_reg
		= get_or_create_region_for_heap_alloc (NULL, ctxt);
	      mark_region_as_unknown (new_reg, NULL);
	      sval = m_mgr->get_ptr_svalue (cd.get_lhs_type (), new_reg);
	    }
	  else
	    /* For the common case of functions without
	       __attribute__((const)), use a conjured value, and purge any
	       prior state involving that value (in case this is in a
	       loop).  */
	    sval = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
							 lhs_region,
							 conjured_purge (this,
									 ctxt));
	}
      set_value (lhs_region, sval, ctxt);
    }

  if (gimple_call_internal_p (call))
    if (const known_function *kf
	  = get_known_function (gimple_call_internal_fn (call)))
      {
	kf->impl_call_pre (cd);
	return false; /* No further side effects.  */
      }

  if (!callee_fndecl)
    return true; /* Unknown side effects.  */

  if (const known_function *kf = get_known_function (callee_fndecl, cd))
    {
      kf->impl_call_pre (cd);
      return false; /* No further side effects.  */
    }

  const int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);
  if (callee_fndecl_flags & (ECF_CONST | ECF_PURE))
    return false; /* No side effects.  */

  if (fndecl_built_in_p (callee_fndecl))
    return true; /* Unknown side effects.  */

  if (!fndecl_has_gimple_body_p (callee_fndecl))
    return true; /* Unknown side effects.  */

  return false; /* No side effects.  */
}
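/* Editorial example (not in the original source): a user-declared
   allocator that takes the "malloc"-attribute branch above, so its lhs
   defaults to a pointer to a fresh heap-allocated region of unknown
   content:

     extern void *my_alloc (unsigned n) __attribute__ ((malloc));

     void *p = my_alloc (64);

   An unrecognized extern function without such attributes instead gets a
   conjured svalue for its lhs, and the call is reported as having unknown
   side effects.  */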
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the second half.

   Updates to the region_model that should be made *after* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_pre.

   If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
   to purge state.  */

void
region_model::on_call_post (const gcall *call,
			    bool unknown_side_effects,
			    region_model_context *ctxt)
{
  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      call_details cd (call, this, ctxt);
      if (const known_function *kf = get_known_function (callee_fndecl, cd))
	{
	  kf->impl_call_post (cd);
	  return;
	}
      /* Was this fndecl referenced by
	 __attribute__((malloc(FOO)))?  */
      if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
	{
	  impl_deallocation_call (cd);
	  return;
	}
    }

  if (unknown_side_effects)
    handle_unrecognized_call (call, ctxt);
}
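/* Editorial example (not in the original source): an allocator/deallocator
   pair linked via attribute malloc, so that the deallocator is recognized
   by the "*dealloc" lookup above:

     extern void my_free (void *p);
     extern void *my_alloc (unsigned n)
       __attribute__ ((malloc, malloc (my_free)));

   A call to my_free then goes through impl_deallocation_call, which is
   what enables double-free and use-after-free detection for this API.  */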
/* Purge state involving SVAL from this region_model, using CTXT
   (if non-NULL) to purge other state in a program_state.

   For example, if we're at the def-stmt of an SSA name, then we need to
   purge any state for svalues that involve that SSA name.  This avoids
   false positives in loops, since a symbolic value referring to the
   SSA name will be referring to the previous value of that SSA name.

   For example, in:
     while ((e = hashmap_iter_next(&iter))) {
       struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
       free (e_strbuf->value);
     }
   at the def-stmt of e_8:
     e_8 = hashmap_iter_next (&iter);
   we should purge the "freed" state of:
     INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
   which is the "e_strbuf->value" value from the previous iteration,
   or we will erroneously report a double-free - the "e_8" within it
   refers to the previous value.  */

void
region_model::purge_state_involving (const svalue *sval,
				     region_model_context *ctxt)
{
  if (!sval->can_have_associated_state_p ())
    return;
  m_store.purge_state_involving (sval, m_mgr);
  m_constraints->purge_state_involving (sval);
  m_dynamic_extents.purge_state_involving (sval);
  if (ctxt)
    ctxt->purge_state_involving (sval);
}
/* A pending_note subclass for adding a note about an
   __attribute__((access, ...)) to a diagnostic.  */

class reason_attr_access : public pending_note_subclass<reason_attr_access>
{
public:
  reason_attr_access (tree callee_fndecl, const attr_access &access)
  : m_callee_fndecl (callee_fndecl),
    m_ptr_argno (access.ptrarg),
    m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
  {
  }

  const char *get_kind () const final override { return "reason_attr_access"; }

  void emit () const final override
  {
    inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
	    "parameter %i of %qD marked with attribute %qs",
	    m_ptr_argno + 1, m_callee_fndecl, m_access_str);
  }

  bool operator== (const reason_attr_access &other) const
  {
    return (m_callee_fndecl == other.m_callee_fndecl
	    && m_ptr_argno == other.m_ptr_argno
	    && !strcmp (m_access_str, other.m_access_str));
  }

private:
  tree m_callee_fndecl;
  unsigned m_ptr_argno;
  const char *m_access_str;
};

/* Check CALL, a call to external function CALLEE_FNDECL, based on
   any __attribute__ ((access, ...)) on the latter, complaining to
   CTXT about any issues.

   Currently we merely call check_region_for_write on any regions
   pointed to by arguments marked with a "write_only" or "read_write"
   attribute.  */

void
region_model::
check_external_function_for_access_attr (const gcall *call,
					 tree callee_fndecl,
					 region_model_context *ctxt) const
{
  gcc_assert (call);
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  if (!fntype)
    return;

  if (!TYPE_ATTRIBUTES (fntype))
    return;

  /* Initialize a map of attribute access specifications for arguments
     to the function call.  */
  rdwr_map rdwr_idx;
  init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));

  unsigned argno = 0;

  for (tree iter = TYPE_ARG_TYPES (fntype); iter;
       iter = TREE_CHAIN (iter), ++argno)
    {
      const attr_access* access = rdwr_idx.get (argno);
      if (!access)
	continue;

      /* Ignore any duplicate entry in the map for the size argument.  */
      if (access->ptrarg != argno)
	continue;

      if (access->mode == access_write_only
	  || access->mode == access_read_write)
	{
	  /* Subclass of decorated_region_model_context that
	     adds a note about the attr access to any saved diagnostics.  */
	  class annotating_ctxt : public note_adding_context
	  {
	  public:
	    annotating_ctxt (tree callee_fndecl,
			     const attr_access &access,
			     region_model_context *ctxt)
	    : note_adding_context (ctxt),
	      m_callee_fndecl (callee_fndecl),
	      m_access (access)
	    {
	    }
	    std::unique_ptr<pending_note> make_note () final override
	    {
	      return make_unique<reason_attr_access>
		(m_callee_fndecl, m_access);
	    }
	  private:
	    tree m_callee_fndecl;
	    const attr_access &m_access;
	  };

	  /* Use this ctxt below so that any diagnostics get the
	     note added to them.  */
	  annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);

	  tree ptr_tree = gimple_call_arg (call, access->ptrarg);
	  const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
	  const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
	  check_region_for_write (reg, &my_ctxt);
	  /* We don't use the size arg for now.  */
	}
    }
}
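/* Editorial example (not in the original source): a declaration whose
   write_only access attribute routes the pointed-to region through
   check_region_for_write above:

     extern void fill (int *buf, unsigned n)
       __attribute__ ((access (write_only, 1, 2)));

   Passing a pointer to read-only data (e.g. a const array or string
   literal) to such a parameter is then expected to be flagged as a bad
   write, with a note pointing at the attribute via reason_attr_access.  */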
/* Handle a call CALL to a function with unknown behavior.

   Traverse the regions in this model, determining what regions are
   reachable from pointer arguments to CALL and from global variables,
   recursively.

   Set all reachable regions to new unknown values and purge sm-state
   from their values, and from values that point to them.  */

void
region_model::handle_unrecognized_call (const gcall *call,
					region_model_context *ctxt)
{
  tree fndecl = get_fndecl_for_call (call, ctxt);

  if (fndecl && ctxt)
    check_external_function_for_access_attr (call, fndecl, ctxt);

  reachable_regions reachable_regs (this);

  /* Determine the reachable regions and their mutability.  */
  {
    /* Add globals and regions that already escaped in previous
       unknown calls.  */
    m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			      &reachable_regs);

    /* Params that are pointers.  */
    tree iter_param_types = NULL_TREE;
    if (fndecl)
      iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
    for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call);
	 arg_idx++)
      {
	/* Track expected param type, where available.  */
	tree param_type = NULL_TREE;
	if (iter_param_types)
	  {
	    param_type = TREE_VALUE (iter_param_types);
	    gcc_assert (param_type);
	    iter_param_types = TREE_CHAIN (iter_param_types);
	  }

	tree parm = gimple_call_arg (call, arg_idx);
	const svalue *parm_sval = get_rvalue (parm, ctxt);
	reachable_regs.handle_parm (parm_sval, param_type);
      }
  }

  uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;

  /* Purge sm-state for the svalues that were reachable,
     both in non-mutable and mutable form.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, false);
    }
  for (svalue_set::iterator iter
	 = reachable_regs.begin_mutable_svals ();
       iter != reachable_regs.end_mutable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, true);
      if (uncertainty)
	uncertainty->on_mutable_sval_at_unknown_call (sval);
    }

  /* Mark any clusters that have escaped.  */
  reachable_regs.mark_escaped_clusters (ctxt);

  /* Update bindings for all clusters that have escaped, whether above,
     or previously.  */
  m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
			     conjured_purge (this, ctxt));

  /* Purge dynamic extents from any regions that have escaped mutably:
     realloc could have been called on them.  */
  for (hash_set<const region *>::iterator
	 iter = reachable_regs.begin_mutable_base_regs ();
       iter != reachable_regs.end_mutable_base_regs ();
       ++iter)
    {
      const region *base_reg = (*iter);
      unset_dynamic_extents (base_reg);
    }
}
/* Traverse the regions in this model, determining what regions are
   reachable from the store and populating *OUT.

   If EXTRA_SVAL is non-NULL, treat it as an additional "root"
   for reachability (for handling return values from functions when
   analyzing return of the only function on the stack).

   If UNCERTAINTY is non-NULL, treat any svalues that were recorded
   within it as being maybe-bound as additional "roots" for reachability.

   Find svalues that haven't leaked.  */

void
region_model::get_reachable_svalues (svalue_set *out,
				     const svalue *extra_sval,
				     const uncertainty_t *uncertainty)
{
  reachable_regions reachable_regs (this);

  /* Add globals and regions that already escaped in previous
     unknown calls.  */
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);

  if (extra_sval)
    reachable_regs.handle_sval (extra_sval);

  if (uncertainty)
    for (uncertainty_t::iterator iter
	   = uncertainty->begin_maybe_bound_svals ();
	 iter != uncertainty->end_maybe_bound_svals (); ++iter)
      reachable_regs.handle_sval (*iter);

  /* Get regions for locals that have explicitly bound values.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Populate *OUT based on the values that were reachable.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    out->add (*iter);
}
/* Update this model for the RETURN_STMT, using CTXT to report any
   diagnostics.  */

void
region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
{
  tree callee = get_current_function ()->decl;
  tree lhs = DECL_RESULT (callee);
  tree rhs = gimple_return_retval (return_stmt);

  if (lhs && rhs)
    {
      const svalue *sval = get_rvalue (rhs, ctxt);
      const region *ret_reg = get_lvalue (lhs, ctxt);
      set_value (ret_reg, sval, ctxt);
    }
}
/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
   ENODE, using CTXT to report any diagnostics.

   This is for the initial direct invocation of setjmp/sigsetjmp (which
   returns 0), as opposed to any second return due to longjmp/siglongjmp.  */

void
region_model::on_setjmp (const gcall *call, const exploded_node *enode,
			 region_model_context *ctxt)
{
  const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
  const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
					ctxt);

  /* Create a setjmp_svalue for this call and store it in BUF_REG's
     region.  */
  if (buf_reg)
    {
      setjmp_record r (enode, call);
      const svalue *sval
	= m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
      set_value (buf_reg, sval, ctxt);
    }

  /* Direct calls to setjmp return 0.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const svalue *new_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, new_sval, ctxt);
    }
}
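/* Editorial example (not in the original source): the setjmp/longjmp
   pattern that on_setjmp and on_longjmp (below) model.  On the direct
   call, setjmp returns 0; on the rewind modelled by on_longjmp, frames
   are popped and setjmp appears to return a nonzero value:

     #include <setjmp.h>
     static jmp_buf env;

     static void inner (void) { longjmp (env, 1); }

     int outer (void)
     {
       if (setjmp (env) == 0)
	 inner ();        first return: setjmp yields 0
       return 1;          second return: via longjmp, setjmp yields 1
     }
*/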
1938 /* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
1939 to a "setjmp" at SETJMP_CALL where the final stack depth should be
1940 SETJMP_STACK_DEPTH. Pop any stack frames. Leak detection is *not*
1941 done, and should be done by the caller. */
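/* For illustration (hypothetical values): rewinding via
     longjmp (env, 0);
   is modeled as the corresponding "setjmp" returning 1 (per the C
   standard, a val of 0 is replaced by 1), whereas
     longjmp (env, 42);
   is modeled as the "setjmp" returning 42.  */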
1943 void
1944 region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
1945 int setjmp_stack_depth, region_model_context *ctxt)
1947 /* Evaluate the val, using the frame of the "longjmp". */
1948 tree fake_retval = gimple_call_arg (longjmp_call, 1);
1949 const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);
1951 /* Pop any frames until we reach the stack depth of the function where
1952 setjmp was called. */
1953 gcc_assert (get_stack_depth () >= setjmp_stack_depth);
1954 while (get_stack_depth () > setjmp_stack_depth)
1955 pop_frame (NULL, NULL, ctxt);
1957 gcc_assert (get_stack_depth () == setjmp_stack_depth);
1959 /* Assign to the LHS of the "setjmp" call. */
1960 if (tree lhs = gimple_call_lhs (setjmp_call))
1962 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
1963 const svalue *zero_sval
1964 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
1965 tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
1966 /* If we have 0, use 1. */
1967 if (eq_zero.is_true ())
1969 const svalue *one_sval
1970 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
1971 fake_retval_sval = one_sval;
1973 else
1975 /* Otherwise note that the value is nonzero. */
1976 m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
1979 /* Decorate the return value from setjmp as being unmergeable,
1980 so that we don't attempt to merge states with it as zero
1981 with states in which it's nonzero, leading to a clean distinction
1982 in the exploded_graph between the first return and the second
1983 return. */
1984 fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);
1986 const region *lhs_reg = get_lvalue (lhs, ctxt);
1987 set_value (lhs_reg, fake_retval_sval, ctxt);
1991 /* Update this region_model for a phi stmt of the form
1992 LHS = PHI <...RHS...>.
1993 where RHS is for the appropriate edge.
1994 Get state from OLD_STATE so that all of the phi stmts for a basic block
1995 are effectively handled simultaneously. */
1997 void
1998 region_model::handle_phi (const gphi *phi,
1999 tree lhs, tree rhs,
2000 const region_model &old_state,
2001 region_model_context *ctxt)
2003 /* For now, don't bother tracking the .MEM SSA names. */
2004 if (tree var = SSA_NAME_VAR (lhs))
2005 if (TREE_CODE (var) == VAR_DECL)
2006 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
2007 return;
2009 const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
2010 const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
2012 set_value (dst_reg, src_sval, ctxt);
2014 if (ctxt)
2015 ctxt->on_phi (phi, rhs);
2018 /* Implementation of region_model::get_lvalue; the latter adds type-checking.
2020 Get the id of the region for PV within this region_model,
2021 emitting any diagnostics to CTXT. */
2023 const region *
2024 region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
2026 tree expr = pv.m_tree;
2028 gcc_assert (expr);
2030 switch (TREE_CODE (expr))
2032 default:
2033 return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
2034 dump_location_t ());
2036 case ARRAY_REF:
2038 tree array = TREE_OPERAND (expr, 0);
2039 tree index = TREE_OPERAND (expr, 1);
2041 const region *array_reg = get_lvalue (array, ctxt);
2042 const svalue *index_sval = get_rvalue (index, ctxt);
2043 return m_mgr->get_element_region (array_reg,
2044 TREE_TYPE (TREE_TYPE (array)),
2045 index_sval);
2047 break;
2049 case BIT_FIELD_REF:
2051 tree inner_expr = TREE_OPERAND (expr, 0);
2052 const region *inner_reg = get_lvalue (inner_expr, ctxt);
2053 tree num_bits = TREE_OPERAND (expr, 1);
2054 tree first_bit_offset = TREE_OPERAND (expr, 2);
2055 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2056 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2057 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2058 TREE_INT_CST_LOW (num_bits));
2059 return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
2061 break;
2063 case MEM_REF:
2065 tree ptr = TREE_OPERAND (expr, 0);
2066 tree offset = TREE_OPERAND (expr, 1);
2067 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2068 const svalue *offset_sval = get_rvalue (offset, ctxt);
2069 const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
2070 return m_mgr->get_offset_region (star_ptr,
2071 TREE_TYPE (expr),
2072 offset_sval);
2074 break;
2076 case FUNCTION_DECL:
2077 return m_mgr->get_region_for_fndecl (expr);
2079 case LABEL_DECL:
2080 return m_mgr->get_region_for_label (expr);
2082 case VAR_DECL:
2083 /* Handle globals. */
2084 if (is_global_var (expr))
2085 return m_mgr->get_region_for_global (expr);
2087 /* Fall through. */
2089 case SSA_NAME:
2090 case PARM_DECL:
2091 case RESULT_DECL:
2093 gcc_assert (TREE_CODE (expr) == SSA_NAME
2094 || TREE_CODE (expr) == PARM_DECL
2095 || TREE_CODE (expr) == VAR_DECL
2096 || TREE_CODE (expr) == RESULT_DECL);
2098 int stack_index = pv.m_stack_depth;
2099 const frame_region *frame = get_frame_at_index (stack_index);
2100 gcc_assert (frame);
2101 return frame->get_region_for_local (m_mgr, expr, ctxt);
2104 case COMPONENT_REF:
2106 /* obj.field */
2107 tree obj = TREE_OPERAND (expr, 0);
2108 tree field = TREE_OPERAND (expr, 1);
2109 const region *obj_reg = get_lvalue (obj, ctxt);
2110 return m_mgr->get_field_region (obj_reg, field);
2112 break;
2114 case STRING_CST:
2115 return m_mgr->get_region_for_string (expr);
2119 /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
2121 static void
2122 assert_compat_types (tree src_type, tree dst_type)
2124 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2126 #if CHECKING_P
2127 if (!(useless_type_conversion_p (src_type, dst_type)))
2128 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
2129 #endif
2133 /* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
2135 bool
2136 compat_types_p (tree src_type, tree dst_type)
2138 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2139 if (!(useless_type_conversion_p (src_type, dst_type)))
2140 return false;
2141 return true;
2144 /* Get the region for PV within this region_model,
2145 emitting any diagnostics to CTXT. */
2147 const region *
2148 region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
2150 if (pv.m_tree == NULL_TREE)
2151 return NULL;
2153 const region *result_reg = get_lvalue_1 (pv, ctxt);
2154 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
2155 return result_reg;
2158 /* Get the region for EXPR within this region_model (assuming the most
2159 recent stack frame if it's a local). */
2161 const region *
2162 region_model::get_lvalue (tree expr, region_model_context *ctxt) const
2164 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2167 /* Implementation of region_model::get_rvalue; the latter adds type-checking.
2169 Get the value of PV within this region_model,
2170 emitting any diagnostics to CTXT. */
2172 const svalue *
2173 region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
2175 gcc_assert (pv.m_tree);
2177 switch (TREE_CODE (pv.m_tree))
2179 default:
2180 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
2182 case ADDR_EXPR:
2184 /* "&EXPR". */
2185 tree expr = pv.m_tree;
2186 tree op0 = TREE_OPERAND (expr, 0);
2187 const region *expr_reg = get_lvalue (op0, ctxt);
2188 return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
2190 break;
2192 case BIT_FIELD_REF:
2194 tree expr = pv.m_tree;
2195 tree op0 = TREE_OPERAND (expr, 0);
2196 const region *reg = get_lvalue (op0, ctxt);
2197 tree num_bits = TREE_OPERAND (expr, 1);
2198 tree first_bit_offset = TREE_OPERAND (expr, 2);
2199 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2200 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2201 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2202 TREE_INT_CST_LOW (num_bits));
2203 return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
2206 case SSA_NAME:
2207 case VAR_DECL:
2208 case PARM_DECL:
2209 case RESULT_DECL:
2210 case ARRAY_REF:
2212 const region *reg = get_lvalue (pv, ctxt);
2213 return get_store_value (reg, ctxt);
2216 case REALPART_EXPR:
2217 case IMAGPART_EXPR:
2218 case VIEW_CONVERT_EXPR:
2220 tree expr = pv.m_tree;
2221 tree arg = TREE_OPERAND (expr, 0);
2222 const svalue *arg_sval = get_rvalue (arg, ctxt);
2223 const svalue *sval_unaryop
2224 = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
2225 arg_sval);
2226 return sval_unaryop;
2229 case INTEGER_CST:
2230 case REAL_CST:
2231 case COMPLEX_CST:
2232 case VECTOR_CST:
2233 case STRING_CST:
2234 return m_mgr->get_or_create_constant_svalue (pv.m_tree);
2236 case POINTER_PLUS_EXPR:
2238 tree expr = pv.m_tree;
2239 tree ptr = TREE_OPERAND (expr, 0);
2240 tree offset = TREE_OPERAND (expr, 1);
2241 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2242 const svalue *offset_sval = get_rvalue (offset, ctxt);
2243 const svalue *sval_binop
2244 = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
2245 ptr_sval, offset_sval);
2246 return sval_binop;
2249 /* Binary ops. */
2250 case PLUS_EXPR:
2251 case MULT_EXPR:
2252 case BIT_AND_EXPR:
2253 case BIT_IOR_EXPR:
2254 case BIT_XOR_EXPR:
2256 tree expr = pv.m_tree;
2257 tree arg0 = TREE_OPERAND (expr, 0);
2258 tree arg1 = TREE_OPERAND (expr, 1);
2259 const svalue *arg0_sval = get_rvalue (arg0, ctxt);
2260 const svalue *arg1_sval = get_rvalue (arg1, ctxt);
2261 const svalue *sval_binop
2262 = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
2263 arg0_sval, arg1_sval);
2264 return sval_binop;
2267 case COMPONENT_REF:
2268 case MEM_REF:
2270 const region *ref_reg = get_lvalue (pv, ctxt);
2271 return get_store_value (ref_reg, ctxt);
2273 case OBJ_TYPE_REF:
2275 tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
2276 return get_rvalue (expr, ctxt);
2281 /* Get the value of PV within this region_model,
2282 emitting any diagnostics to CTXT. */
2284 const svalue *
2285 region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
2287 if (pv.m_tree == NULL_TREE)
2288 return NULL;
2290 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
2292 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
2294 result_sval = check_for_poison (result_sval, pv.m_tree, NULL, ctxt);
2296 return result_sval;
2299 /* Get the value of EXPR within this region_model (assuming the most
2300 recent stack frame if it's a local). */
2302 const svalue *
2303 region_model::get_rvalue (tree expr, region_model_context *ctxt) const
2305 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2308 /* Return true if this model is on a path with "main" as the entrypoint
2309 (as opposed to one in which we're merely analyzing a subset of the
2310 path through the code). */
2312 bool
2313 region_model::called_from_main_p () const
2315 if (!m_current_frame)
2316 return false;
2317 /* Determine if the oldest stack frame in this model is for "main". */
2318 const frame_region *frame0 = get_frame_at_index (0);
2319 gcc_assert (frame0);
2320 return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
2323 /* Subroutine of region_model::get_store_value for when REG is (or is within)
2324 a global variable that hasn't been touched since the start of this path
2325 (or was implicitly touched due to a call to an unknown function). */
2327 const svalue *
2328 region_model::get_initial_value_for_global (const region *reg) const
2330 /* Get the decl that REG is for (or is within). */
2331 const decl_region *base_reg
2332 = reg->get_base_region ()->dyn_cast_decl_region ();
2333 gcc_assert (base_reg);
2334 tree decl = base_reg->get_decl ();
2336 /* Special-case: to avoid having to explicitly update all previously
2337 untracked globals when calling an unknown fn, they implicitly have
2338 an unknown value if an unknown call has occurred, unless this is
2339 static to-this-TU and hasn't escaped. Globals that have escaped
2340 are explicitly tracked, so we shouldn't hit this case for them. */
2341 if (m_store.called_unknown_fn_p ()
2342 && TREE_PUBLIC (decl)
2343 && !TREE_READONLY (decl))
2344 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
2346 /* If we are on a path from the entrypoint of "main" and we have a
2347 global decl defined in this TU that hasn't been touched yet, then
2348 the initial value of REG can be taken from the initialization value
2349 of the decl. */
2350 if (called_from_main_p () || TREE_READONLY (decl))
2352 /* Attempt to get the initializer value for base_reg. */
2353 if (const svalue *base_reg_init
2354 = base_reg->get_svalue_for_initializer (m_mgr))
2356 if (reg == base_reg)
2357 return base_reg_init;
2358 else
2360 /* Get the value for REG within base_reg_init. */
2361 binding_cluster c (base_reg);
2362 c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
2363 const svalue *sval
2364 = c.get_any_binding (m_mgr->get_store_manager (), reg);
2365 if (sval)
2367 if (reg->get_type ())
2368 sval = m_mgr->get_or_create_cast (reg->get_type (),
2369 sval);
2370 return sval;
2376 /* Otherwise, return INIT_VAL(REG). */
2377 return m_mgr->get_or_create_initial_value (reg);
2380 /* Get a value for REG, looking it up in the store, or otherwise falling
2381 back to "initial" or "unknown" values.
2382 Use CTXT to report any warnings associated with reading from REG. */
2384 const svalue *
2385 region_model::get_store_value (const region *reg,
2386 region_model_context *ctxt) const
2388 /* Getting the value of an empty region gives an unknown_svalue. */
2389 if (reg->empty_p ())
2390 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
2392 check_region_for_read (reg, ctxt);
2394 /* Special-case: handle var_decls in the constant pool. */
2395 if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
2396 if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
2397 return sval;
2399 const svalue *sval
2400 = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
2401 if (sval)
2403 if (reg->get_type ())
2404 sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
2405 return sval;
2408 /* Special-case: read at a constant index within a STRING_CST. */
2409 if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
2410 if (tree byte_offset_cst
2411 = offset_reg->get_byte_offset ()->maybe_get_constant ())
2412 if (const string_region *str_reg
2413 = reg->get_parent_region ()->dyn_cast_string_region ())
2415 tree string_cst = str_reg->get_string_cst ();
2416 if (const svalue *char_sval
2417 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2418 byte_offset_cst))
2419 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
2422 /* Special-case: read the initial char of a STRING_CST. */
2423 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
2424 if (const string_region *str_reg
2425 = cast_reg->get_original_region ()->dyn_cast_string_region ())
2427 tree string_cst = str_reg->get_string_cst ();
2428 tree byte_offset_cst = build_int_cst (integer_type_node, 0);
2429 if (const svalue *char_sval
2430 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2431 byte_offset_cst))
2432 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
2435 /* Otherwise we implicitly have the initial value of the region
2436 (if the cluster had been touched, binding_cluster::get_any_binding
2437 would have returned UNKNOWN, and we would already have returned
2438 that above). */
2440 /* Handle globals. */
2441 if (reg->get_base_region ()->get_parent_region ()->get_kind ()
2442 == RK_GLOBALS)
2443 return get_initial_value_for_global (reg);
2445 return m_mgr->get_or_create_initial_value (reg);
2448 /* Return false if REG does not exist, true if it might.
2449 This is for detecting regions within the stack that don't exist anymore
2450 after frames are popped. */
2452 bool
2453 region_model::region_exists_p (const region *reg) const
2455 /* If within a stack frame, check that the stack frame is live. */
2456 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
2458 /* Check that the current frame is the enclosing frame, or is called
2459 by it. */
2460 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
2461 iter_frame = iter_frame->get_calling_frame ())
2462 if (iter_frame == enclosing_frame)
2463 return true;
2464 return false;
2467 return true;
2470 /* Get a region for referencing PTR_SVAL, creating a region if need be, and
2471 potentially generating warnings via CTXT.
2472 PTR_SVAL must be of pointer type.
2473 PTR_TREE if non-NULL can be used when emitting diagnostics. */
2475 const region *
2476 region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
2477 region_model_context *ctxt) const
2479 gcc_assert (ptr_sval);
2480 gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));
2482 /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
2483 as a constraint. This suppresses false positives from
2484 -Wanalyzer-null-dereference for the case where we later have an
2485 if (PTR_SVAL) that would occur if we considered the false branch
2486 and transitioned the malloc state machine from start->null. */
2487 tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
2488 const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
2489 m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
2491 switch (ptr_sval->get_kind ())
2493 default:
2494 break;
2496 case SK_REGION:
2498 const region_svalue *region_sval
2499 = as_a <const region_svalue *> (ptr_sval);
2500 return region_sval->get_pointee ();
2503 case SK_BINOP:
2505 const binop_svalue *binop_sval
2506 = as_a <const binop_svalue *> (ptr_sval);
2507 switch (binop_sval->get_op ())
2509 case POINTER_PLUS_EXPR:
2511 /* If we have a symbolic value expressing pointer arithmetic,
2512 try to convert it to a suitable region. */
2513 const region *parent_region
2514 = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
2515 const svalue *offset = binop_sval->get_arg1 ();
2516 tree type = TREE_TYPE (ptr_sval->get_type ());
2517 return m_mgr->get_offset_region (parent_region, type, offset);
2519 default:
2520 break;
2523 break;
2525 case SK_POISONED:
2527 if (ctxt)
2529 tree ptr = get_representative_tree (ptr_sval);
2530 /* If we can't get a representative tree for PTR_SVAL
2531 (e.g. if it hasn't been bound into the store), then
2532 fall back on PTR_TREE, if non-NULL. */
2533 if (!ptr)
2534 ptr = ptr_tree;
2535 if (ptr)
2537 const poisoned_svalue *poisoned_sval
2538 = as_a <const poisoned_svalue *> (ptr_sval);
2539 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
2540 ctxt->warn (make_unique<poisoned_value_diagnostic>
2541 (ptr, pkind, NULL, NULL));
2545 break;
2548 return m_mgr->get_symbolic_region (ptr_sval);
2551 /* Attempt to get BITS within any value of REG, as TYPE.
2552 In particular, extract values from compound_svalues for the case
2553 where there's a concrete binding at BITS.
2554 Return an unknown svalue if we can't handle the given case.
2555 Use CTXT to report any warnings associated with reading from REG. */
2557 const svalue *
2558 region_model::get_rvalue_for_bits (tree type,
2559 const region *reg,
2560 const bit_range &bits,
2561 region_model_context *ctxt) const
2563 const svalue *sval = get_store_value (reg, ctxt);
2564 return m_mgr->get_or_create_bits_within (type, bits, sval);
2567 /* A subclass of pending_diagnostic for complaining about writes to
2568 constant regions of memory. */
2570 class write_to_const_diagnostic
2571 : public pending_diagnostic_subclass<write_to_const_diagnostic>
2573 public:
2574 write_to_const_diagnostic (const region *reg, tree decl)
2575 : m_reg (reg), m_decl (decl)
2578 const char *get_kind () const final override
2580 return "write_to_const_diagnostic";
2583 bool operator== (const write_to_const_diagnostic &other) const
2585 return (m_reg == other.m_reg
2586 && m_decl == other.m_decl);
2589 int get_controlling_option () const final override
2591 return OPT_Wanalyzer_write_to_const;
2594 bool emit (rich_location *rich_loc) final override
2596 auto_diagnostic_group d;
2597 bool warned;
2598 switch (m_reg->get_kind ())
2600 default:
2601 warned = warning_at (rich_loc, get_controlling_option (),
2602 "write to %<const%> object %qE", m_decl);
2603 break;
2604 case RK_FUNCTION:
2605 warned = warning_at (rich_loc, get_controlling_option (),
2606 "write to function %qE", m_decl);
2607 break;
2608 case RK_LABEL:
2609 warned = warning_at (rich_loc, get_controlling_option (),
2610 "write to label %qE", m_decl);
2611 break;
2613 if (warned)
2614 inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
2615 return warned;
2618 label_text describe_final_event (const evdesc::final_event &ev) final override
2620 switch (m_reg->get_kind ())
2622 default:
2623 return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
2624 case RK_FUNCTION:
2625 return ev.formatted_print ("write to function %qE here", m_decl);
2626 case RK_LABEL:
2627 return ev.formatted_print ("write to label %qE here", m_decl);
2631 private:
2632 const region *m_reg;
2633 tree m_decl;
2636 /* A subclass of pending_diagnostic for complaining about writes to
2637 string literals. */
2639 class write_to_string_literal_diagnostic
2640 : public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
2642 public:
2643 write_to_string_literal_diagnostic (const region *reg)
2644 : m_reg (reg)
2647 const char *get_kind () const final override
2649 return "write_to_string_literal_diagnostic";
2652 bool operator== (const write_to_string_literal_diagnostic &other) const
2654 return m_reg == other.m_reg;
2657 int get_controlling_option () const final override
2659 return OPT_Wanalyzer_write_to_string_literal;
2662 bool emit (rich_location *rich_loc) final override
2664 return warning_at (rich_loc, get_controlling_option (),
2665 "write to string literal");
2666 /* Ideally we would show the location of the STRING_CST as well,
2667 but it is not available at this point. */
2670 label_text describe_final_event (const evdesc::final_event &ev) final override
2672 return ev.formatted_print ("write to string literal here");
2675 private:
2676 const region *m_reg;
2679 /* Use CTXT to warn if DEST_REG is a region that shouldn't be written to. */
2681 void
2682 region_model::check_for_writable_region (const region* dest_reg,
2683 region_model_context *ctxt) const
2685 /* Fail gracefully if CTXT is NULL. */
2686 if (!ctxt)
2687 return;
2689 const region *base_reg = dest_reg->get_base_region ();
2690 switch (base_reg->get_kind ())
2692 default:
2693 break;
2694 case RK_FUNCTION:
2696 const function_region *func_reg = as_a <const function_region *> (base_reg);
2697 tree fndecl = func_reg->get_fndecl ();
2698 ctxt->warn (make_unique<write_to_const_diagnostic>
2699 (func_reg, fndecl));
2701 break;
2702 case RK_LABEL:
2704 const label_region *label_reg = as_a <const label_region *> (base_reg);
2705 tree label = label_reg->get_label ();
2706 ctxt->warn (make_unique<write_to_const_diagnostic>
2707 (label_reg, label));
2709 break;
2710 case RK_DECL:
2712 const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
2713 tree decl = decl_reg->get_decl ();
2714 /* Warn about writes to const globals.
2715 Don't warn for writes to const locals, and params in particular,
2716 since we would warn in push_frame when setting them up (e.g. the
2717 "this" param is "T* const"). */
2718 if (TREE_READONLY (decl)
2719 && is_global_var (decl))
2720 ctxt->warn (make_unique<write_to_const_diagnostic> (dest_reg, decl));
2722 break;
2723 case RK_STRING:
2724 ctxt->warn (make_unique<write_to_string_literal_diagnostic> (dest_reg));
2725 break;
2729 /* Get the capacity of REG in bytes. */
2731 const svalue *
2732 region_model::get_capacity (const region *reg) const
2734 switch (reg->get_kind ())
2736 default:
2737 break;
2738 case RK_DECL:
2740 const decl_region *decl_reg = as_a <const decl_region *> (reg);
2741 tree decl = decl_reg->get_decl ();
2742 if (TREE_CODE (decl) == SSA_NAME)
2744 tree type = TREE_TYPE (decl);
2745 tree size = TYPE_SIZE (type);
2746 return get_rvalue (size, NULL);
2748 else
2750 tree size = decl_init_size (decl, false);
2751 if (size)
2752 return get_rvalue (size, NULL);
2755 break;
2756 case RK_SIZED:
2757 /* Look through sized regions to get at the capacity
2758 of the underlying regions. */
2759 return get_capacity (reg->get_parent_region ());
2762 if (const svalue *recorded = get_dynamic_extents (reg))
2763 return recorded;
2765 return m_mgr->get_or_create_unknown_svalue (sizetype);
2768 /* Return the string size, including the 0-terminator, if SVAL is a
2769 constant_svalue holding a string. Otherwise, return an unknown_svalue. */
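/* E.g. for a constant_svalue holding "foo", TREE_STRING_LENGTH is 4
   (three chars plus the 0-terminator), so the result is a
   constant_svalue of 4.  */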
2771 const svalue *
2772 region_model::get_string_size (const svalue *sval) const
2774 tree cst = sval->maybe_get_constant ();
2775 if (!cst || TREE_CODE (cst) != STRING_CST)
2776 return m_mgr->get_or_create_unknown_svalue (size_type_node);
2778 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
2779 return m_mgr->get_or_create_constant_svalue (out);
2782 /* Return the string size, including the 0-terminator, if REG is a
2783 string_region. Otherwise, return an unknown_svalue. */
2785 const svalue *
2786 region_model::get_string_size (const region *reg) const
2788 const string_region *str_reg = dyn_cast <const string_region *> (reg);
2789 if (!str_reg)
2790 return m_mgr->get_or_create_unknown_svalue (size_type_node);
2792 tree cst = str_reg->get_string_cst ();
2793 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
2794 return m_mgr->get_or_create_constant_svalue (out);
2797 /* If CTXT is non-NULL, use it to warn about any problems accessing REG,
2798 using DIR to determine if this access is a read or write. */
2800 void
2801 region_model::check_region_access (const region *reg,
2802 enum access_direction dir,
2803 region_model_context *ctxt) const
2805 /* Fail gracefully if CTXT is NULL. */
2806 if (!ctxt)
2807 return;
2809 check_region_for_taint (reg, dir, ctxt);
2810 check_region_bounds (reg, dir, ctxt);
2812 switch (dir)
2814 default:
2815 gcc_unreachable ();
2816 case DIR_READ:
2817 /* Currently a no-op. */
2818 break;
2819 case DIR_WRITE:
2820 check_for_writable_region (reg, ctxt);
2821 break;
2825 /* If CTXT is non-NULL, use it to warn about any problems writing to REG. */
2827 void
2828 region_model::check_region_for_write (const region *dest_reg,
2829 region_model_context *ctxt) const
2831 check_region_access (dest_reg, DIR_WRITE, ctxt);
2834 /* If CTXT is non-NULL, use it to warn about any problems reading from REG. */
2836 void
2837 region_model::check_region_for_read (const region *src_reg,
2838 region_model_context *ctxt) const
2840 check_region_access (src_reg, DIR_READ, ctxt);
2843 /* Concrete subclass for casts of pointers that lead to trailing bytes. */
2845 class dubious_allocation_size
2846 : public pending_diagnostic_subclass<dubious_allocation_size>
2848 public:
2849 dubious_allocation_size (const region *lhs, const region *rhs)
2850 : m_lhs (lhs), m_rhs (rhs), m_expr (NULL_TREE),
2851 m_has_allocation_event (false)
2854 dubious_allocation_size (const region *lhs, const region *rhs,
2855 tree expr)
2856 : m_lhs (lhs), m_rhs (rhs), m_expr (expr),
2857 m_has_allocation_event (false)
2860 const char *get_kind () const final override
2862 return "dubious_allocation_size";
2865 bool operator== (const dubious_allocation_size &other) const
2867 return m_lhs == other.m_lhs && m_rhs == other.m_rhs
2868 && pending_diagnostic::same_tree_p (m_expr, other.m_expr);
2871 int get_controlling_option () const final override
2873 return OPT_Wanalyzer_allocation_size;
2876 bool emit (rich_location *rich_loc) final override
2878 diagnostic_metadata m;
2879 m.add_cwe (131);
2881 return warning_meta (rich_loc, m, get_controlling_option (),
2882 "allocated buffer size is not a multiple"
2883 " of the pointee's size");
2886 label_text describe_final_event (const evdesc::final_event &ev) final
2887 override
2889 tree pointee_type = TREE_TYPE (m_lhs->get_type ());
2890 if (m_has_allocation_event)
2891 return ev.formatted_print ("assigned to %qT here;"
2892 " %<sizeof (%T)%> is %qE",
2893 m_lhs->get_type (), pointee_type,
2894 size_in_bytes (pointee_type));
2895 /* Fallback: typically we should have seen an allocation_event
2896 before this point. */
2897 if (m_expr)
2899 if (TREE_CODE (m_expr) == INTEGER_CST)
2900 return ev.formatted_print ("allocated %E bytes and assigned to"
2901 " %qT here; %<sizeof (%T)%> is %qE",
2902 m_expr, m_lhs->get_type (), pointee_type,
2903 size_in_bytes (pointee_type));
2904 else
2905 return ev.formatted_print ("allocated %qE bytes and assigned to"
2906 " %qT here; %<sizeof (%T)%> is %qE",
2907 m_expr, m_lhs->get_type (), pointee_type,
2908 size_in_bytes (pointee_type));
2911 return ev.formatted_print ("allocated and assigned to %qT here;"
2912 " %<sizeof (%T)%> is %qE",
2913 m_lhs->get_type (), pointee_type,
2914 size_in_bytes (pointee_type));
2917 void
2918 add_region_creation_events (const region *,
2919 tree capacity,
2920 const event_loc_info &loc_info,
2921 checker_path &emission_path) final override
2923 emission_path.add_event
2924 (make_unique<region_creation_event_allocation_size> (capacity, loc_info));
2926 m_has_allocation_event = true;
2929 void mark_interesting_stuff (interesting_t *interest) final override
2931 interest->add_region_creation (m_rhs);
2934 private:
2935 const region *m_lhs;
2936 const region *m_rhs;
2937 const tree m_expr;
2938 bool m_has_allocation_event;
2941 /* Return true if a constant size CST is a compatible capacity for the pointee size POINTEE_SIZE_TREE. */
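/* For example, with a pointee size of 4 bytes (illustrative only):
   capacities of 0, 4, and 8 bytes are compatible, whereas 11 bytes is
   not; for a struct pointee, a capacity of 0 or of anything at least
   the struct's size is accepted.  */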
2943 static bool
2944 capacity_compatible_with_type (tree cst, tree pointee_size_tree,
2945 bool is_struct)
2947 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
2948 gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);
2950 unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
2951 unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);
2953 if (is_struct)
2954 return alloc_size == 0 || alloc_size >= pointee_size;
2955 return alloc_size % pointee_size == 0;
2958 static bool
2959 capacity_compatible_with_type (tree cst, tree pointee_size_tree)
2961 return capacity_compatible_with_type (cst, pointee_size_tree, false);
2964 /* Check whether SVAL could be a multiple of SIZE_CST.
2966 It works by visiting all svalues inside SVAL until it reaches
2967 atomic nodes. From those, it goes back up again and adds each
2968 node that might be a multiple of SIZE_CST to the RESULT_SET. */
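/* For example, with SIZE_CST 4 (illustrative only):
   - "n * 4" is added: a MULT_EXPR needs just one compatible operand;
   - "n * 4 + 8" is added: other binops need both operands compatible;
   - "n * 4 + 3" is not added, since 3 is not a multiple of 4.  */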
2970 class size_visitor : public visitor
2972 public:
2973 size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
2974 : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
2976 m_root_sval->accept (this);
2979 bool get_result ()
2981 return result_set.contains (m_root_sval);
2984 void visit_constant_svalue (const constant_svalue *sval) final override
2986 check_constant (sval->get_constant (), sval);
2989 void visit_unknown_svalue (const unknown_svalue *sval ATTRIBUTE_UNUSED)
2990 final override
2992 result_set.add (sval);
2995 void visit_poisoned_svalue (const poisoned_svalue *sval ATTRIBUTE_UNUSED)
2996 final override
2998 result_set.add (sval);
3001 void visit_unaryop_svalue (const unaryop_svalue *sval) final override
3003 const svalue *arg = sval->get_arg ();
3004 if (result_set.contains (arg))
3005 result_set.add (sval);
3008 void visit_binop_svalue (const binop_svalue *sval) final override
3010 const svalue *arg0 = sval->get_arg0 ();
3011 const svalue *arg1 = sval->get_arg1 ();
3013 if (sval->get_op () == MULT_EXPR)
3015 if (result_set.contains (arg0) || result_set.contains (arg1))
3016 result_set.add (sval);
3018 else
3020 if (result_set.contains (arg0) && result_set.contains (arg1))
3021 result_set.add (sval);
3025 void visit_repeated_svalue (const repeated_svalue *sval) final override
3027 sval->get_inner_svalue ()->accept (this);
3028 if (result_set.contains (sval->get_inner_svalue ()))
3029 result_set.add (sval);
3032 void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
3034 sval->get_arg ()->accept (this);
3035 if (result_set.contains (sval->get_arg ()))
3036 result_set.add (sval);
3039 void visit_widening_svalue (const widening_svalue *sval) final override
3041 const svalue *base = sval->get_base_svalue ();
3042 const svalue *iter = sval->get_iter_svalue ();
3044 if (result_set.contains (base) && result_set.contains (iter))
3045 result_set.add (sval);
3048 void visit_conjured_svalue (const conjured_svalue *sval ATTRIBUTE_UNUSED)
3049 final override
3051 equiv_class_id id (-1);
3052 if (m_cm->get_equiv_class_by_svalue (sval, &id))
3054 if (tree cst = id.get_obj (*m_cm).get_any_constant ())
3055 check_constant (cst, sval);
3056 else
3057 result_set.add (sval);
3061 void visit_asm_output_svalue (const asm_output_svalue *sval ATTRIBUTE_UNUSED)
3062 final override
3064 result_set.add (sval);
3067 void visit_const_fn_result_svalue (const const_fn_result_svalue
3068 *sval ATTRIBUTE_UNUSED) final override
3070 result_set.add (sval);
3073 private:
3074 void check_constant (tree cst, const svalue *sval)
3076 switch (TREE_CODE (cst))
3078 default:
3079 /* Assume all unhandled operands are compatible. */
3080 result_set.add (sval);
3081 break;
3082 case INTEGER_CST:
3083 if (capacity_compatible_with_type (cst, m_size_cst))
3084 result_set.add (sval);
3085 break;
3089 tree m_size_cst;
3090 const svalue *m_root_sval;
3091 constraint_manager *m_cm;
3092 svalue_set result_set; /* Used as a mapping of svalue*->bool. */
3095 /* Return true if a struct or union either uses the inheritance pattern,
3096 where the first field is a base struct, or the flexible array member
3097 pattern, where the last field is an array without a specified size. */
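/* For example (hypothetical types, illustrative only):

     struct base { int i; };
     struct sub { struct base b; int j; };     // "inheritance" pattern
     struct str { size_t len; char buf[]; };   // flexible array member
*/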
3099 static bool
3100 struct_or_union_with_inheritance_p (tree struc)
3102 tree iter = TYPE_FIELDS (struc);
3103 if (iter == NULL_TREE)
3104 return false;
3105 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
3106 return true;
3108 tree last_field;
3109 while (iter != NULL_TREE)
3111 last_field = iter;
3112 iter = DECL_CHAIN (iter);
3115 if (last_field != NULL_TREE
3116 && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
3117 return true;
3119 return false;
3122 /* Return true if the lhs and rhs of an assignment have different types. */
3124 static bool
3125 is_any_cast_p (const gimple *stmt)
3127 if (const gassign *assign = dyn_cast <const gassign *> (stmt))
3128 return gimple_assign_cast_p (assign)
3129 || !pending_diagnostic::same_tree_p (
3130 TREE_TYPE (gimple_assign_lhs (assign)),
3131 TREE_TYPE (gimple_assign_rhs1 (assign)));
3132 else if (const gcall *call = dyn_cast <const gcall *> (stmt))
3134 tree lhs = gimple_call_lhs (call);
3135 return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
3136 TREE_TYPE (gimple_call_lhs (call)),
3137 gimple_call_return_type (call));
3140 return false;
3143 /* On pointer assignments, check whether the buffer size of
3144 RHS_SVAL is compatible with the type of the LHS_REG.
3145 Use a non-null CTXT to report allocation size warnings. */
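/* A minimal sketch of the kind of code this catches (hypothetical
   user code):

     int32_t *ptr = (int32_t *) malloc (11);

   where 11 is not a multiple of sizeof (int32_t).  */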
3147 void
3148 region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
3149 region_model_context *ctxt) const
3151 if (!ctxt || ctxt->get_stmt () == NULL)
3152 return;
3153 /* Only report warnings on assignments that actually change the type. */
3154 if (!is_any_cast_p (ctxt->get_stmt ()))
3155 return;
3157 const region_svalue *reg_sval = dyn_cast <const region_svalue *> (rhs_sval);
3158 if (!reg_sval)
3159 return;
3161 tree pointer_type = lhs_reg->get_type ();
3162 if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
3163 return;
3165 tree pointee_type = TREE_TYPE (pointer_type);
3166 /* Make sure that the type on the left-hand side actually has a size. */
3167 if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
3168 || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
3169 return;
3171 /* Bail out early on pointers to structs where we cannot
3172 deduce whether the buffer size is compatible. */
3173 bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
3174 if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
3175 return;
3177 tree pointee_size_tree = size_in_bytes (pointee_type);
3178 /* We give up if the type size is not known at compile-time or the
3179 type size is always compatible regardless of the buffer size. */
3180 if (TREE_CODE (pointee_size_tree) != INTEGER_CST
3181 || integer_zerop (pointee_size_tree)
3182 || integer_onep (pointee_size_tree))
3183 return;
3185 const region *rhs_reg = reg_sval->get_pointee ();
3186 const svalue *capacity = get_capacity (rhs_reg);
3187 switch (capacity->get_kind ())
3189 case svalue_kind::SK_CONSTANT:
3191 const constant_svalue *cst_cap_sval
3192 = as_a <const constant_svalue *> (capacity);
3193 tree cst_cap = cst_cap_sval->get_constant ();
3194 if (TREE_CODE (cst_cap) == INTEGER_CST
3195 && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
3196 is_struct))
3197 ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
3198 cst_cap));
3200 break;
3201 default:
3203 if (!is_struct)
3205 size_visitor v (pointee_size_tree, capacity, m_constraints);
3206 if (!v.get_result ())
3208 tree expr = get_representative_tree (capacity);
3209 ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg,
3210 rhs_reg,
3211 expr));
3214 break;
3219 /* Set the value of the region given by LHS_REG to the value given
3220 by RHS_SVAL.
3221 Use CTXT to report any warnings associated with writing to LHS_REG. */
3223 void
3224 region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
3225 region_model_context *ctxt)
3227 gcc_assert (lhs_reg);
3228 gcc_assert (rhs_sval);
3230 /* Setting the value of an empty region is a no-op. */
3231 if (lhs_reg->empty_p ())
3232 return;
3234 check_region_size (lhs_reg, rhs_sval, ctxt);
3236 check_region_for_write (lhs_reg, ctxt);
3238 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
3239 ctxt ? ctxt->get_uncertainty () : NULL);
3242 /* Set the value of the region given by LHS to the value given by RHS. */
3244 void
3245 region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
3247 const region *lhs_reg = get_lvalue (lhs, ctxt);
3248 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
3249 gcc_assert (lhs_reg);
3250 gcc_assert (rhs_sval);
3251 set_value (lhs_reg, rhs_sval, ctxt);
3254 /* Remove all bindings overlapping REG within the store. */
3256 void
3257 region_model::clobber_region (const region *reg)
3259 m_store.clobber_region (m_mgr->get_store_manager(), reg);
3262 /* Remove any bindings for REG within the store. */
3264 void
3265 region_model::purge_region (const region *reg)
3267 m_store.purge_region (m_mgr->get_store_manager(), reg);
3270 /* Fill REG with SVAL. */
3272 void
3273 region_model::fill_region (const region *reg, const svalue *sval)
3275 m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
3278 /* Zero-fill REG. */
3280 void
3281 region_model::zero_fill_region (const region *reg)
3283 m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
3286 /* Mark REG as having unknown content. */
3288 void
3289 region_model::mark_region_as_unknown (const region *reg,
3290 uncertainty_t *uncertainty)
3292 m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
3293 uncertainty);
3296 /* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
3297 this model. */
3299 tristate
3300 region_model::eval_condition (const svalue *lhs,
3301 enum tree_code op,
3302 const svalue *rhs) const
3304 gcc_assert (lhs);
3305 gcc_assert (rhs);
3307 /* For now, make no attempt to capture constraints on floating-point
3308 values. */
3309 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
3310 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
3311 return tristate::unknown ();
3313 /* See what we know based on the values. */
3315 /* Unwrap any unmergeable values. */
3316 lhs = lhs->unwrap_any_unmergeable ();
3317 rhs = rhs->unwrap_any_unmergeable ();
3319 if (lhs == rhs)
3321 /* If we have the same svalue, then we have equality
3322 (apart from NaN-handling).
3323 TODO: should this definitely be the case for poisoned values? */
3324 /* Poisoned and unknown values are "unknowable". */
3325 if (lhs->get_kind () == SK_POISONED
3326 || lhs->get_kind () == SK_UNKNOWN)
3327 return tristate::TS_UNKNOWN;
3329 switch (op)
3331 case EQ_EXPR:
3332 case GE_EXPR:
3333 case LE_EXPR:
3334 return tristate::TS_TRUE;
3336 case NE_EXPR:
3337 case GT_EXPR:
3338 case LT_EXPR:
3339 return tristate::TS_FALSE;
3341 default:
3342 /* For other ops, use the logic below. */
3343 break;
3347 /* If we have a pair of region_svalues, compare them. */
3348 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
3349 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
3351 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
3352 if (res.is_known ())
3353 return res;
3354 /* Otherwise, only known through constraints. */
3357 if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
3359 /* If we have a pair of constants, compare them. */
3360 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
3361 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
3362 else
3364 /* When we have one constant, put it on the RHS. */
3365 std::swap (lhs, rhs);
3366 op = swap_tree_comparison (op);
3369 gcc_assert (lhs->get_kind () != SK_CONSTANT);
3371 /* Handle comparison against zero. */
3372 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
3373 if (zerop (cst_rhs->get_constant ()))
3375 if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
3377 /* A region_svalue is a non-NULL pointer, except in certain
3378 special cases (see the comment for region::non_null_p). */
3379 const region *pointee = ptr->get_pointee ();
3380 if (pointee->non_null_p ())
3382 switch (op)
3384 default:
3385 gcc_unreachable ();
3387 case EQ_EXPR:
3388 case GE_EXPR:
3389 case LE_EXPR:
3390 return tristate::TS_FALSE;
3392 case NE_EXPR:
3393 case GT_EXPR:
3394 case LT_EXPR:
3395 return tristate::TS_TRUE;
3399 else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
3401 /* Treat offsets from a non-NULL pointer as being non-NULL. This
3402 isn't strictly true, in that eventually ptr++ will wrap
3403 around and be NULL, but it won't occur in practice and thus
3404 can be used to suppress effectively false positives that we
3405 shouldn't warn for. */
3406 if (binop->get_op () == POINTER_PLUS_EXPR)
3408 tristate lhs_ts = eval_condition (binop->get_arg0 (), op, rhs);
3409 if (lhs_ts.is_known ())
3410 return lhs_ts;
3413 else if (const unaryop_svalue *unaryop
3414 = lhs->dyn_cast_unaryop_svalue ())
3416 if (unaryop->get_op () == NEGATE_EXPR)
3418 /* e.g. "-X <= 0" is equivalent to "X >= 0". */
3419 tristate lhs_ts = eval_condition (unaryop->get_arg (),
3420 swap_tree_comparison (op),
3421 rhs);
3422 if (lhs_ts.is_known ())
3423 return lhs_ts;
3428 /* Handle rejection of equality for comparisons of the initial values of
3429 "external" values (such as params) with the address of locals. */
3430 if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
3431 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
3433 tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
3434 if (res.is_known ())
3435 return res;
3437 if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
3438 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
3440 tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
3441 if (res.is_known ())
3442 return res;
3445 if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
3446 if (tree rhs_cst = rhs->maybe_get_constant ())
3448 tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
3449 if (res.is_known ())
3450 return res;
3453 /* Handle comparisons between two svalues with more than one operand. */
3454 if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
3456 switch (op)
3458 default:
3459 break;
3460 case EQ_EXPR:
3462 /* TODO: binops can be equal even if they are not structurally
3463 equal in case of commutative operators. */
3464 tristate res = structural_equality (lhs, rhs);
3465 if (res.is_true ())
3466 return res;
3468 break;
3469 case LE_EXPR:
3471 tristate res = structural_equality (lhs, rhs);
3472 if (res.is_true ())
3473 return res;
3475 break;
3476 case GE_EXPR:
3478 tristate res = structural_equality (lhs, rhs);
3479 if (res.is_true ())
3480 return res;
3481 res = symbolic_greater_than (binop, rhs);
3482 if (res.is_true ())
3483 return res;
3485 break;
3486 case GT_EXPR:
3488 tristate res = symbolic_greater_than (binop, rhs);
3489 if (res.is_true ())
3490 return res;
3492 break;
3496 /* Otherwise, try constraints.
3497 Cast to const to ensure we don't change the constraint_manager as we
3498 do this (e.g. by creating equivalence classes). */
3499 const constraint_manager *constraints = m_constraints;
3500 return constraints->eval_condition (lhs, op, rhs);
3503 /* Subroutine of region_model::eval_condition, for rejecting
3504 equality of INIT_VAL(PARM) with &LOCAL. */
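/* For example (hypothetical user code):

     void test (int *p)
     {
       int local;
       if (p == &local)   // can't hold for INIT_VAL(p): "local" did not
         ...              // exist when the caller computed "p"
     }
*/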
3506 tristate
3507 region_model::compare_initial_and_pointer (const initial_svalue *init,
3508 const region_svalue *ptr) const
3510 const region *pointee = ptr->get_pointee ();
3512 /* If we have a pointer to something within a stack frame, it can't be the
3513 initial value of a param. */
3514 if (pointee->maybe_get_frame_region ())
3515 if (init->initial_value_of_param_p ())
3516 return tristate::TS_FALSE;
3518 return tristate::TS_UNKNOWN;
3521 /* Return true if SVAL is definitely positive. */
3523 static bool
3524 is_positive_svalue (const svalue *sval)
3526 if (tree cst = sval->maybe_get_constant ())
3527 return !zerop (cst) && get_range_pos_neg (cst) == 1;
3528 tree type = sval->get_type ();
3529 if (!type)
3530 return false;
3531 /* Consider a binary operation size_t + int. The analyzer wraps the int in
3532 an unaryop_svalue, converting it to a size_t, but in the dynamic execution
3533 the result can be smaller than the first operand (when the int is negative).
3534 Thus, we have to check whether the argument of the unaryop_svalue is also positive. */
3535 if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
3536 return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
3537 && is_positive_svalue (un_op->get_arg ());
3538 return TYPE_UNSIGNED (type);
3541 /* Return TS_TRUE if A is definitely larger than B.
3543 Limitation: does not account for integer overflows and does not try to
3544 return false, so it cannot be used negated. */
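/* For example (illustrative only), for symbolic values A and B and a
   positive value P:
   - "A + P > B" is known to be true when "A >= B" is known;
   - "A + P1 > B + P2" is known when "P1 > P2" and "A >= B" are known.  */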
3546 tristate
3547 region_model::symbolic_greater_than (const binop_svalue *bin_a,
3548 const svalue *b) const
3550 if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
3552 /* Eliminate the right-hand side of both svalues. */
3553 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
3554 if (bin_a->get_op () == bin_b->get_op ()
3555 && eval_condition (bin_a->get_arg1 (),
3556 GT_EXPR,
3557 bin_b->get_arg1 ()).is_true ()
3558 && eval_condition (bin_a->get_arg0 (),
3559 GE_EXPR,
3560 bin_b->get_arg0 ()).is_true ())
3561 return tristate (tristate::TS_TRUE);
3563 /* Otherwise, try to remove a positive offset or factor from BIN_A. */
3564 if (is_positive_svalue (bin_a->get_arg1 ())
3565 && eval_condition (bin_a->get_arg0 (),
3566 GE_EXPR, b).is_true ())
3567 return tristate (tristate::TS_TRUE);
3569 return tristate::unknown ();
3572 /* Return TS_TRUE if A and B are structurally equal.
3574 Structural equality means that A and B are equal if the svalues A and B have
3575 the same nodes at the same positions in the tree and the leaves are equal.
3576 Equality for conjured_svalues and initial_svalues is determined by comparing
3577 the pointers, while constants are compared by value. That behavior is useful
3578 for checking binop_svalues that evaluate to the same concrete value but
3579 might use one operand with a different type but the same constant value.
3581 For example,
3582 binop_svalue (mult_expr,
3583 initial_svalue (‘size_t’, decl_region (..., 'some_var')),
3584 constant_svalue (‘size_t’, 4))
3586 binop_svalue (mult_expr,
3587 initial_svalue (‘size_t’, decl_region (..., 'some_var'),
3588 constant_svalue (‘sizetype’, 4))
3589 are structurally equal. A concrete C code example where this occurs can
3590 be found in test7 of out-of-bounds-5.c. */
3592 tristate
3593 region_model::structural_equality (const svalue *a, const svalue *b) const
3595 /* If A and B are referentially equal, they are also structurally equal. */
3596 if (a == b)
3597 return tristate (tristate::TS_TRUE);
3599 switch (a->get_kind ())
3601 default:
3602 return tristate::unknown ();
3603 /* SK_CONJURED and SK_INITIAL are already handled
3604 by the referential equality above. */
3605 case SK_CONSTANT:
3607 tree a_cst = a->maybe_get_constant ();
3608 tree b_cst = b->maybe_get_constant ();
3609 if (a_cst && b_cst)
3610 return tristate (tree_int_cst_equal (a_cst, b_cst));
3612 return tristate (tristate::TS_FALSE);
3613 case SK_UNARYOP:
3615 const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
3616 if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
3617 return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
3618 un_b->get_type ())
3619 && un_a->get_op () == un_b->get_op ()
3620 && structural_equality (un_a->get_arg (),
3621 un_b->get_arg ()));
3623 return tristate (tristate::TS_FALSE);
3624 case SK_BINOP:
3626 const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
3627 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
3628 return tristate (bin_a->get_op () == bin_b->get_op ()
3629 && structural_equality (bin_a->get_arg0 (),
3630 bin_b->get_arg0 ())
3631 && structural_equality (bin_a->get_arg1 (),
3632 bin_b->get_arg1 ()));
3634 return tristate (tristate::TS_FALSE);
3638 /* Handle various constraints of the form:
3639 LHS: ((bool)INNER_LHS INNER_OP INNER_RHS)
3640 OP : == or !=
3641 RHS: zero
3642 and (with a cast):
3643 LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
3644 OP : == or !=
3645 RHS: zero
3646 by adding constraints for INNER_LHS INNER_OP INNER_RHS.
3648 Return true if this function can fully handle the constraint; if
3649 so, add the implied constraint(s) and write true to *OUT if they
3650 are consistent with existing constraints, or write false to *OUT
3651 if they contradict existing constraints.
3653 Return false for cases that this function doesn't know how to handle.
3655 For example, if we're checking a stored conditional, we'll have
3656 something like:
3657 LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
3658 OP : NE_EXPR
3659 RHS: zero
3660 which this function can turn into an add_constraint of:
3661 (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
3663 Similarly, optimized && and || conditionals lead to e.g.
3664 if (p && q)
3665 becoming gimple like this:
3666 _1 = p_6 == 0B;
3667 _2 = q_8 == 0B;
3668 _3 = _1 | _2;
3669 On the "_3 is false" branch we can have constraints of the form:
3670 ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
3671 | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
3672 == 0
3673 which implies that both _1 and _2 are false,
3674 which this function can turn into a pair of add_constraints of
3675 (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
3676 and:
3677 (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */
3679 bool
3680 region_model::add_constraints_from_binop (const svalue *outer_lhs,
3681 enum tree_code outer_op,
3682 const svalue *outer_rhs,
3683 bool *out,
3684 region_model_context *ctxt)
3686 while (const svalue *cast = outer_lhs->maybe_undo_cast ())
3687 outer_lhs = cast;
3688 const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
3689 if (!binop_sval)
3690 return false;
3691 if (!outer_rhs->all_zeroes_p ())
3692 return false;
3694 const svalue *inner_lhs = binop_sval->get_arg0 ();
3695 enum tree_code inner_op = binop_sval->get_op ();
3696 const svalue *inner_rhs = binop_sval->get_arg1 ();
3698 if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
3699 return false;
3701 /* We have either
3702 - "OUTER_LHS != false" (i.e. OUTER is true), or
3703 - "OUTER_LHS == false" (i.e. OUTER is false). */
3704 bool is_true = outer_op == NE_EXPR;
3706 switch (inner_op)
3708 default:
3709 return false;
3711 case EQ_EXPR:
3712 case NE_EXPR:
3714 /* ...and "(inner_lhs OP inner_rhs) == 0"
3715 then (inner_lhs OP inner_rhs) must have the same
3716 logical value as LHS. */
3717 if (!is_true)
3718 inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
3719 *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
3720 return true;
3722 break;
3724 case BIT_AND_EXPR:
3725 if (is_true)
3727 /* ...and "(inner_lhs & inner_rhs) != 0"
3728 then both inner_lhs and inner_rhs must be true. */
3729 const svalue *false_sval
3730 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
3731 bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
3732 bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
3733 *out = sat1 && sat2;
3734 return true;
3736 return false;
3738 case BIT_IOR_EXPR:
3739 if (!is_true)
3741 /* ...and "(inner_lhs | inner_rhs) == 0"
3742 i.e. "(inner_lhs | inner_rhs)" is false
3743 then both inner_lhs and inner_rhs must be false. */
3744 const svalue *false_sval
3745 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
3746 bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
3747 bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
3748 *out = sat1 && sat2;
3749 return true;
3751 return false;
3755 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
3756 If it is consistent with existing constraints, add it, and return true.
3757 Return false if it contradicts existing constraints.
3758 Use CTXT for reporting any diagnostics associated with the accesses. */
3760 bool
3761 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3762 region_model_context *ctxt)
3764 /* For now, make no attempt to capture constraints on floating-point
3765 values. */
3766 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3767 return true;
3769 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
3770 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
3772 return add_constraint (lhs_sval, op, rhs_sval, ctxt);
3775 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
3776 If it is consistent with existing constraints, add it, and return true.
3777 Return false if it contradicts existing constraints.
3778 Use CTXT for reporting any diagnostics associated with the accesses. */
3780 bool
3781 region_model::add_constraint (const svalue *lhs,
3782 enum tree_code op,
3783 const svalue *rhs,
3784 region_model_context *ctxt)
3786 tristate t_cond = eval_condition (lhs, op, rhs);
3788 /* If we already have the condition, do nothing. */
3789 if (t_cond.is_true ())
3790 return true;
3792 /* Reject a constraint that would contradict existing knowledge, as
3793 unsatisfiable. */
3794 if (t_cond.is_false ())
3795 return false;
3797 bool out;
3798 if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
3799 return out;
3801 /* Attempt to store the constraint. */
3802 if (!m_constraints->add_constraint (lhs, op, rhs))
3803 return false;
3805 /* Notify the context, if any. This exists so that the state machines
3806 in a program_state can be notified about the condition, and so can
3807 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
3808 when synthesizing constraints as above. */
3809 if (ctxt)
3810 ctxt->on_condition (lhs, op, rhs);
3812 /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
3813 the case where REGION is heap-allocated and thus could be NULL). */
3814 if (tree rhs_cst = rhs->maybe_get_constant ())
3815 if (op == EQ_EXPR && zerop (rhs_cst))
3816 if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
3817 unset_dynamic_extents (region_sval->get_pointee ());
3819 return true;
3822 /* As above, but when returning false, if OUT is non-NULL, write a
3823 new rejected_constraint to *OUT. */
3825 bool
3826 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3827 region_model_context *ctxt,
3828 rejected_constraint **out)
3830 bool sat = add_constraint (lhs, op, rhs, ctxt);
3831 if (!sat && out)
3832 *out = new rejected_op_constraint (*this, lhs, op, rhs);
3833 return sat;
3836 /* Determine what is known about the condition "LHS OP RHS" within
3837 this model.
3838 Use CTXT for reporting any diagnostics associated with the accesses. */
3840 tristate
3841 region_model::eval_condition (tree lhs,
3842 enum tree_code op,
3843 tree rhs,
3844 region_model_context *ctxt) const
3846 /* For now, make no attempt to model constraints on floating-point
3847 values. */
3848 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3849 return tristate::unknown ();
3851 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
3854 /* Implementation of region_model::get_representative_path_var.
3855 Attempt to return a path_var that represents SVAL, or return NULL_TREE.
3856 Use VISITED to prevent infinite mutual recursion with the overload for
3857 regions. */
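/* E.g. (an illustrative sketch; the names are hypothetical): for an
   svalue bound to field "f" of a local "s" in a frame at stack depth 1,
   this would build a path_var wrapping a COMPONENT_REF for "s.f", with
   m_stack_depth == 1.  */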
3859 path_var
3860 region_model::get_representative_path_var_1 (const svalue *sval,
3861 svalue_set *visited) const
3863 gcc_assert (sval);
3865 /* Prevent infinite recursion. */
3866 if (visited->contains (sval))
3867 return path_var (NULL_TREE, 0);
3868 visited->add (sval);
3870 /* Handle casts by recursion into get_representative_path_var. */
3871 if (const svalue *cast_sval = sval->maybe_undo_cast ())
3873 path_var result = get_representative_path_var (cast_sval, visited);
3874 tree orig_type = sval->get_type ();
3875 /* If necessary, wrap the result in a cast. */
3876 if (result.m_tree && orig_type)
3877 result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
3878 return result;
3881 auto_vec<path_var> pvs;
3882 m_store.get_representative_path_vars (this, visited, sval, &pvs);
3884 if (tree cst = sval->maybe_get_constant ())
3885 pvs.safe_push (path_var (cst, 0));
3887 /* Handle string literals and various other pointers. */
3888 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
3890 const region *reg = ptr_sval->get_pointee ();
3891 if (path_var pv = get_representative_path_var (reg, visited))
3892 return path_var (build1 (ADDR_EXPR,
3893 sval->get_type (),
3894 pv.m_tree),
3895 pv.m_stack_depth);
3898 /* If we have a sub_svalue, look for ways to represent the parent. */
3899 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
3901 const svalue *parent_sval = sub_sval->get_parent ();
3902 const region *subreg = sub_sval->get_subregion ();
3903 if (path_var parent_pv
3904 = get_representative_path_var (parent_sval, visited))
3905 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
3906 return path_var (build3 (COMPONENT_REF,
3907 sval->get_type (),
3908 parent_pv.m_tree,
3909 field_reg->get_field (),
3910 NULL_TREE),
3911 parent_pv.m_stack_depth);
3914 /* Handle binops. */
3915 if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
3916 if (path_var lhs_pv
3917 = get_representative_path_var (binop_sval->get_arg0 (), visited))
3918 if (path_var rhs_pv
3919 = get_representative_path_var (binop_sval->get_arg1 (), visited))
3920 return path_var (build2 (binop_sval->get_op (),
3921 sval->get_type (),
3922 lhs_pv.m_tree, rhs_pv.m_tree),
3923 lhs_pv.m_stack_depth);
3925 if (pvs.length () < 1)
3926 return path_var (NULL_TREE, 0);
3928 pvs.qsort (readability_comparator);
3929 return pvs[0];
3932 /* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
3933 Use VISITED to prevent infinite mutual recursion with the overload for
3934 regions.
3936 This function defers to get_representative_path_var_1 to do the work;
3937 it adds verification that get_representative_path_var_1 returned a tree
3938 of the correct type. */
3940 path_var
3941 region_model::get_representative_path_var (const svalue *sval,
3942 svalue_set *visited) const
3944 if (sval == NULL)
3945 return path_var (NULL_TREE, 0);
3947 tree orig_type = sval->get_type ();
3949 path_var result = get_representative_path_var_1 (sval, visited);
3951 /* Verify that the result has the same type as SVAL, if any. */
3952 if (result.m_tree && orig_type)
3953 gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
3955 return result;
3958 /* Attempt to return a tree that represents SVAL, or return NULL_TREE.
3960 Strip off any top-level cast, to avoid messages like
3961 double-free of '(void *)ptr'
3962 from analyzer diagnostics. */
3964 tree
3965 region_model::get_representative_tree (const svalue *sval) const
3967 svalue_set visited;
3968 tree expr = get_representative_path_var (sval, &visited).m_tree;
3970 /* Strip off any top-level cast. */
3971 if (expr && TREE_CODE (expr) == NOP_EXPR)
3972 expr = TREE_OPERAND (expr, 0);
3974 return fixup_tree_for_diagnostic (expr);
3977 tree
3978 region_model::get_representative_tree (const region *reg) const
3980 svalue_set visited;
3981 tree expr = get_representative_path_var (reg, &visited).m_tree;
3983 /* Strip off any top-level cast. */
3984 if (expr && TREE_CODE (expr) == NOP_EXPR)
3985 expr = TREE_OPERAND (expr, 0);
3987 return fixup_tree_for_diagnostic (expr);
3990 /* Implementation of region_model::get_representative_path_var.
3992 Attempt to return a path_var that represents REG, or return
3993 the NULL path_var.
3994 For example, a region for a field of a local would be a path_var
3995 wrapping a COMPONENT_REF.
3996 Use VISITED to prevent infinite mutual recursion with the overload for
3997 svalues. */
3999 path_var
4000 region_model::get_representative_path_var_1 (const region *reg,
4001 svalue_set *visited) const
4003 switch (reg->get_kind ())
4005 default:
4006 gcc_unreachable ();
4008 case RK_FRAME:
4009 case RK_GLOBALS:
4010 case RK_CODE:
4011 case RK_HEAP:
4012 case RK_STACK:
4013 case RK_THREAD_LOCAL:
4014 case RK_ROOT:
4015 /* Regions that represent memory spaces are not expressible as trees. */
4016 return path_var (NULL_TREE, 0);
4018 case RK_FUNCTION:
4020 const function_region *function_reg
4021 = as_a <const function_region *> (reg);
4022 return path_var (function_reg->get_fndecl (), 0);
4024 case RK_LABEL:
4026 const label_region *label_reg = as_a <const label_region *> (reg);
4027 return path_var (label_reg->get_label (), 0);
4030 case RK_SYMBOLIC:
4032 const symbolic_region *symbolic_reg
4033 = as_a <const symbolic_region *> (reg);
4034 const svalue *pointer = symbolic_reg->get_pointer ();
4035 path_var pointer_pv = get_representative_path_var (pointer, visited);
4036 if (!pointer_pv)
4037 return path_var (NULL_TREE, 0);
4038 tree offset = build_int_cst (pointer->get_type (), 0);
4039 return path_var (build2 (MEM_REF,
4040 reg->get_type (),
4041 pointer_pv.m_tree,
4042 offset),
4043 pointer_pv.m_stack_depth);
4045 case RK_DECL:
4047 const decl_region *decl_reg = as_a <const decl_region *> (reg);
4048 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
4050 case RK_FIELD:
4052 const field_region *field_reg = as_a <const field_region *> (reg);
4053 path_var parent_pv
4054 = get_representative_path_var (reg->get_parent_region (), visited);
4055 if (!parent_pv)
4056 return path_var (NULL_TREE, 0);
4057 return path_var (build3 (COMPONENT_REF,
4058 reg->get_type (),
4059 parent_pv.m_tree,
4060 field_reg->get_field (),
4061 NULL_TREE),
4062 parent_pv.m_stack_depth);
4065 case RK_ELEMENT:
4067 const element_region *element_reg
4068 = as_a <const element_region *> (reg);
4069 path_var parent_pv
4070 = get_representative_path_var (reg->get_parent_region (), visited);
4071 if (!parent_pv)
4072 return path_var (NULL_TREE, 0);
4073 path_var index_pv
4074 = get_representative_path_var (element_reg->get_index (), visited);
4075 if (!index_pv)
4076 return path_var (NULL_TREE, 0);
4077 return path_var (build4 (ARRAY_REF,
4078 reg->get_type (),
4079 parent_pv.m_tree, index_pv.m_tree,
4080 NULL_TREE, NULL_TREE),
4081 parent_pv.m_stack_depth);
4084 case RK_OFFSET:
4086 const offset_region *offset_reg
4087 = as_a <const offset_region *> (reg);
4088 path_var parent_pv
4089 = get_representative_path_var (reg->get_parent_region (), visited);
4090 if (!parent_pv)
4091 return path_var (NULL_TREE, 0);
4092 path_var offset_pv
4093 = get_representative_path_var (offset_reg->get_byte_offset (),
4094 visited);
4095 if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
4096 return path_var (NULL_TREE, 0);
4097 tree addr_parent = build1 (ADDR_EXPR,
4098 build_pointer_type (reg->get_type ()),
4099 parent_pv.m_tree);
4100 return path_var (build2 (MEM_REF,
4101 reg->get_type (),
4102 addr_parent, offset_pv.m_tree),
4103 parent_pv.m_stack_depth);
4106 case RK_SIZED:
4107 return path_var (NULL_TREE, 0);
4109 case RK_CAST:
4111 path_var parent_pv
4112 = get_representative_path_var (reg->get_parent_region (), visited);
4113 if (!parent_pv)
4114 return path_var (NULL_TREE, 0);
4115 return path_var (build1 (NOP_EXPR,
4116 reg->get_type (),
4117 parent_pv.m_tree),
4118 parent_pv.m_stack_depth);
4121 case RK_HEAP_ALLOCATED:
4122 case RK_ALLOCA:
4123 /* No good way to express heap-allocated/alloca regions as trees. */
4124 return path_var (NULL_TREE, 0);
4126 case RK_STRING:
4128 const string_region *string_reg = as_a <const string_region *> (reg);
4129 return path_var (string_reg->get_string_cst (), 0);
4132 case RK_VAR_ARG:
4133 case RK_ERRNO:
4134 case RK_UNKNOWN:
4135 return path_var (NULL_TREE, 0);
4139 /* Attempt to return a path_var that represents REG, or return
4140 the NULL path_var.
4141 For example, a region for a field of a local would be a path_var
4142 wrapping a COMPONENT_REF.
4143 Use VISITED to prevent infinite mutual recursion with the overload for
4144 svalues.
4146 This function defers to get_representative_path_var_1 to do the work;
4147 it adds verification that get_representative_path_var_1 returned a tree
4148 of the correct type. */
4150 path_var
4151 region_model::get_representative_path_var (const region *reg,
4152 svalue_set *visited) const
4154 path_var result = get_representative_path_var_1 (reg, visited);
4156 /* Verify that the result has the same type as REG, if any. */
4157 if (result.m_tree && reg->get_type ())
4158 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
4160 return result;
4163 /* Update this model for any phis in SNODE, assuming we came from
4164 LAST_CFG_SUPEREDGE. */
4166 void
4167 region_model::update_for_phis (const supernode *snode,
4168 const cfg_superedge *last_cfg_superedge,
4169 region_model_context *ctxt)
4171 gcc_assert (last_cfg_superedge);
4173 /* Copy this state and pass it to handle_phi so that all of the phi stmts
4174 are effectively handled simultaneously. */
4175 const region_model old_state (*this);
4177 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
4178 !gsi_end_p (gpi); gsi_next (&gpi))
4180 gphi *phi = gpi.phi ();
4182 tree src = last_cfg_superedge->get_phi_arg (phi);
4183 tree lhs = gimple_phi_result (phi);
4185 /* Update next_state based on phi and old_state. */
4186 handle_phi (phi, lhs, src, old_state, ctxt);
4190 /* Attempt to update this model for taking EDGE (where the last statement
4191 was LAST_STMT), returning true if the edge can be taken, false
4192 otherwise.
4193 When returning false, if OUT is non-NULL, write a new rejected_constraint
4194 to it.
4196 For CFG superedges where LAST_STMT is a conditional or a switch
4197 statement, attempt to add the relevant conditions for EDGE to this
4198 model, returning true if they are feasible, or false if they are
4199 impossible.
4201 For call superedges, push frame information and store arguments
4202 into parameters.
4204 For return superedges, pop frame information and store return
4205 values into any lhs.
4207 Rejection of call/return superedges happens elsewhere, in
4208 program_point::on_edge (i.e. based on program point, rather
4209 than program state). */
4211 bool
4212 region_model::maybe_update_for_edge (const superedge &edge,
4213 const gimple *last_stmt,
4214 region_model_context *ctxt,
4215 rejected_constraint **out)
4217 /* Handle frame updates for interprocedural edges. */
4218 switch (edge.m_kind)
4220 default:
4221 break;
4223 case SUPEREDGE_CALL:
4225 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
4226 update_for_call_superedge (*call_edge, ctxt);
4228 break;
4230 case SUPEREDGE_RETURN:
4232 const return_superedge *return_edge
4233 = as_a <const return_superedge *> (&edge);
4234 update_for_return_superedge (*return_edge, ctxt);
4236 break;
4238 case SUPEREDGE_INTRAPROCEDURAL_CALL:
4239 /* This is a no-op for call summaries; we should already
4240 have handled the effect of the call summary at the call stmt. */
4241 break;
4244 if (last_stmt == NULL)
4245 return true;
4247 /* Apply any constraints for conditionals/switch statements. */
4249 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
4251 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
4252 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
4255 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
4257 const switch_cfg_superedge *switch_sedge
4258 = as_a <const switch_cfg_superedge *> (&edge);
4259 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
4260 ctxt, out);
4263 /* Apply any constraints due to an exception being thrown. */
4264 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
4265 if (cfg_sedge->get_flags () & EDGE_EH)
4266 return apply_constraints_for_exception (last_stmt, ctxt, out);
4268 return true;
4271 /* Push a new frame_region on to the stack region.
4272 Populate the frame_region with child regions for the function call's
4273 parameters, using values from the arguments at the callsite in the
4274 caller's frame. */
4276 void
4277 region_model::update_for_gcall (const gcall *call_stmt,
4278 region_model_context *ctxt,
4279 function *callee)
4281 /* Build a vec of argument svalues, using the current top
4282 frame for resolving tree expressions. */
4283 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
4285 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
4287 tree arg = gimple_call_arg (call_stmt, i);
4288 arg_svals.quick_push (get_rvalue (arg, ctxt));
4291 if (!callee)
4293 /* Get the function * from the gcall. */
4294 tree fn_decl = get_fndecl_for_call (call_stmt, ctxt);
4295 callee = DECL_STRUCT_FUNCTION (fn_decl);
4298 push_frame (callee, &arg_svals, ctxt);
4301 /* Pop the top-most frame_region from the stack, and copy the return
4302 region's values (if any) into the region for the lvalue of the LHS of
4303 the call (if any). */
4305 void
4306 region_model::update_for_return_gcall (const gcall *call_stmt,
4307 region_model_context *ctxt)
4309 /* Get the lvalue for the result of the call, passing it to pop_frame,
4310 so that pop_frame can determine the region with respect to the
4311 *caller* frame. */
4312 tree lhs = gimple_call_lhs (call_stmt);
4313 pop_frame (lhs, NULL, ctxt);
4316 /* Extract calling information from the superedge and update the model for the
4317 call. */
4319 void
4320 region_model::update_for_call_superedge (const call_superedge &call_edge,
4321 region_model_context *ctxt)
4323 const gcall *call_stmt = call_edge.get_call_stmt ();
4324 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
4327 /* Extract calling information from the return superedge and update the model
4328 for the returning call. */
4330 void
4331 region_model::update_for_return_superedge (const return_superedge &return_edge,
4332 region_model_context *ctxt)
4334 const gcall *call_stmt = return_edge.get_call_stmt ();
4335 update_for_return_gcall (call_stmt, ctxt);
4338 /* Attempt to use R to replay SUMMARY into this object.
4339 Return true if it is possible. */
4341 bool
4342 region_model::replay_call_summary (call_summary_replay &r,
4343 const region_model &summary)
4345 gcc_assert (summary.get_stack_depth () == 1);
4347 m_store.replay_call_summary (r, summary.m_store);
4349 if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
4350 return false;
4352 for (auto kv : summary.m_dynamic_extents)
4354 const region *summary_reg = kv.first;
4355 const region *caller_reg = r.convert_region_from_summary (summary_reg);
4356 if (!caller_reg)
4357 continue;
4358 const svalue *summary_sval = kv.second;
4359 const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
4360 if (!caller_sval)
4361 continue;
4362 m_dynamic_extents.put (caller_reg, caller_sval);
4365 return true;
4368 /* Given a true or false edge guarded by conditional statement COND_STMT,
4369 determine appropriate constraints for the edge to be taken.
4371 If they are feasible, add the constraints and return true.
4373 Return false if the constraints contradict existing knowledge
4374 (and so the edge should not be taken).
4375 When returning false, if OUT is non-NULL, write a new rejected_constraint
4376 to it. */
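/* E.g. (an illustrative sketch): for "if (x < 10)", taking the true edge
   adds the constraint "x < 10" (LT_EXPR), whereas taking the false edge
   adds the inverted "x >= 10" (GE_EXPR), via invert_tree_comparison
   below.  */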
4378 bool
4379 region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
4380 const gcond *cond_stmt,
4381 region_model_context *ctxt,
4382 rejected_constraint **out)
4384 ::edge cfg_edge = sedge.get_cfg_edge ();
4385 gcc_assert (cfg_edge != NULL);
4386 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
4388 enum tree_code op = gimple_cond_code (cond_stmt);
4389 tree lhs = gimple_cond_lhs (cond_stmt);
4390 tree rhs = gimple_cond_rhs (cond_stmt);
4391 if (cfg_edge->flags & EDGE_FALSE_VALUE)
4392 op = invert_tree_comparison (op, false /* honor_nans */);
4393 return add_constraint (lhs, op, rhs, ctxt, out);
4396 /* Return true iff SWITCH_STMT has a non-default label that contains
4397 INT_CST. */
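/* E.g. (a hypothetical switch):
     switch (i) { default: ...; case 1: ...; case 5 ... 9: ...; }
   here this returns true for INT_CST 7 (it lies within "case 5 ... 9")
   and false for INT_CST 3 (no non-default case contains it).  */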
4399 static bool
4400 has_nondefault_case_for_value_p (const gswitch *switch_stmt, tree int_cst)
4402 /* We expect the initial label to be the default; skip it. */
4403 gcc_assert (CASE_LOW (gimple_switch_label (switch_stmt, 0)) == NULL);
4404 unsigned min_idx = 1;
4405 unsigned max_idx = gimple_switch_num_labels (switch_stmt) - 1;
4407 /* Binary search: try to find the label containing INT_CST.
4408 This requires the cases to be sorted by CASE_LOW (done by the
4409 gimplifier). */
4410 while (max_idx >= min_idx)
4412 unsigned case_idx = (min_idx + max_idx) / 2;
4413 tree label = gimple_switch_label (switch_stmt, case_idx);
4414 tree low = CASE_LOW (label);
4415 gcc_assert (low);
4416 tree high = CASE_HIGH (label);
4417 if (!high)
4418 high = low;
4419 if (tree_int_cst_compare (int_cst, low) < 0)
4421 /* INT_CST is below the range of this label. */
4422 gcc_assert (case_idx > 0);
4423 max_idx = case_idx - 1;
4425 else if (tree_int_cst_compare (int_cst, high) > 0)
4427 /* INT_CST is above the range of this case. */
4428 min_idx = case_idx + 1;
4430 else
4431 /* This case contains INT_CST. */
4432 return true;
4434 /* Not found. */
4435 return false;
4438 /* Return true iff SWITCH_STMT (which must be on an enum value)
4439 has nondefault cases handling all values in the enum. */
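/* E.g. (a hypothetical enum): for "enum e { A, B, C }", a switch with
   non-default cases for A, B and C returns true; if the case for C is
   missing, it returns false.  */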
4441 static bool
4442 has_nondefault_cases_for_all_enum_values_p (const gswitch *switch_stmt)
4444 gcc_assert (switch_stmt);
4445 tree type = TREE_TYPE (gimple_switch_index (switch_stmt));
4446 gcc_assert (TREE_CODE (type) == ENUMERAL_TYPE);
4448 for (tree enum_val_iter = TYPE_VALUES (type);
4449 enum_val_iter;
4450 enum_val_iter = TREE_CHAIN (enum_val_iter))
4452 tree enum_val = TREE_VALUE (enum_val_iter);
4453 gcc_assert (TREE_CODE (enum_val) == CONST_DECL);
4454 gcc_assert (TREE_CODE (DECL_INITIAL (enum_val)) == INTEGER_CST);
4455 if (!has_nondefault_case_for_value_p (switch_stmt,
4456 DECL_INITIAL (enum_val)))
4457 return false;
4459 return true;
4462 /* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
4463 for the edge to be taken.
4465 If they are feasible, add the constraints and return true.
4467 Return false if the constraints contradict existing knowledge
4468 (and so the edge should not be taken).
4469 When returning false, if OUT is non-NULL, write a new rejected_constraint
4470 to it. */
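/* E.g. (an illustrative sketch): for "switch (i)", the edge for
   "case 5 ... 9" leads to adding the bounded range [5, 9] on the svalue
   for "i", whereas an explicit "default" edge gets the complement of all
   the case ranges (a simplified description of the ranges built by
   get_or_create_ranges_for_switch below).  */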
4472 bool
4473 region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
4474 const gswitch *switch_stmt,
4475 region_model_context *ctxt,
4476 rejected_constraint **out)
4478 tree index = gimple_switch_index (switch_stmt);
4479 const svalue *index_sval = get_rvalue (index, ctxt);
4481 /* If we're switching based on an enum type, assume that the user is only
4482 working with values from the enum. Hence if this is an
4483 implicitly-created "default", assume it doesn't get followed.
4484 This fixes numerous "uninitialized" false positives where we otherwise
4485 consider jumping past the initialization cases. */
4487 if (/* Don't check during feasibility-checking (when ctxt is NULL). */
4488 ctxt
4489 /* Must be an enum value. */
4490 && index_sval->get_type ()
4491 && TREE_CODE (TREE_TYPE (index)) == ENUMERAL_TYPE
4492 && TREE_CODE (index_sval->get_type ()) == ENUMERAL_TYPE
4493 /* If we have a constant, then we can check it directly. */
4494 && index_sval->get_kind () != SK_CONSTANT
4495 && edge.implicitly_created_default_p ()
4496 && has_nondefault_cases_for_all_enum_values_p (switch_stmt)
4497 /* Don't do this if there's a chance that the index is
4498 attacker-controlled. */
4499 && !ctxt->possibly_tainted_p (index_sval))
4501 if (out)
4502 *out = new rejected_default_case (*this);
4503 return false;
4506 bounded_ranges_manager *ranges_mgr = get_range_manager ();
4507 const bounded_ranges *all_cases_ranges
4508 = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
4509 bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
4510 if (!sat && out)
4511 *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
4512 if (sat && ctxt && !all_cases_ranges->empty_p ())
4513 ctxt->on_bounded_ranges (*index_sval, *all_cases_ranges);
4514 return sat;
4517 /* Apply any constraints due to an exception being thrown at LAST_STMT.
4519 If they are feasible, add the constraints and return true.
4521 Return false if the constraints contradict existing knowledge
4522 (and so the edge should not be taken).
4523 When returning false, if OUT is non-NULL, write a new rejected_constraint
4524 to it. */
4526 bool
4527 region_model::apply_constraints_for_exception (const gimple *last_stmt,
4528 region_model_context *ctxt,
4529 rejected_constraint **out)
4531 gcc_assert (last_stmt);
4532 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
4533 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
4534 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
4535 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
4537 /* We have an exception thrown from operator new.
4538 Add a constraint that the result was NULL, to avoid a false
4539 leak report due to the result being lost when following
4540 the EH edge. */
4541 if (tree lhs = gimple_call_lhs (call))
4542 return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
4543 return true;
4545 return true;
4548 /* For use with push_frame when handling a top-level call within the analysis.
4549 PARAM has a defined but unknown initial value.
4550 Anything it points to has escaped, since the calling context "knows"
4551 the pointer, and thus calls to unknown functions could read/write into
4552 the region.
4553 If NONNULL is true, then assume that PARAM must be non-NULL. */
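/* E.g. (an illustrative sketch): when analyzing
     void foo (int *p);
   as the top-level function, "p" gets a defined-but-unknown initial
   value, anything "p" points to is marked as having escaped, and if "p"
   is covered by __attribute__((nonnull)) then the constraint
   "p != NULL" is added.  */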
4555 void
4556 region_model::on_top_level_param (tree param,
4557 bool nonnull,
4558 region_model_context *ctxt)
4560 if (POINTER_TYPE_P (TREE_TYPE (param)))
4562 const region *param_reg = get_lvalue (param, ctxt);
4563 const svalue *init_ptr_sval
4564 = m_mgr->get_or_create_initial_value (param_reg);
4565 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
4566 m_store.mark_as_escaped (pointee_reg);
4567 if (nonnull)
4569 const svalue *null_ptr_sval
4570 = m_mgr->get_or_create_null_ptr (TREE_TYPE (param));
4571 add_constraint (init_ptr_sval, NE_EXPR, null_ptr_sval, ctxt);
4576 /* Update this region_model to reflect pushing a frame onto the stack
4577 for a call to FUN.
4579 If ARG_SVALS is non-NULL, use it to populate the parameters
4580 in the new frame.
4581 Otherwise, the params have their initial_svalues.
4583 Return the frame_region for the new frame. */
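/* E.g. (an illustrative sketch; the callee is hypothetical): for a call
   "foo (a, b)" to
     void foo (int x, int y);
   push_frame creates a new frame_region and binds the svalues of "a" and
   "b" to the regions for "x" and "y" within it; any surplus arguments to
   a variadic callee are instead bound to var_arg regions.  */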
4585 const region *
4586 region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
4587 region_model_context *ctxt)
4589 m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
4590 if (arg_svals)
4592 /* Arguments supplied from a caller frame. */
4593 tree fndecl = fun->decl;
4594 unsigned idx = 0;
4595 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
4596 iter_parm = DECL_CHAIN (iter_parm), ++idx)
4598 /* If there's a mismatching declaration, the call stmt might
4599 not have enough args. Handle this case by leaving the
4600 rest of the params as uninitialized. */
4601 if (idx >= arg_svals->length ())
4602 break;
4603 tree parm_lval = iter_parm;
4604 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
4605 parm_lval = parm_default_ssa;
4606 const region *parm_reg = get_lvalue (parm_lval, ctxt);
4607 const svalue *arg_sval = (*arg_svals)[idx];
4608 set_value (parm_reg, arg_sval, ctxt);
4611 /* Handle any variadic args. */
4612 unsigned va_arg_idx = 0;
4613 for (; idx < arg_svals->length (); idx++, va_arg_idx++)
4615 const svalue *arg_sval = (*arg_svals)[idx];
4616 const region *var_arg_reg
4617 = m_mgr->get_var_arg_region (m_current_frame,
4618 va_arg_idx);
4619 set_value (var_arg_reg, arg_sval, ctxt);
4622 else
4624 /* Otherwise we have a top-level call within the analysis. The params
4625 have defined but unknown initial values.
4626 Anything they point to has escaped. */
4627 tree fndecl = fun->decl;
4629 /* Handle "__attribute__((nonnull))". */
4630 tree fntype = TREE_TYPE (fndecl);
4631 bitmap nonnull_args = get_nonnull_args (fntype);
4633 unsigned parm_idx = 0;
4634 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
4635 iter_parm = DECL_CHAIN (iter_parm))
4637 bool non_null = (nonnull_args
4638 ? (bitmap_empty_p (nonnull_args)
4639 || bitmap_bit_p (nonnull_args, parm_idx))
4640 : false);
4641 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
4642 on_top_level_param (parm_default_ssa, non_null, ctxt);
4643 else
4644 on_top_level_param (iter_parm, non_null, ctxt);
4645 parm_idx++;
4648 BITMAP_FREE (nonnull_args);
4651 return m_current_frame;
4654 /* Get the function of the top-most frame in this region_model's stack.
4655 There must be such a frame. */
4657 function *
4658 region_model::get_current_function () const
4660 const frame_region *frame = get_current_frame ();
4661 gcc_assert (frame);
4662 return frame->get_function ();
4665 /* Pop the topmost frame_region from this region_model's stack.
4667 If RESULT_LVALUE is non-null, copy any return value from the frame
4668 into the corresponding region (evaluated with respect to the *caller*
4669 frame, rather than the called frame).
4670 If OUT_RESULT is non-null, copy any return value from the frame
4671 into *OUT_RESULT.
4673 Purge the frame region and all its descendent regions.
4674 Convert any pointers that point into such regions into
4675 POISON_KIND_POPPED_STACK svalues. */
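/* E.g. (an illustrative sketch): for "res = foo ();", pop_frame
   evaluates foo's DECL_RESULT within the callee frame, pops the frame,
   stores that value into the region for "res" in the caller frame, and
   converts any pointers still pointing into the popped frame into
   POISON_KIND_POPPED_STACK values.  */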
4677 void
4678 region_model::pop_frame (tree result_lvalue,
4679 const svalue **out_result,
4680 region_model_context *ctxt)
4682 gcc_assert (m_current_frame);
4684 const frame_region *frame_reg = m_current_frame;
4686 /* Notify state machines. */
4687 if (ctxt)
4688 ctxt->on_pop_frame (frame_reg);
4690 /* Evaluate the result, within the callee frame. */
4691 tree fndecl = m_current_frame->get_function ()->decl;
4692 tree result = DECL_RESULT (fndecl);
4693 const svalue *retval = NULL;
4694 if (result && TREE_TYPE (result) != void_type_node)
4696 retval = get_rvalue (result, ctxt);
4697 if (out_result)
4698 *out_result = retval;
4701 /* Pop the frame. */
4702 m_current_frame = m_current_frame->get_calling_frame ();
4704 if (result_lvalue && retval)
4706 /* Compute result_dst_reg using RESULT_LVALUE *after* popping
4707 the frame, but before poisoning pointers into the old frame. */
4708 const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
4709 set_value (result_dst_reg, retval, ctxt);
4712 unbind_region_and_descendents (frame_reg, POISON_KIND_POPPED_STACK);
4715 /* Get the number of frames in this region_model's stack. */
4718 region_model::get_stack_depth () const
4720 const frame_region *frame = get_current_frame ();
4721 if (frame)
4722 return frame->get_stack_depth ();
4723 else
4724 return 0;
4727 /* Get the frame_region with the given index within the stack.
4728 The frame_region must exist. */
4730 const frame_region *
4731 region_model::get_frame_at_index (int index) const
4733 const frame_region *frame = get_current_frame ();
4734 gcc_assert (frame);
4735 gcc_assert (index >= 0);
4736 gcc_assert (index <= frame->get_index ());
4737 while (index != frame->get_index ())
4739 frame = frame->get_calling_frame ();
4740 gcc_assert (frame);
4742 return frame;
4745 /* Unbind svalues for any regions in REG and below.
4746 Find any pointers to such regions; convert them to
4747 poisoned values of kind PKIND.
4748 Also purge any dynamic extents. */
4750 void
4751 region_model::unbind_region_and_descendents (const region *reg,
4752 enum poison_kind pkind)
4754 /* Gather a set of base regions to be unbound. */
4755 hash_set<const region *> base_regs;
4756 for (store::cluster_map_t::iterator iter = m_store.begin ();
4757 iter != m_store.end (); ++iter)
4759 const region *iter_base_reg = (*iter).first;
4760 if (iter_base_reg->descendent_of_p (reg))
4761 base_regs.add (iter_base_reg);
4763 for (hash_set<const region *>::iterator iter = base_regs.begin ();
4764 iter != base_regs.end (); ++iter)
4765 m_store.purge_cluster (*iter);
4767 /* Find any pointers to REG or its descendents; convert to poisoned. */
4768 poison_any_pointers_to_descendents (reg, pkind);
4770 /* Purge dynamic extents of any base regions in REG and below
4771 (e.g. VLAs and alloca stack regions). */
4772 for (auto iter : m_dynamic_extents)
4774 const region *iter_reg = iter.first;
4775 if (iter_reg->descendent_of_p (reg))
4776 unset_dynamic_extents (iter_reg);
4780 /* Implementation of BindingVisitor.
4781 Update the bound svalues for regions below REG to use poisoned
4782 values instead. */
4784 struct bad_pointer_finder
4786 bad_pointer_finder (const region *reg, enum poison_kind pkind,
4787 region_model_manager *mgr)
4788 : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
4791 void on_binding (const binding_key *, const svalue *&sval)
4793 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
4795 const region *ptr_dst = ptr_sval->get_pointee ();
4796 /* Poison ptrs to descendents of REG, but not to REG itself,
4797 otherwise double-free detection doesn't work (since sm-state
4798 for "free" is stored on the original ptr svalue). */
4799 if (ptr_dst->descendent_of_p (m_reg)
4800 && ptr_dst != m_reg)
4802 sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
4803 sval->get_type ());
4804 ++m_count;
4809 const region *m_reg;
4810 enum poison_kind m_pkind;
4811 region_model_manager *const m_mgr;
4812 int m_count;
4815 /* Find any pointers to REG or its descendents; convert them to
4816 poisoned values of kind PKIND.
4817 Return the number of pointers that were poisoned. */
4820 region_model::poison_any_pointers_to_descendents (const region *reg,
4821 enum poison_kind pkind)
4823 bad_pointer_finder bv (reg, pkind, m_mgr);
4824 m_store.for_each_binding (bv);
4825 return bv.m_count;
4828 /* Attempt to merge THIS with OTHER_MODEL, writing the result
4829 to OUT_MODEL. Use POINT to distinguish values created as a
4830 result of merging. */
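/* E.g. (an illustrative sketch): merging a model in which "x == 3" with
   one in which "x == 4" can succeed, with "x" becoming a merged/unknown
   value in OUT_MODEL; models whose call stacks differ are rejected up
   front.  */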
4832 bool
4833 region_model::can_merge_with_p (const region_model &other_model,
4834 const program_point &point,
4835 region_model *out_model,
4836 const extrinsic_state *ext_state,
4837 const program_state *state_a,
4838 const program_state *state_b) const
4840 gcc_assert (out_model);
4841 gcc_assert (m_mgr == other_model.m_mgr);
4842 gcc_assert (m_mgr == out_model->m_mgr);
4844 if (m_current_frame != other_model.m_current_frame)
4845 return false;
4846 out_model->m_current_frame = m_current_frame;
4848 model_merger m (this, &other_model, point, out_model,
4849 ext_state, state_a, state_b);
4851 if (!store::can_merge_p (&m_store, &other_model.m_store,
4852 &out_model->m_store, m_mgr->get_store_manager (),
4853 &m))
4854 return false;
4856 if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
4857 &out_model->m_dynamic_extents))
4858 return false;
4860 /* Merge constraints. */
4861 constraint_manager::merge (*m_constraints,
4862 *other_model.m_constraints,
4863 out_model->m_constraints);
4865 return true;
4868 /* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
4869 otherwise. */
4871 tree
4872 region_model::get_fndecl_for_call (const gcall *call,
4873 region_model_context *ctxt)
4875 tree fn_ptr = gimple_call_fn (call);
4876 if (fn_ptr == NULL_TREE)
4877 return NULL_TREE;
4878 const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
4879 if (const region_svalue *fn_ptr_ptr
4880 = fn_ptr_sval->dyn_cast_region_svalue ())
4882 const region *reg = fn_ptr_ptr->get_pointee ();
4883 if (const function_region *fn_reg = reg->dyn_cast_function_region ())
4885 tree fn_decl = fn_reg->get_fndecl ();
4886 cgraph_node *node = cgraph_node::get (fn_decl);
4887 if (!node)
4888 return NULL_TREE;
4889 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
4890 if (ultimate_node)
4891 return ultimate_node->decl;
4895 return NULL_TREE;
4898 /* Would be much simpler to use a lambda here, if it were supported. */
4900 struct append_regions_cb_data
4902 const region_model *model;
4903 auto_vec<const decl_region *> *out;
4906 /* Populate *OUT with all decl_regions in the current
4907 frame that have clusters within the store. */
4909 void
4910 region_model::
4911 get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
4913 append_regions_cb_data data;
4914 data.model = this;
4915 data.out = out;
4916 m_store.for_each_cluster (append_regions_cb, &data);
4919 /* Implementation detail of get_regions_for_current_frame. */
4921 void
4922 region_model::append_regions_cb (const region *base_reg,
4923 append_regions_cb_data *cb_data)
4925 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
4926 return;
4927 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
4928 cb_data->out->safe_push (decl_reg);
4932 /* Abstract class for diagnostics related to the use of
4933 floating-point arithmetic where precision is needed. */
4935 class imprecise_floating_point_arithmetic : public pending_diagnostic
4937 public:
4938 int get_controlling_option () const final override
4940 return OPT_Wanalyzer_imprecise_fp_arithmetic;
4944 /* Concrete diagnostic to complain about uses of floating-point arithmetic
4945 in the size argument of malloc etc. */
4947 class float_as_size_arg : public imprecise_floating_point_arithmetic
4949 public:
4950 float_as_size_arg (tree arg) : m_arg (arg)
4953 const char *get_kind () const final override
4955 return "float_as_size_arg_diagnostic";
4958 bool subclass_equal_p (const pending_diagnostic &other) const final override
4960 return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
4963 bool emit (rich_location *rich_loc) final override
4965 diagnostic_metadata m;
4966 bool warned = warning_meta (rich_loc, m, get_controlling_option (),
4967 "use of floating-point arithmetic here might"
4968 " yield unexpected results");
4969 if (warned)
4970 inform (rich_loc->get_loc (), "only use operands of an integer type"
4971 " inside the size argument");
4972 return warned;
4975 label_text describe_final_event (const evdesc::final_event &ev) final
4976 override
4978 if (m_arg)
4979 return ev.formatted_print ("operand %qE is of type %qT",
4980 m_arg, TREE_TYPE (m_arg));
4981 return ev.formatted_print ("at least one operand of the size argument is"
4982 " of a floating-point type");
4985 private:
4986 tree m_arg;
4989 /* Visitor to find uses of floating-point variables/constants in an svalue. */
4991 class contains_floating_point_visitor : public visitor
4993 public:
4994 contains_floating_point_visitor (const svalue *root_sval) : m_result (NULL)
4996 root_sval->accept (this);
4999 const svalue *get_svalue_to_report ()
5001 return m_result;
5004 void visit_constant_svalue (const constant_svalue *sval) final override
5006 /* By the time the analyzer runs, constant integer operands in a
5007 floating-point expression have already been implicitly converted to
5008 floating-point. Thus, we prefer to report non-constants, so that the
5009 diagnostic always reports a floating-point operand. */
5010 tree type = sval->get_type ();
5011 if (type && FLOAT_TYPE_P (type) && !m_result)
5012 m_result = sval;
5015 void visit_conjured_svalue (const conjured_svalue *sval) final override
5017 tree type = sval->get_type ();
5018 if (type && FLOAT_TYPE_P (type))
5019 m_result = sval;
5022 void visit_initial_svalue (const initial_svalue *sval) final override
5024 tree type = sval->get_type ();
5025 if (type && FLOAT_TYPE_P (type))
5026 m_result = sval;
5029 private:
5030 /* Non-null if at least one floating-point operand was found. */
5031 const svalue *m_result;
5034 /* May complain about uses of floating-point operands in SIZE_IN_BYTES. */
5036 void
5037 region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
5038 region_model_context *ctxt) const
5040 gcc_assert (ctxt);
5042 contains_floating_point_visitor v (size_in_bytes);
5043 if (const svalue *float_sval = v.get_svalue_to_report ())
5045 tree diag_arg = get_representative_tree (float_sval);
5046 ctxt->warn (make_unique<float_as_size_arg> (diag_arg));
5050 /* Return a region describing a heap-allocated block of memory.
5051 Use CTXT to complain about tainted sizes.
5053 Reuse an existing heap_allocated_region if it's not being referenced by
5054 this region_model; otherwise create a new one. */
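/* E.g. (an illustrative sketch): on a path where a previously-allocated
   block has been freed and no pointer to it survives in this model, the
   same heap_allocated_region can be handed out again; a fresh one is
   created only if all existing ones are still referenced.  */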
5056 const region *
5057 region_model::get_or_create_region_for_heap_alloc (const svalue *size_in_bytes,
5058 region_model_context *ctxt)
5060 /* Determine which regions are referenced in this region_model, so that
5061 we can reuse an existing heap_allocated_region if it's not in use on
5062 this path. */
5063 auto_bitmap base_regs_in_use;
5064 get_referenced_base_regions (base_regs_in_use);
5066 /* Don't reuse regions that are marked as TOUCHED. */
5067 for (store::cluster_map_t::iterator iter = m_store.begin ();
5068 iter != m_store.end (); ++iter)
5069 if ((*iter).second->touched_p ())
5071 const region *base_reg = (*iter).first;
5072 bitmap_set_bit (base_regs_in_use, base_reg->get_id ());
5075 const region *reg
5076 = m_mgr->get_or_create_region_for_heap_alloc (base_regs_in_use);
5077 if (size_in_bytes)
5078 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
5079 set_dynamic_extents (reg, size_in_bytes, ctxt);
5080 return reg;
5083 /* Populate OUT_IDS with the set of IDs of those base regions which are
5084 reachable in this region_model. */
5086 void
5087 region_model::get_referenced_base_regions (auto_bitmap &out_ids) const
5089 reachable_regions reachable_regs (const_cast<region_model *> (this));
5090 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
5091 &reachable_regs);
5092 /* Get regions for locals that have explicitly bound values. */
5093 for (store::cluster_map_t::iterator iter = m_store.begin ();
5094 iter != m_store.end (); ++iter)
5096 const region *base_reg = (*iter).first;
5097 if (const region *parent = base_reg->get_parent_region ())
5098 if (parent->get_kind () == RK_FRAME)
5099 reachable_regs.add (base_reg, false);
5102 bitmap_clear (out_ids);
5103 for (auto iter_reg : reachable_regs)
5104 bitmap_set_bit (out_ids, iter_reg->get_id ());
5107 /* Return a new region describing a block of memory allocated within the
5108 current frame.
5109 Use CTXT to complain about tainted sizes. */
5111 const region *
5112 region_model::create_region_for_alloca (const svalue *size_in_bytes,
5113 region_model_context *ctxt)
5115 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
5116 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
5117 set_dynamic_extents (reg, size_in_bytes, ctxt);
5118 return reg;
5121 /* Record that the size of REG is SIZE_IN_BYTES.
5122 Use CTXT to complain about tainted sizes. */
5124 void
5125 region_model::set_dynamic_extents (const region *reg,
5126 const svalue *size_in_bytes,
5127 region_model_context *ctxt)
5129 assert_compat_types (size_in_bytes->get_type (), size_type_node);
5130 if (ctxt)
5132 check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
5133 ctxt);
5134 check_dynamic_size_for_floats (size_in_bytes, ctxt);
5136 m_dynamic_extents.put (reg, size_in_bytes);
5139 /* Get the recorded dynamic size of REG in bytes, or NULL if no dynamic
5140 size was recorded. */
5142 const svalue *
5143 region_model::get_dynamic_extents (const region *reg) const
5145 if (const svalue * const *slot = m_dynamic_extents.get (reg))
5146 return *slot;
5147 return NULL;
5150 /* Unset any recorded dynamic size of REG. */
5152 void
5153 region_model::unset_dynamic_extents (const region *reg)
5155 m_dynamic_extents.remove (reg);
5158 /* Information about the layout of a RECORD_TYPE, captured as a vector
5159 of items, where each item is either a field or padding. */
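/* E.g. (an illustrative sketch, assuming a 4-byte int and typical
   alignment):
     struct s { char c; int i; };
   would be captured as three items:
     "c":               bits [0, 8)
     padding after "c": bits [8, 32)
     "i":               bits [32, 64).  */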
5161 class record_layout
5163 public:
5164 /* An item within a record; either a field, or padding after a field. */
5165 struct item
5167 public:
5168 item (const bit_range &br,
5169 tree field,
5170 bool is_padding)
5171 : m_bit_range (br),
5172 m_field (field),
5173 m_is_padding (is_padding)
5177 bit_offset_t get_start_bit_offset () const
5179 return m_bit_range.get_start_bit_offset ();
5181 bit_offset_t get_next_bit_offset () const
5183 return m_bit_range.get_next_bit_offset ();
5186 bool contains_p (bit_offset_t offset) const
5188 return m_bit_range.contains_p (offset);
5191 void dump_to_pp (pretty_printer *pp) const
5193 if (m_is_padding)
5194 pp_printf (pp, "padding after %qD", m_field);
5195 else
5196 pp_printf (pp, "%qD", m_field);
5197 pp_string (pp, ", ");
5198 m_bit_range.dump_to_pp (pp);
5201 bit_range m_bit_range;
5202 tree m_field;
5203 bool m_is_padding;
5206 record_layout (tree record_type)
5208 gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);
5210 for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
5211 iter = DECL_CHAIN (iter))
5213 if (TREE_CODE (iter) == FIELD_DECL)
5215 int iter_field_offset = int_bit_position (iter);
5216 bit_size_t size_in_bits;
5217 if (!int_size_in_bits (TREE_TYPE (iter), &size_in_bits))
5218 size_in_bits = 0;
5220 maybe_pad_to (iter_field_offset);
5222 /* Add field. */
5223 m_items.safe_push (item (bit_range (iter_field_offset,
5224 size_in_bits),
5225 iter, false));
5229 /* Add any trailing padding. */
5230 bit_size_t size_in_bits;
5231 if (int_size_in_bits (record_type, &size_in_bits))
5232 maybe_pad_to (size_in_bits);
5235 void dump_to_pp (pretty_printer *pp) const
5237 unsigned i;
5238 item *it;
5239 FOR_EACH_VEC_ELT (m_items, i, it)
5241 it->dump_to_pp (pp);
5242 pp_newline (pp);
5246 DEBUG_FUNCTION void dump () const
5248 pretty_printer pp;
5249 pp_format_decoder (&pp) = default_tree_printer;
5250 pp.buffer->stream = stderr;
5251 dump_to_pp (&pp);
5252 pp_flush (&pp);
5255 const record_layout::item *get_item_at (bit_offset_t offset) const
5257 unsigned i;
5258 item *it;
5259 FOR_EACH_VEC_ELT (m_items, i, it)
5260 if (it->contains_p (offset))
5261 return it;
5262 return NULL;
5265 private:
5266 /* Subroutine of ctor. Add padding item to NEXT_OFFSET if necessary. */
5268 void maybe_pad_to (bit_offset_t next_offset)
5270 if (m_items.length () > 0)
5272 const item &last_item = m_items[m_items.length () - 1];
5273 bit_offset_t offset_after_last_item
5274 = last_item.get_next_bit_offset ();
5275 if (next_offset > offset_after_last_item)
5277 bit_size_t padding_size
5278 = next_offset - offset_after_last_item;
5279 m_items.safe_push (item (bit_range (offset_after_last_item,
5280 padding_size),
5281 last_item.m_field, true));
5286 auto_vec<item> m_items;
5289 /* A subclass of pending_diagnostic for complaining about uninitialized data
5290 being copied across a trust boundary to an untrusted output
5291 (e.g. copy_to_user infoleaks in the Linux kernel). */
5293 class exposure_through_uninit_copy
5294 : public pending_diagnostic_subclass<exposure_through_uninit_copy>
5296 public:
5297 exposure_through_uninit_copy (const region *src_region,
5298 const region *dest_region,
5299 const svalue *copied_sval)
5300 : m_src_region (src_region),
5301 m_dest_region (dest_region),
5302 m_copied_sval (copied_sval)
5304 gcc_assert (m_copied_sval->get_kind () == SK_POISONED
5305 || m_copied_sval->get_kind () == SK_COMPOUND);
5308 const char *get_kind () const final override
5310 return "exposure_through_uninit_copy";
5313 bool operator== (const exposure_through_uninit_copy &other) const
5315 return (m_src_region == other.m_src_region
5316 && m_dest_region == other.m_dest_region
5317 && m_copied_sval == other.m_copied_sval);
5320 int get_controlling_option () const final override
5322 return OPT_Wanalyzer_exposure_through_uninit_copy;
5325 bool emit (rich_location *rich_loc) final override
5327 diagnostic_metadata m;
5328 /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor. */
5329 m.add_cwe (200);
5330 enum memory_space mem_space = get_src_memory_space ();
5331 bool warned;
5332 switch (mem_space)
5334 default:
5335 warned = warning_meta
5336 (rich_loc, m, get_controlling_option (),
5337 "potential exposure of sensitive information"
5338 " by copying uninitialized data across trust boundary");
5339 break;
5340 case MEMSPACE_STACK:
5341 warned = warning_meta
5342 (rich_loc, m, get_controlling_option (),
5343 "potential exposure of sensitive information"
5344 " by copying uninitialized data from stack across trust boundary");
5345 break;
5346 case MEMSPACE_HEAP:
5347 warned = warning_meta
5348 (rich_loc, m, get_controlling_option (),
5349 "potential exposure of sensitive information"
5350 " by copying uninitialized data from heap across trust boundary");
5351 break;
5353 if (warned)
5355 location_t loc = rich_loc->get_loc ();
5356 inform_number_of_uninit_bits (loc);
5357 complain_about_uninit_ranges (loc);
5359 if (mem_space == MEMSPACE_STACK)
5360 maybe_emit_fixit_hint ();
5362 return warned;
5365 label_text describe_final_event (const evdesc::final_event &) final override
5367 enum memory_space mem_space = get_src_memory_space ();
5368 switch (mem_space)
5370 default:
5371 return label_text::borrow ("uninitialized data copied here");
5373 case MEMSPACE_STACK:
5374 return label_text::borrow ("uninitialized data copied from stack here");
5376 case MEMSPACE_HEAP:
5377 return label_text::borrow ("uninitialized data copied from heap here");
5381 void mark_interesting_stuff (interesting_t *interest) final override
5383 if (m_src_region)
5384 interest->add_region_creation (m_src_region);
5387 private:
5388 enum memory_space get_src_memory_space () const
5390 return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
5393 bit_size_t calc_num_uninit_bits () const
5395 switch (m_copied_sval->get_kind ())
5397 default:
5398 gcc_unreachable ();
5399 break;
5400 case SK_POISONED:
5402 const poisoned_svalue *poisoned_sval
5403 = as_a <const poisoned_svalue *> (m_copied_sval);
5404 gcc_assert (poisoned_sval->get_poison_kind () == POISON_KIND_UNINIT);
5406 /* Give up if we don't have type information. */
5407 if (m_copied_sval->get_type () == NULL_TREE)
5408 return 0;
5410 bit_size_t size_in_bits;
5411 if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
5412 return size_in_bits;
5414 /* Give up if we can't get the size of the type. */
5415 return 0;
5417 break;
5418 case SK_COMPOUND:
5420 const compound_svalue *compound_sval
5421 = as_a <const compound_svalue *> (m_copied_sval);
5422 bit_size_t result = 0;
5423 /* Find keys for uninit svals. */
5424 for (auto iter : *compound_sval)
5426 const svalue *sval = iter.second;
5427 if (const poisoned_svalue *psval
5428 = sval->dyn_cast_poisoned_svalue ())
5429 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
5431 const binding_key *key = iter.first;
5432 const concrete_binding *ckey
5433 = key->dyn_cast_concrete_binding ();
5434 gcc_assert (ckey);
5435 result += ckey->get_size_in_bits ();
5438 return result;
5443 void inform_number_of_uninit_bits (location_t loc) const
5445 bit_size_t num_uninit_bits = calc_num_uninit_bits ();
5446 if (num_uninit_bits <= 0)
5447 return;
5448 if (num_uninit_bits % BITS_PER_UNIT == 0)
5450 /* Express in bytes. */
5451 byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT;
5452 if (num_uninit_bytes == 1)
5453 inform (loc, "1 byte is uninitialized");
5454 else
5455 inform (loc,
5456 "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
5458 else
5460 /* Express in bits. */
5461 if (num_uninit_bits == 1)
5462 inform (loc, "1 bit is uninitialized");
5463 else
5464 inform (loc,
5465 "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
5469 void complain_about_uninit_ranges (location_t loc) const
5471 if (const compound_svalue *compound_sval
5472 = m_copied_sval->dyn_cast_compound_svalue ())
5474 /* Find keys for uninit svals. */
5475 auto_vec<const concrete_binding *> uninit_keys;
5476 for (auto iter : *compound_sval)
5478 const svalue *sval = iter.second;
5479 if (const poisoned_svalue *psval
5480 = sval->dyn_cast_poisoned_svalue ())
5481 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
5483 const binding_key *key = iter.first;
5484 const concrete_binding *ckey
5485 = key->dyn_cast_concrete_binding ();
5486 gcc_assert (ckey);
5487 uninit_keys.safe_push (ckey);
5490 /* Complain about them in sorted order. */
5491 uninit_keys.qsort (concrete_binding::cmp_ptr_ptr);
5493 std::unique_ptr<record_layout> layout;
5495 tree type = m_copied_sval->get_type ();
5496 if (type && TREE_CODE (type) == RECORD_TYPE)
5498 // (std::make_unique is C++14)
5499 layout = std::unique_ptr<record_layout> (new record_layout (type));
5501 if (0)
5502 layout->dump ();
5505 unsigned i;
5506 const concrete_binding *ckey;
5507 FOR_EACH_VEC_ELT (uninit_keys, i, ckey)
5509 bit_offset_t start_bit = ckey->get_start_bit_offset ();
5510 bit_offset_t next_bit = ckey->get_next_bit_offset ();
5511 complain_about_uninit_range (loc, start_bit, next_bit,
5512 layout.get ());
5517 void complain_about_uninit_range (location_t loc,
5518 bit_offset_t start_bit,
5519 bit_offset_t next_bit,
5520 const record_layout *layout) const
5522 if (layout)
5524 while (start_bit < next_bit)
5526 if (const record_layout::item *item
5527 = layout->get_item_at (start_bit))
5529 gcc_assert (start_bit >= item->get_start_bit_offset ());
5530 gcc_assert (start_bit < item->get_next_bit_offset ());
5531 if (item->get_start_bit_offset () == start_bit
5532 && item->get_next_bit_offset () <= next_bit)
5533 complain_about_fully_uninit_item (*item);
5534 else
5535 complain_about_partially_uninit_item (*item);
5536 start_bit = item->get_next_bit_offset ();
5537 continue;
5539 else
5540 break;
5544 if (start_bit >= next_bit)
5545 return;
5547 if (start_bit % 8 == 0 && next_bit % 8 == 0)
5549 /* Express in bytes. */
5550 byte_offset_t start_byte = start_bit / 8;
5551 byte_offset_t last_byte = (next_bit / 8) - 1;
5552 if (last_byte == start_byte)
5553 inform (loc,
5554 "byte %wu is uninitialized",
5555 start_byte.to_uhwi ());
5556 else
5557 inform (loc,
5558 "bytes %wu - %wu are uninitialized",
5559 start_byte.to_uhwi (),
5560 last_byte.to_uhwi ());
5562 else
5564 /* Express in bits. */
5565 bit_offset_t last_bit = next_bit - 1;
5566 if (last_bit == start_bit)
5567 inform (loc,
5568 "bit %wu is uninitialized",
5569 start_bit.to_uhwi ());
5570 else
5571 inform (loc,
5572 "bits %wu - %wu are uninitialized",
5573 start_bit.to_uhwi (),
5574 last_bit.to_uhwi ());
5578 static void
5579 complain_about_fully_uninit_item (const record_layout::item &item)
5581 tree field = item.m_field;
5582 bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
5583 if (item.m_is_padding)
5585 if (num_bits % 8 == 0)
5587 /* Express in bytes. */
5588 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
5589 if (num_bytes == 1)
5590 inform (DECL_SOURCE_LOCATION (field),
5591 "padding after field %qD is uninitialized (1 byte)",
5592 field);
5593 else
5594 inform (DECL_SOURCE_LOCATION (field),
5595 "padding after field %qD is uninitialized (%wu bytes)",
5596 field, num_bytes.to_uhwi ());
5598 else
5600 /* Express in bits. */
5601 if (num_bits == 1)
5602 inform (DECL_SOURCE_LOCATION (field),
5603 "padding after field %qD is uninitialized (1 bit)",
5604 field);
5605 else
5606 inform (DECL_SOURCE_LOCATION (field),
5607 "padding after field %qD is uninitialized (%wu bits)",
5608 field, num_bits.to_uhwi ());
5611 else
5613 if (num_bits % 8 == 0)
5615 /* Express in bytes. */
5616 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
5617 if (num_bytes == 1)
5618 inform (DECL_SOURCE_LOCATION (field),
5619 "field %qD is uninitialized (1 byte)", field);
5620 else
5621 inform (DECL_SOURCE_LOCATION (field),
5622 "field %qD is uninitialized (%wu bytes)",
5623 field, num_bytes.to_uhwi ());
5625 else
5627 /* Express in bits. */
5628 if (num_bits == 1)
5629 inform (DECL_SOURCE_LOCATION (field),
5630 "field %qD is uninitialized (1 bit)", field);
5631 else
5632 inform (DECL_SOURCE_LOCATION (field),
5633 "field %qD is uninitialized (%wu bits)",
5634 field, num_bits.to_uhwi ());
5639 static void
5640 complain_about_partially_uninit_item (const record_layout::item &item)
5642 tree field = item.m_field;
5643 if (item.m_is_padding)
5644 inform (DECL_SOURCE_LOCATION (field),
5645 "padding after field %qD is partially uninitialized",
5646 field);
5647 else
5648 inform (DECL_SOURCE_LOCATION (field),
5649 "field %qD is partially uninitialized",
5650 field);
5651 /* TODO: ideally we'd describe what parts are uninitialized. */
5654 void maybe_emit_fixit_hint () const
5656 if (tree decl = m_src_region->maybe_get_decl ())
5658 gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl));
5659 hint_richloc.add_fixit_insert_after (" = {0}");
5660 inform (&hint_richloc,
5661 "suggest forcing zero-initialization by"
5662 " providing a %<{0}%> initializer");
5666 private:
5667 const region *m_src_region;
5668 const region *m_dest_region;
5669 const svalue *m_copied_sval;
5672 /* Return true if any part of SVAL is uninitialized. */
5674 static bool
5675 contains_uninit_p (const svalue *sval)
5677 struct uninit_finder : public visitor
5679 public:
5680 uninit_finder () : m_found_uninit (false) {}
5681 void visit_poisoned_svalue (const poisoned_svalue *sval)
5683 if (sval->get_poison_kind () == POISON_KIND_UNINIT)
5684 m_found_uninit = true;
5686 bool m_found_uninit;
5689 uninit_finder v;
5690 sval->accept (&v);
5692 return v.m_found_uninit;
5695 /* Function for use by plugins when simulating writing data through a
5696 pointer to an "untrusted" region DST_REG (and thus crossing a security
5697 boundary), such as copying data to user space in an OS kernel.
5699 Check that COPIED_SVAL is fully initialized. If not, complain about
5700 an infoleak to CTXT.
5702 SRC_REG can be NULL; if non-NULL it is used as a hint in the diagnostic
5703 as to where COPIED_SVAL came from. */
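/* E.g. (an illustrative sketch of plugin usage; the names are
   hypothetical): when simulating something like
     copy_to_user (uaddr, &local, sizeof (local));
   a plugin could call
     model->maybe_complain_about_infoleak (dst_reg, copied_sval,
					   src_reg, ctxt);
   to warn if "local" still has uninitialized fields or padding.  */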
5705 void
5706 region_model::maybe_complain_about_infoleak (const region *dst_reg,
5707 const svalue *copied_sval,
5708 const region *src_reg,
5709 region_model_context *ctxt)
5711 /* Check for exposure. */
5712 if (contains_uninit_p (copied_sval))
5713 ctxt->warn (make_unique<exposure_through_uninit_copy> (src_reg,
5714 dst_reg,
5715 copied_sval));
5718 /* Set errno to a positive symbolic int, as if some error has occurred. */
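/* E.g. (an illustrative sketch): after simulating a failing library
   call, errno is bound to a fresh conjured int constrained to be > 0,
   rather than to any particular error code such as ENOMEM.  */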
5720 void
5721 region_model::set_errno (const call_details &cd)
5723 const region *errno_reg = m_mgr->get_errno_region ();
5724 conjured_purge p (this, cd.get_ctxt ());
5725 const svalue *new_errno_sval
5726 = m_mgr->get_or_create_conjured_svalue (integer_type_node,
5727 cd.get_call_stmt (),
5728 errno_reg, p);
5729 const svalue *zero
5730 = m_mgr->get_or_create_int_cst (integer_type_node, 0);
5731 add_constraint (new_errno_sval, GT_EXPR, zero, cd.get_ctxt ());
5732 set_value (errno_reg, new_errno_sval, cd.get_ctxt ());
5735 /* class noop_region_model_context : public region_model_context. */
5737 void
5738 noop_region_model_context::add_note (std::unique_ptr<pending_note>)
5742 void
5743 noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
5747 void
5748 noop_region_model_context::terminate_path ()
5752 /* struct model_merger. */
5754 /* Dump a multiline representation of this merger to PP. */
5756 void
5757 model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
5759 pp_string (pp, "model A:");
5760 pp_newline (pp);
5761 m_model_a->dump_to_pp (pp, simple, true);
5762 pp_newline (pp);
5764 pp_string (pp, "model B:");
5765 pp_newline (pp);
5766 m_model_b->dump_to_pp (pp, simple, true);
5767 pp_newline (pp);
5769 pp_string (pp, "merged model:");
5770 pp_newline (pp);
5771 m_merged_model->dump_to_pp (pp, simple, true);
5772 pp_newline (pp);
5775 /* Dump a multiline representation of this merger to FILE. */
5777 void
5778 model_merger::dump (FILE *fp, bool simple) const
5780 pretty_printer pp;
5781 pp_format_decoder (&pp) = default_tree_printer;
5782 pp_show_color (&pp) = pp_show_color (global_dc->printer);
5783 pp.buffer->stream = fp;
5784 dump_to_pp (&pp, simple);
5785 pp_flush (&pp);
5788 /* Dump a multiline representation of this merger to stderr. */
5790 DEBUG_FUNCTION void
5791 model_merger::dump (bool simple) const
5793 dump (stderr, simple);
5796 /* Return true if it's OK to merge SVAL with other svalues. */
5798 bool
5799 model_merger::mergeable_svalue_p (const svalue *sval) const
5801 if (m_ext_state)
5803 /* Reject merging svalues that have non-purgeable sm-state,
5804 to avoid falsely reporting memory leaks by merging them
5805 with something else. For example, given a local var "p",
5806 reject the merger of a:
5807 store_a mapping "p" to a malloc-ed ptr
5808 with:
5809 store_b mapping "p" to a NULL ptr. */
5810 if (m_state_a)
5811 if (!m_state_a->can_purge_p (*m_ext_state, sval))
5812 return false;
5813 if (m_state_b)
5814 if (!m_state_b->can_purge_p (*m_ext_state, sval))
5815 return false;
5817 return true;
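/* For example (sketch of the situation described above):

     void *p;
     if (flag)
       p = malloc (16);   // state A: "p" has non-purgeable malloc sm-state
     else
       p = NULL;          // state B: "p" is a NULL constant

   Merging the two svalues for "p" at the join point would discard the
   sm-state needed to later diagnose a leak of the malloc-ed buffer.  */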
5820 } // namespace ana
5822 /* Dump RMODEL fully to stderr (i.e. without summarization). */
5824 DEBUG_FUNCTION void
5825 debug (const region_model &rmodel)
5827 rmodel.dump (false);
5830 /* class rejected_op_constraint : public rejected_constraint. */
5832 void
5833 rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
5835 region_model m (m_model);
5836 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
5837 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
5838 lhs_sval->dump_to_pp (pp, true);
5839 pp_printf (pp, " %s ", op_symbol_code (m_op));
5840 rhs_sval->dump_to_pp (pp, true);
5843 /* class rejected_default_case : public rejected_constraint. */
5845 void
5846 rejected_default_case::dump_to_pp (pretty_printer *pp) const
5848 pp_string (pp, "implicit default for enum");
5851 /* class rejected_ranges_constraint : public rejected_constraint. */
5853 void
5854 rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
5856 region_model m (m_model);
5857 const svalue *sval = m.get_rvalue (m_expr, NULL);
5858 sval->dump_to_pp (pp, true);
5859 pp_string (pp, " in ");
5860 m_ranges->dump_to_pp (pp, true);
5863 /* class engine. */
5865 /* engine's ctor. */
5867 engine::engine (const supergraph *sg, logger *logger)
5868 : m_sg (sg), m_mgr (logger)
5872 /* Dump the managed objects by class to LOGGER, and the per-class totals. */
5874 void
5875 engine::log_stats (logger *logger) const
5877 m_mgr.log_stats (logger, true);
5880 namespace ana {
5882 #if CHECKING_P
5884 namespace selftest {
5886 /* Build a constant tree of the given type from STR. */
5888 static tree
5889 build_real_cst_from_string (tree type, const char *str)
5891 REAL_VALUE_TYPE real;
5892 real_from_string (&real, str);
5893 return build_real (type, real);
5896 /* Append various "interesting" constants to OUT (e.g. NaN). */
5898 static void
5899 append_interesting_constants (auto_vec<tree> *out)
5901 out->safe_push (build_int_cst (integer_type_node, 0));
5902 out->safe_push (build_int_cst (integer_type_node, 42));
5903 out->safe_push (build_int_cst (unsigned_type_node, 0));
5904 out->safe_push (build_int_cst (unsigned_type_node, 42));
5905 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
5906 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
5907 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
5908 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
5909 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
5910 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
5911 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
5912 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
5915 /* Verify that tree_cmp is a well-behaved comparator for qsort, even
5916 if the underlying constants aren't comparable. */
5918 static void
5919 test_tree_cmp_on_constants ()
5921 auto_vec<tree> csts;
5922 append_interesting_constants (&csts);
5924 /* Try sorting every triple. */
5925 const unsigned num = csts.length ();
5926 for (unsigned i = 0; i < num; i++)
5927 for (unsigned j = 0; j < num; j++)
5928 for (unsigned k = 0; k < num; k++)
5930 auto_vec<tree> v (3);
5931 v.quick_push (csts[i]);
5932 v.quick_push (csts[j]);
5933 v.quick_push (csts[k]);
5934 v.qsort (tree_cmp);
5938 /* Implementation detail of the ASSERT_CONDITION_* macros. */
5940 void
5941 assert_condition (const location &loc,
5942 region_model &model,
5943 const svalue *lhs, tree_code op, const svalue *rhs,
5944 tristate expected)
5946 tristate actual = model.eval_condition (lhs, op, rhs);
5947 ASSERT_EQ_AT (loc, actual, expected);
5950 /* Implementation detail of the ASSERT_CONDITION_* macros. */
5952 void
5953 assert_condition (const location &loc,
5954 region_model &model,
5955 tree lhs, tree_code op, tree rhs,
5956 tristate expected)
5958 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
5959 ASSERT_EQ_AT (loc, actual, expected);
5962 /* Implementation detail of ASSERT_DUMP_TREE_EQ. */
5964 static void
5965 assert_dump_tree_eq (const location &loc, tree t, const char *expected)
5967 auto_fix_quotes sentinel;
5968 pretty_printer pp;
5969 pp_format_decoder (&pp) = default_tree_printer;
5970 dump_tree (&pp, t);
5971 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
5974 /* Assert that dump_tree (T) is EXPECTED. */
5976 #define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
5977 SELFTEST_BEGIN_STMT \
5978 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
5979 SELFTEST_END_STMT
5981 /* Implementation detail of ASSERT_DUMP_EQ. */
5983 static void
5984 assert_dump_eq (const location &loc,
5985 const region_model &model,
5986 bool summarize,
5987 const char *expected)
5989 auto_fix_quotes sentinel;
5990 pretty_printer pp;
5991 pp_format_decoder (&pp) = default_tree_printer;
5993 model.dump_to_pp (&pp, summarize, true);
5994 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
5997 /* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
5999 #define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
6000 SELFTEST_BEGIN_STMT \
6001 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
6002 SELFTEST_END_STMT
6004 /* Smoketest for region_model::dump_to_pp. */
6006 static void
6007 test_dump ()
6009 region_model_manager mgr;
6010 region_model model (&mgr);
6012 ASSERT_DUMP_EQ (model, false,
6013 "stack depth: 0\n"
6014 "m_called_unknown_fn: FALSE\n"
6015 "constraint_manager:\n"
6016 " equiv classes:\n"
6017 " constraints:\n");
6018 ASSERT_DUMP_EQ (model, true,
6019 "stack depth: 0\n"
6020 "m_called_unknown_fn: FALSE\n"
6021 "constraint_manager:\n"
6022 " equiv classes:\n"
6023 " constraints:\n");
6026 /* Helper function for selftests. Create a struct or union type named NAME,
6027 with the fields given by the FIELD_DECLS in FIELDS.
6028 If IS_STRUCT is true, create a RECORD_TYPE (aka a struct); otherwise
6029 create a UNION_TYPE. */
6031 static tree
6032 make_test_compound_type (const char *name, bool is_struct,
6033 const auto_vec<tree> *fields)
6035 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
6036 TYPE_NAME (t) = get_identifier (name);
6037 TYPE_SIZE (t) = 0;
6039 tree fieldlist = NULL;
6040 int i;
6041 tree field;
6042 FOR_EACH_VEC_ELT (*fields, i, field)
6044 gcc_assert (TREE_CODE (field) == FIELD_DECL);
6045 DECL_CONTEXT (field) = t;
6046 fieldlist = chainon (field, fieldlist);
6048 fieldlist = nreverse (fieldlist);
6049 TYPE_FIELDS (t) = fieldlist;
6051 layout_type (t);
6052 return t;
6055 /* Selftest fixture for creating the type "struct coord { int x; int y; };". */
6057 struct coord_test
6059 coord_test ()
6061 auto_vec<tree> fields;
6062 m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
6063 get_identifier ("x"), integer_type_node);
6064 fields.safe_push (m_x_field);
6065 m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
6066 get_identifier ("y"), integer_type_node);
6067 fields.safe_push (m_y_field);
6068 m_coord_type = make_test_compound_type ("coord", true, &fields);
6071 tree m_x_field;
6072 tree m_y_field;
6073 tree m_coord_type;
6076 /* Verify usage of a struct. */
6078 static void
6079 test_struct ()
6081 coord_test ct;
6083 tree c = build_global_decl ("c", ct.m_coord_type);
6084 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6085 c, ct.m_x_field, NULL_TREE);
6086 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6087 c, ct.m_y_field, NULL_TREE);
6089 tree int_17 = build_int_cst (integer_type_node, 17);
6090 tree int_m3 = build_int_cst (integer_type_node, -3);
6092 region_model_manager mgr;
6093 region_model model (&mgr);
6094 model.set_value (c_x, int_17, NULL);
6095 model.set_value (c_y, int_m3, NULL);
6097 /* Verify get_offset for "c.x". */
6099 const region *c_x_reg = model.get_lvalue (c_x, NULL);
6100 region_offset offset = c_x_reg->get_offset (&mgr);
6101 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
6102 ASSERT_EQ (offset.get_bit_offset (), 0);
6105 /* Verify get_offset for "c.y". */
6107 const region *c_y_reg = model.get_lvalue (c_y, NULL);
6108 region_offset offset = c_y_reg->get_offset (&mgr);
6109 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
6110 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
6114 /* Verify usage of an array element. */
6116 static void
6117 test_array_1 ()
6119 tree tlen = size_int (10);
6120 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
6122 tree a = build_global_decl ("a", arr_type);
6124 region_model_manager mgr;
6125 region_model model (&mgr);
6126 tree int_0 = build_int_cst (integer_type_node, 0);
6127 tree a_0 = build4 (ARRAY_REF, char_type_node,
6128 a, int_0, NULL_TREE, NULL_TREE);
6129 tree char_A = build_int_cst (char_type_node, 'A');
6130 model.set_value (a_0, char_A, NULL);
6133 /* Verify that region_model::get_representative_tree works as expected. */
6135 static void
6136 test_get_representative_tree ()
6138 region_model_manager mgr;
6140 /* STRING_CST. */
6142 tree string_cst = build_string (4, "foo");
6143 region_model m (&mgr);
6144 const svalue *str_sval = m.get_rvalue (string_cst, NULL);
6145 tree rep = m.get_representative_tree (str_sval);
6146 ASSERT_EQ (rep, string_cst);
6149 /* String literal. */
6151 tree string_cst_ptr = build_string_literal (4, "foo");
6152 region_model m (&mgr);
6153 const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
6154 tree rep = m.get_representative_tree (str_sval);
6155 ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
6158 /* Value of an element within an array. */
6160 tree tlen = size_int (10);
6161 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
6162 tree a = build_global_decl ("a", arr_type);
6163 placeholder_svalue test_sval (char_type_node, "test value");
6165 /* Value of a[3]. */
6167 test_region_model_context ctxt;
6168 region_model model (&mgr);
6169 tree int_3 = build_int_cst (integer_type_node, 3);
6170 tree a_3 = build4 (ARRAY_REF, char_type_node,
6171 a, int_3, NULL_TREE, NULL_TREE);
6172 const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
6173 model.set_value (a_3_reg, &test_sval, &ctxt);
6174 tree rep = model.get_representative_tree (&test_sval);
6175 ASSERT_DUMP_TREE_EQ (rep, "a[3]");
6178 /* Value of a[0]. */
6180 test_region_model_context ctxt;
6181 region_model model (&mgr);
6182 tree idx = build_int_cst (integer_type_node, 0);
6183 tree a_0 = build4 (ARRAY_REF, char_type_node,
6184 a, idx, NULL_TREE, NULL_TREE);
6185 const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
6186 model.set_value (a_0_reg, &test_sval, &ctxt);
6187 tree rep = model.get_representative_tree (&test_sval);
6188 ASSERT_DUMP_TREE_EQ (rep, "a[0]");
6192 /* Value of a field within a struct. */
6194 coord_test ct;
6196 tree c = build_global_decl ("c", ct.m_coord_type);
6197 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6198 c, ct.m_x_field, NULL_TREE);
6199 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6200 c, ct.m_y_field, NULL_TREE);
6202 test_region_model_context ctxt;
6204 /* Value of initial field. */
6206 region_model m (&mgr);
6207 const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
6208 placeholder_svalue test_sval_x (integer_type_node, "test x val");
6209 m.set_value (c_x_reg, &test_sval_x, &ctxt);
6210 tree rep = m.get_representative_tree (&test_sval_x);
6211 ASSERT_DUMP_TREE_EQ (rep, "c.x");
6214 /* Value of non-initial field. */
6216 region_model m (&mgr);
6217 const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
6218 placeholder_svalue test_sval_y (integer_type_node, "test y val");
6219 m.set_value (c_y_reg, &test_sval_y, &ctxt);
6220 tree rep = m.get_representative_tree (&test_sval_y);
6221 ASSERT_DUMP_TREE_EQ (rep, "c.y");
6226 /* Verify that calling region_model::get_rvalue repeatedly on the same
6227 tree constant retrieves the same svalue *. */
6229 static void
6230 test_unique_constants ()
6232 tree int_0 = build_int_cst (integer_type_node, 0);
6233 tree int_42 = build_int_cst (integer_type_node, 42);
6235 test_region_model_context ctxt;
6236 region_model_manager mgr;
6237 region_model model (&mgr);
6238 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
6239 ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
6240 model.get_rvalue (int_42, &ctxt));
6241 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
6242 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
6244 /* A "(const int)42" will be a different tree from "(int)42"... */
6245 tree const_int_type_node
6246 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
6247 tree const_int_42 = build_int_cst (const_int_type_node, 42);
6248 ASSERT_NE (int_42, const_int_42);
6249 /* It should have a different const_svalue. */
6250 const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
6251 const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
6252 ASSERT_NE (int_42_sval, const_int_42_sval);
6253 /* But they should compare as equal. */
6254 ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
6255 ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
6258 /* Verify that each type gets its own singleton unknown_svalue within a
6259 region_model_manager, and that NULL_TREE gets its own singleton. */
6261 static void
6262 test_unique_unknowns ()
6264 region_model_manager mgr;
6265 const svalue *unknown_int
6266 = mgr.get_or_create_unknown_svalue (integer_type_node);
6267 /* Repeated calls with the same type should get the same "unknown"
6268 svalue. */
6269 const svalue *unknown_int_2
6270 = mgr.get_or_create_unknown_svalue (integer_type_node);
6271 ASSERT_EQ (unknown_int, unknown_int_2);
6273 /* Different types (or the NULL type) should have different
6274 unknown_svalues. */
6275 const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
6276 ASSERT_NE (unknown_NULL_type, unknown_int);
6278 /* Repeated calls with NULL for the type should get the same "unknown"
6279 svalue. */
6280 const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
6281 ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
6284 /* Verify that initial_svalue instances are handled as expected. */
6286 static void
6287 test_initial_svalue_folding ()
6289 region_model_manager mgr;
6290 tree x = build_global_decl ("x", integer_type_node);
6291 tree y = build_global_decl ("y", integer_type_node);
6293 test_region_model_context ctxt;
6294 region_model model (&mgr);
6295 const svalue *x_init = model.get_rvalue (x, &ctxt);
6296 const svalue *y_init = model.get_rvalue (y, &ctxt);
6297 ASSERT_NE (x_init, y_init);
6298 const region *x_reg = model.get_lvalue (x, &ctxt);
6299 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
6303 /* Verify that unary ops are folded as expected. */
6305 static void
6306 test_unaryop_svalue_folding ()
6308 region_model_manager mgr;
6309 tree x = build_global_decl ("x", integer_type_node);
6310 tree y = build_global_decl ("y", integer_type_node);
6312 test_region_model_context ctxt;
6313 region_model model (&mgr);
6314 const svalue *x_init = model.get_rvalue (x, &ctxt);
6315 const svalue *y_init = model.get_rvalue (y, &ctxt);
6316 const region *x_reg = model.get_lvalue (x, &ctxt);
6317 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
6319 /* "(int)x" -> "x". */
6320 ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));
6322 /* "(void *)x" -> something other than "x". */
6323 ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));
6325 /* "!(x == y)" -> "x != y". */
6326 ASSERT_EQ (mgr.get_or_create_unaryop
6327 (boolean_type_node, TRUTH_NOT_EXPR,
6328 mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
6329 x_init, y_init)),
6330 mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
6331 x_init, y_init));
6332 /* "!(x > y)" -> "x <= y". */
6333 ASSERT_EQ (mgr.get_or_create_unaryop
6334 (boolean_type_node, TRUTH_NOT_EXPR,
6335 mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
6336 x_init, y_init)),
6337 mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
6338 x_init, y_init));
6341 /* Verify that binops on constant svalues are folded. */
6343 static void
6344 test_binop_svalue_folding ()
6346 #define NUM_CSTS 10
6347 tree cst_int[NUM_CSTS];
6348 region_model_manager mgr;
6349 const svalue *cst_sval[NUM_CSTS];
6350 for (int i = 0; i < NUM_CSTS; i++)
6352 cst_int[i] = build_int_cst (integer_type_node, i);
6353 cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
6354 ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
6355 ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
6358 for (int i = 0; i < NUM_CSTS; i++)
6359 for (int j = 0; j < NUM_CSTS; j++)
6361 if (i != j)
6362 ASSERT_NE (cst_sval[i], cst_sval[j]);
6363 if (i + j < NUM_CSTS)
6365 const svalue *sum
6366 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6367 cst_sval[i], cst_sval[j]);
6368 ASSERT_EQ (sum, cst_sval[i + j]);
6370 if (i - j >= 0)
6372 const svalue *difference
6373 = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
6374 cst_sval[i], cst_sval[j]);
6375 ASSERT_EQ (difference, cst_sval[i - j]);
6377 if (i * j < NUM_CSTS)
6379 const svalue *product
6380 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6381 cst_sval[i], cst_sval[j]);
6382 ASSERT_EQ (product, cst_sval[i * j]);
6384 const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
6385 cst_sval[i], cst_sval[j]);
6386 ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval[0]);
6387 const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
6388 cst_sval[i], cst_sval[j]);
6389 ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval[0]);
6390 // etc
6393 tree x = build_global_decl ("x", integer_type_node);
6395 test_region_model_context ctxt;
6396 region_model model (&mgr);
6397 const svalue *x_init = model.get_rvalue (x, &ctxt);
6399 /* PLUS_EXPR folding. */
6400 const svalue *x_init_plus_zero
6401 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6402 x_init, cst_sval[0]);
6403 ASSERT_EQ (x_init_plus_zero, x_init);
6404 const svalue *zero_plus_x_init
6405 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6406 cst_sval[0], x_init);
6407 ASSERT_EQ (zero_plus_x_init, x_init);
6409 /* MULT_EXPR folding. */
6410 const svalue *x_init_times_zero
6411 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6412 x_init, cst_sval[0]);
6413 ASSERT_EQ (x_init_times_zero, cst_sval[0]);
6414 const svalue *zero_times_x_init
6415 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6416 cst_sval[0], x_init);
6417 ASSERT_EQ (zero_times_x_init, cst_sval[0]);
6419 const svalue *x_init_times_one
6420 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6421 x_init, cst_sval[1]);
6422 ASSERT_EQ (x_init_times_one, x_init);
6423 const svalue *one_times_x_init
6424 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6425 cst_sval[1], x_init);
6426 ASSERT_EQ (one_times_x_init, x_init);
6428 // etc
6429 // TODO: do we want to use the match-and-simplify DSL for this?
6431 /* Verify that binops put any constants on the RHS. */
6432 const svalue *four_times_x_init
6433 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6434 cst_sval[4], x_init);
6435 const svalue *x_init_times_four
6436 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6437 x_init, cst_sval[4]);
6438 ASSERT_EQ (four_times_x_init, x_init_times_four);
6439 const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
6440 ASSERT_EQ (binop->get_op (), MULT_EXPR);
6441 ASSERT_EQ (binop->get_arg0 (), x_init);
6442 ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
6444 /* Verify that ((x + 1) + 1) == (x + 2). */
6445 const svalue *x_init_plus_one
6446 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6447 x_init, cst_sval[1]);
6448 const svalue *x_init_plus_two
6449 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6450 x_init, cst_sval[2]);
6451 const svalue *x_init_plus_one_plus_one
6452 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6453 x_init_plus_one, cst_sval[1]);
6454 ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
6456 /* Verify various binops on booleans. */
6458 const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
6459 const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
6460 const svalue *sval_unknown
6461 = mgr.get_or_create_unknown_svalue (boolean_type_node);
6462 const placeholder_svalue sval_placeholder (boolean_type_node, "v");
6463 for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
6465 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6466 sval_true, sval_unknown),
6467 sval_true);
6468 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6469 sval_false, sval_unknown),
6470 sval_unknown);
6471 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6472 sval_false, &sval_placeholder),
6473 &sval_placeholder);
6475 for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
6477 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6478 sval_false, sval_unknown),
6479 sval_false);
6480 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6481 sval_true, sval_unknown),
6482 sval_unknown);
6483 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6484 sval_true, &sval_placeholder),
6485 &sval_placeholder);
6490 /* Verify that sub_svalues are folded as expected. */
6492 static void
6493 test_sub_svalue_folding ()
6495 coord_test ct;
6496 tree c = build_global_decl ("c", ct.m_coord_type);
6497 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6498 c, ct.m_x_field, NULL_TREE);
6500 region_model_manager mgr;
6501 region_model model (&mgr);
6502 test_region_model_context ctxt;
6503 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
6505 /* Verify that sub_svalue of "unknown" simply
6506 yields an unknown. */
6508 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
6509 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
6510 unknown, c_x_reg);
6511 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
6512 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
6515 /* Get BIT within VAL as a symbolic value within MGR. */
6517 static const svalue *
6518 get_bit (region_model_manager *mgr,
6519 bit_offset_t bit,
6520 unsigned HOST_WIDE_INT val)
6522 const svalue *inner_svalue
6523 = mgr->get_or_create_int_cst (unsigned_type_node, val);
6524 return mgr->get_or_create_bits_within (boolean_type_node,
6525 bit_range (bit, 1),
6526 inner_svalue);
6529 /* Verify that bits_within_svalues are folded as expected. */
6531 static void
6532 test_bits_within_svalue_folding ()
6534 region_model_manager mgr;
6536 const svalue *zero = mgr.get_or_create_int_cst (boolean_type_node, 0);
6537 const svalue *one = mgr.get_or_create_int_cst (boolean_type_node, 1);
6540 const unsigned val = 0x0000;
6541 for (unsigned bit = 0; bit < 16; bit++)
6542 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6546 const unsigned val = 0x0001;
6547 ASSERT_EQ (get_bit (&mgr, 0, val), one);
6548 for (unsigned bit = 1; bit < 16; bit++)
6549 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6553 const unsigned val = 0x8000;
6554 for (unsigned bit = 0; bit < 15; bit++)
6555 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6556 ASSERT_EQ (get_bit (&mgr, 15, val), one);
6560 const unsigned val = 0xFFFF;
6561 for (unsigned bit = 0; bit < 16; bit++)
6562 ASSERT_EQ (get_bit (&mgr, bit, val), one);
6566 /* Test that region::descendent_of_p works as expected. */
6568 static void
6569 test_descendent_of_p ()
6571 region_model_manager mgr;
6572 const region *stack = mgr.get_stack_region ();
6573 const region *heap = mgr.get_heap_region ();
6574 const region *code = mgr.get_code_region ();
6575 const region *globals = mgr.get_globals_region ();
6577 /* descendent_of_p should return true when used on the region itself. */
6578 ASSERT_TRUE (stack->descendent_of_p (stack));
6579 ASSERT_FALSE (stack->descendent_of_p (heap));
6580 ASSERT_FALSE (stack->descendent_of_p (code));
6581 ASSERT_FALSE (stack->descendent_of_p (globals));
6583 tree x = build_global_decl ("x", integer_type_node);
6584 const region *x_reg = mgr.get_region_for_global (x);
6585 ASSERT_TRUE (x_reg->descendent_of_p (globals));
6587 /* A cast_region should be a descendent of the original region. */
6588 const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
6589 ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
6592 /* Verify that bit_range_region works as expected. */
6594 static void
6595 test_bit_range_regions ()
6597 tree x = build_global_decl ("x", integer_type_node);
6598 region_model_manager mgr;
6599 const region *x_reg = mgr.get_region_for_global (x);
6600 const region *byte0
6601 = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
6602 const region *byte1
6603 = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
6604 ASSERT_TRUE (byte0->descendent_of_p (x_reg));
6605 ASSERT_TRUE (byte1->descendent_of_p (x_reg));
6606 ASSERT_NE (byte0, byte1);
6609 /* Verify that simple assignments work as expected. */
6611 static void
6612 test_assignment ()
6614 tree int_0 = build_int_cst (integer_type_node, 0);
6615 tree x = build_global_decl ("x", integer_type_node);
6616 tree y = build_global_decl ("y", integer_type_node);
6618 /* "x == 0", then use of y, then "y = 0;". */
6619 region_model_manager mgr;
6620 region_model model (&mgr);
6621 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
6622 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
6623 model.set_value (model.get_lvalue (y, NULL),
6624 model.get_rvalue (int_0, NULL),
6625 NULL);
6626 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
6627 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
6630 /* Verify that compound assignments work as expected. */
6632 static void
6633 test_compound_assignment ()
6635 coord_test ct;
6637 tree c = build_global_decl ("c", ct.m_coord_type);
6638 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6639 c, ct.m_x_field, NULL_TREE);
6640 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6641 c, ct.m_y_field, NULL_TREE);
6642 tree d = build_global_decl ("d", ct.m_coord_type);
6643 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6644 d, ct.m_x_field, NULL_TREE);
6645 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6646 d, ct.m_y_field, NULL_TREE);
6648 tree int_17 = build_int_cst (integer_type_node, 17);
6649 tree int_m3 = build_int_cst (integer_type_node, -3);
6651 region_model_manager mgr;
6652 region_model model (&mgr);
6653 model.set_value (c_x, int_17, NULL);
6654 model.set_value (c_y, int_m3, NULL);
6656 /* Copy c to d. */
6657 const svalue *sval = model.get_rvalue (c, NULL);
6658 model.set_value (model.get_lvalue (d, NULL), sval, NULL);
6660 /* Check that the fields have the same svalues. */
6661 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
6662 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
6665 /* Verify the details of pushing and popping stack frames. */
6667 static void
6668 test_stack_frames ()
6670 tree int_42 = build_int_cst (integer_type_node, 42);
6671 tree int_10 = build_int_cst (integer_type_node, 10);
6672 tree int_5 = build_int_cst (integer_type_node, 5);
6673 tree int_0 = build_int_cst (integer_type_node, 0);
6675 auto_vec <tree> param_types;
6676 tree parent_fndecl = make_fndecl (integer_type_node,
6677 "parent_fn",
6678 param_types);
6679 allocate_struct_function (parent_fndecl, true);
6681 tree child_fndecl = make_fndecl (integer_type_node,
6682 "child_fn",
6683 param_types);
6684 allocate_struct_function (child_fndecl, true);
6686 /* "a" and "b" in the parent frame. */
6687 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6688 get_identifier ("a"),
6689 integer_type_node);
6690 DECL_CONTEXT (a) = parent_fndecl;
6691 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6692 get_identifier ("b"),
6693 integer_type_node);
6694 DECL_CONTEXT (b) = parent_fndecl;
6695 /* "x" and "y" in a child frame. */
6696 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6697 get_identifier ("x"),
6698 integer_type_node);
6699 DECL_CONTEXT (x) = child_fndecl;
6700 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6701 get_identifier ("y"),
6702 integer_type_node);
6703 DECL_CONTEXT (y) = child_fndecl;
6705 /* "p" global. */
6706 tree p = build_global_decl ("p", ptr_type_node);
6708 /* "q" global. */
6709 tree q = build_global_decl ("q", ptr_type_node);
6711 region_model_manager mgr;
6712 test_region_model_context ctxt;
6713 region_model model (&mgr);
6715 /* Push stack frame for "parent_fn". */
6716 const region *parent_frame_reg
6717 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
6718 NULL, &ctxt);
6719 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
6720 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
6721 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
6722 model.set_value (a_in_parent_reg,
6723 model.get_rvalue (int_42, &ctxt),
6724 &ctxt);
6725 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
6727 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
6728 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
6729 tristate (tristate::TS_TRUE));
6731 /* Push stack frame for "child_fn". */
6732 const region *child_frame_reg
6733 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
6734 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
6735 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
6736 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
6737 model.set_value (x_in_child_reg,
6738 model.get_rvalue (int_0, &ctxt),
6739 &ctxt);
6740 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
6742 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
6743 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
6744 tristate (tristate::TS_TRUE));
6746 /* Point a global pointer at a local in the child frame: p = &x. */
6747 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
6748 model.set_value (p_in_globals_reg,
6749 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
6750 &ctxt);
6751 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
6753 /* Point another global pointer at p: q = &p. */
6754 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
6755 model.set_value (q_in_globals_reg,
6756 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
6757 &ctxt);
6759 /* Test region::descendent_of_p. */
6760 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
6761 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
6762 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
6764 /* Pop the "child_fn" frame from the stack. */
6765 model.pop_frame (NULL, NULL, &ctxt);
6766 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
6767 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
6769 /* Verify that p (which was pointing at the local "x" in the popped
6770 frame) has been poisoned. */
6771 const svalue *new_p_sval = model.get_rvalue (p, NULL);
6772 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
6773 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
6774 POISON_KIND_POPPED_STACK);
6776 /* Verify that q still points to p, in spite of the region
6777 renumbering. */
6778 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
6779 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
6780 ASSERT_EQ (new_q_sval->maybe_get_region (),
6781 model.get_lvalue (p, &ctxt));
6783 /* Verify that top of stack has been updated. */
6784 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
6786 /* Verify locals in parent frame. */
6787 /* Verify "a" still has its value. */
6788 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
6789 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
6790 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
6791 int_42);
6792 /* Verify "b" still has its constraint. */
6793 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
6794 tristate (tristate::TS_TRUE));
6797 /* Verify that get_representative_path_var works as expected: that
6798 we can map from regions to parms and back within a recursive call
6799 stack. */
6801 static void
6802 test_get_representative_path_var ()
6804 auto_vec <tree> param_types;
6805 tree fndecl = make_fndecl (integer_type_node,
6806 "factorial",
6807 param_types);
6808 allocate_struct_function (fndecl, true);
6810 /* Parm "n". */
6811 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6812 get_identifier ("n"),
6813 integer_type_node);
6814 DECL_CONTEXT (n) = fndecl;
6816 region_model_manager mgr;
6817 test_region_model_context ctxt;
6818 region_model model (&mgr);
6820 /* Push 5 stack frames for "factorial", each with a param. */
6821 auto_vec<const region *> parm_regs;
6822 auto_vec<const svalue *> parm_svals;
6823 for (int depth = 0; depth < 5; depth++)
6825 const region *frame_n_reg
6826 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
6827 const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
6828 parm_regs.safe_push (parm_n_reg);
6830 ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
6831 const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
6832 parm_svals.safe_push (sval_n);
6835 /* Verify that we can recognize that the regions are the parms,
6836 at every depth. */
6837 for (int depth = 0; depth < 5; depth++)
6840 svalue_set visited;
6841 ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
6842 &visited),
6843 path_var (n, depth + 1));
6845 /* ...and that we can look up lvalues for locals for all frames,
6846 not just the top. */
6847 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
6848 parm_regs[depth]);
6849 /* ...and that we can locate the svalues. */
6851 svalue_set visited;
6852 ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
6853 &visited),
6854 path_var (n, depth + 1));
6859 /* Ensure that region_model::operator== works as expected. */
6861 static void
6862 test_equality_1 ()
6864 tree int_42 = build_int_cst (integer_type_node, 42);
6865 tree int_17 = build_int_cst (integer_type_node, 17);
6867 /* Verify that "empty" region_model instances are equal to each other. */
6868 region_model_manager mgr;
6869 region_model model0 (&mgr);
6870 region_model model1 (&mgr);
6871 ASSERT_EQ (model0, model1);
6873 /* Verify that setting state in model0 makes the models non-equal. */
6874 tree x = build_global_decl ("x", integer_type_node);
6875 model0.set_value (x, int_42, NULL);
6876 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6877 ASSERT_NE (model0, model1);
6879 /* Verify the copy-ctor. */
6880 region_model model2 (model0);
6881 ASSERT_EQ (model0, model2);
6882 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6883 ASSERT_NE (model1, model2);
6885 /* Verify that models obtained from copy-ctor are independently editable
6886 w/o affecting the original model. */
6887 model2.set_value (x, int_17, NULL);
6888 ASSERT_NE (model0, model2);
6889 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
6890 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6893 /* Verify that region models for
6894 x = 42; y = 113;
6895 and
6896 y = 113; x = 42;
6897 are equal. */
6899 static void
6900 test_canonicalization_2 ()
6902 tree int_42 = build_int_cst (integer_type_node, 42);
6903 tree int_113 = build_int_cst (integer_type_node, 113);
6904 tree x = build_global_decl ("x", integer_type_node);
6905 tree y = build_global_decl ("y", integer_type_node);
6907 region_model_manager mgr;
6908 region_model model0 (&mgr);
6909 model0.set_value (model0.get_lvalue (x, NULL),
6910 model0.get_rvalue (int_42, NULL),
6911 NULL);
6912 model0.set_value (model0.get_lvalue (y, NULL),
6913 model0.get_rvalue (int_113, NULL),
6914 NULL);
6916 region_model model1 (&mgr);
6917 model1.set_value (model1.get_lvalue (y, NULL),
6918 model1.get_rvalue (int_113, NULL),
6919 NULL);
6920 model1.set_value (model1.get_lvalue (x, NULL),
6921 model1.get_rvalue (int_42, NULL),
6922 NULL);
6924 ASSERT_EQ (model0, model1);
6927 /* Verify that constraints for
6928 x > 3 && y > 42
6929 and
6930 y > 42 && x > 3
6931 are equal after canonicalization. */
6933 static void
6934 test_canonicalization_3 ()
6936 tree int_3 = build_int_cst (integer_type_node, 3);
6937 tree int_42 = build_int_cst (integer_type_node, 42);
6938 tree x = build_global_decl ("x", integer_type_node);
6939 tree y = build_global_decl ("y", integer_type_node);
6941 region_model_manager mgr;
6942 region_model model0 (&mgr);
6943 model0.add_constraint (x, GT_EXPR, int_3, NULL);
6944 model0.add_constraint (y, GT_EXPR, int_42, NULL);
6946 region_model model1 (&mgr);
6947 model1.add_constraint (y, GT_EXPR, int_42, NULL);
6948 model1.add_constraint (x, GT_EXPR, int_3, NULL);
6950 model0.canonicalize ();
6951 model1.canonicalize ();
6952 ASSERT_EQ (model0, model1);
6955 /* Verify that we can canonicalize a model containing NaN and other real
6956 constants. */
6958 static void
6959 test_canonicalization_4 ()
6961 auto_vec<tree> csts;
6962 append_interesting_constants (&csts);
6964 region_model_manager mgr;
6965 region_model model (&mgr);
6967 for (tree cst : csts)
6968 model.get_rvalue (cst, NULL);
6970 model.canonicalize ();
6973 /* Assert that if we have two region_model instances
6974 with values VAL_A and VAL_B for EXPR, then they are
6975 mergeable. Write the merged model to *OUT_MERGED_MODEL,
6976 and the merged svalue ptr to *OUT_MERGED_SVALUE.
6977 If VAL_A or VAL_B is NULL_TREE, don't populate EXPR
6978 for that region_model. */
6980 static void
6981 assert_region_models_merge (tree expr, tree val_a, tree val_b,
6982 region_model *out_merged_model,
6983 const svalue **out_merged_svalue)
6985 region_model_manager *mgr = out_merged_model->get_manager ();
6986 program_point point (program_point::origin (*mgr));
6987 test_region_model_context ctxt;
6988 region_model model0 (mgr);
6989 region_model model1 (mgr);
6990 if (val_a)
6991 model0.set_value (model0.get_lvalue (expr, &ctxt),
6992 model0.get_rvalue (val_a, &ctxt),
6993 &ctxt);
6994 if (val_b)
6995 model1.set_value (model1.get_lvalue (expr, &ctxt),
6996 model1.get_rvalue (val_b, &ctxt),
6997 &ctxt);
6999 /* They should be mergeable. */
7000 ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
7001 *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
7004 /* Verify that we can merge region_model instances. */
7006 static void
7007 test_state_merging ()
7009 tree int_42 = build_int_cst (integer_type_node, 42);
7010 tree int_113 = build_int_cst (integer_type_node, 113);
7011 tree x = build_global_decl ("x", integer_type_node);
7012 tree y = build_global_decl ("y", integer_type_node);
7013 tree z = build_global_decl ("z", integer_type_node);
7014 tree p = build_global_decl ("p", ptr_type_node);
7016 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
7017 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
7019 auto_vec <tree> param_types;
7020 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
7021 allocate_struct_function (test_fndecl, true);
7023 /* Param "a". */
7024 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7025 get_identifier ("a"),
7026 integer_type_node);
7027 DECL_CONTEXT (a) = test_fndecl;
7028 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
7030 /* Param "q", a pointer. */
7031 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7032 get_identifier ("q"),
7033 ptr_type_node);
7034 DECL_CONTEXT (q) = test_fndecl;
7036 region_model_manager mgr;
7037 program_point point (program_point::origin (mgr));
7040 region_model model0 (&mgr);
7041 region_model model1 (&mgr);
7042 region_model merged (&mgr);
7043 /* Verify empty models can be merged. */
7044 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7045 ASSERT_EQ (model0, merged);
7048 /* Verify that we can merge two contradictory constraints on the
7049 value for a global. */
7050 /* TODO: verify that the merged model doesn't have a value for
7051 the global. */
7053 region_model model0 (&mgr);
7054 region_model model1 (&mgr);
7055 region_model merged (&mgr);
7056 test_region_model_context ctxt;
7057 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7058 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
7059 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7060 ASSERT_NE (model0, merged);
7061 ASSERT_NE (model1, merged);
7064 /* Verify handling of a PARM_DECL. */
7066 test_region_model_context ctxt;
7067 region_model model0 (&mgr);
7068 region_model model1 (&mgr);
7069 ASSERT_EQ (model0.get_stack_depth (), 0);
7070 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
7071 ASSERT_EQ (model0.get_stack_depth (), 1);
7072 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
7074 placeholder_svalue test_sval (integer_type_node, "test sval");
7075 model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
7076 model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
7077 ASSERT_EQ (model0, model1);
7079 /* They should be mergeable, and the result should be the same. */
7080 region_model merged (&mgr);
7081 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7082 ASSERT_EQ (model0, merged);
7083 /* In particular, "a" should have the placeholder value. */
7084 ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
7087 /* Verify handling of a global. */
7089 test_region_model_context ctxt;
7090 region_model model0 (&mgr);
7091 region_model model1 (&mgr);
7093 placeholder_svalue test_sval (integer_type_node, "test sval");
7094 model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
7095 model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
7096 ASSERT_EQ (model0, model1);
7098 /* They should be mergeable, and the result should be the same. */
7099 region_model merged (&mgr);
7100 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7101 ASSERT_EQ (model0, merged);
7102 /* In particular, "x" should have the placeholder value. */
7103 ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
7106 /* Use global-handling to verify various combinations of values. */
7108 /* Two equal constant values. */
7110 region_model merged (&mgr);
7111 const svalue *merged_x_sval;
7112 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
7114 /* In particular, there should be a constant value for "x". */
7115 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
7116 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
7117 int_42);
7120 /* Two non-equal constant values. */
7122 region_model merged (&mgr);
7123 const svalue *merged_x_sval;
7124 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
7126 /* In particular, there should be a "widening" value for "x". */
7127 ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
7130 /* Initial and constant. */
7132 region_model merged (&mgr);
7133 const svalue *merged_x_sval;
7134 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
7136 /* In particular, there should be an unknown value for "x". */
7137 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7140 /* Constant and initial. */
7142 region_model merged (&mgr);
7143 const svalue *merged_x_sval;
7144 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
7146 /* In particular, there should be an unknown value for "x". */
7147 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7150 /* Unknown and constant. */
7151 // TODO
7153 /* Pointers: NULL and NULL. */
7154 // TODO
7156 /* Pointers: NULL and non-NULL. */
7157 // TODO
7159 /* Pointers: non-NULL and non-NULL: ptr to a local. */
7161 region_model model0 (&mgr);
7162 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7163 model0.set_value (model0.get_lvalue (p, NULL),
7164 model0.get_rvalue (addr_of_a, NULL), NULL);
7166 region_model model1 (model0);
7167 ASSERT_EQ (model0, model1);
7169 /* They should be mergeable, and the result should be the same. */
7170 region_model merged (&mgr);
7171 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7172 ASSERT_EQ (model0, merged);
7175 /* Pointers: non-NULL and non-NULL: ptr to a global. */
7177 region_model merged (&mgr);
7178 /* p == &y in both input models. */
7179 const svalue *merged_p_sval;
7180 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
7181 &merged_p_sval);
7183 /* We should get p == &y in the merged model. */
7184 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
7185 const region_svalue *merged_p_ptr
7186 = merged_p_sval->dyn_cast_region_svalue ();
7187 const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
7188 ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
7191 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
7193 region_model merged (&mgr);
7194 /* x == &y vs x == &z in the input models; these are actually casts
7195 of the ptrs to "int". */
7196 const svalue *merged_x_sval;
7197 // TODO:
7198 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
7199 &merged_x_sval);
7201 /* We should get x == unknown in the merged model. */
7202 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7205 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
7207 test_region_model_context ctxt;
7208 region_model model0 (&mgr);
7209 tree size = build_int_cst (size_type_node, 1024);
7210 const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
7211 const region *new_reg
7212 = model0.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
7213 const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
7214 model0.set_value (model0.get_lvalue (p, &ctxt),
7215 ptr_sval, &ctxt);
7217 region_model model1 (model0);
7219 ASSERT_EQ (model0, model1);
7221 region_model merged (&mgr);
7222 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7224 /* The merged model ought to be identical. */
7225 ASSERT_EQ (model0, merged);
7228 /* Two regions sharing the same placeholder svalue should continue sharing
7229 it after self-merger. */
7231 test_region_model_context ctxt;
7232 region_model model0 (&mgr);
7233 placeholder_svalue placeholder_sval (integer_type_node, "test");
7234 model0.set_value (model0.get_lvalue (x, &ctxt),
7235 &placeholder_sval, &ctxt);
7236 model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
7237 region_model model1 (model0);
7239 /* They should be mergeable, and the result should be the same. */
7240 region_model merged (&mgr);
7241 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7242 ASSERT_EQ (model0, merged);
7244 /* In particular, we should have x == y. */
7245 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
7246 tristate (tristate::TS_TRUE));
7250 region_model model0 (&mgr);
7251 region_model model1 (&mgr);
7252 test_region_model_context ctxt;
7253 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7254 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
7255 region_model merged (&mgr);
7256 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7260 region_model model0 (&mgr);
7261 region_model model1 (&mgr);
7262 test_region_model_context ctxt;
7263 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7264 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
7265 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
7266 region_model merged (&mgr);
7267 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7270 // TODO: what can't we merge? need at least one such test
7272 /* TODO: various things
7273 - heap regions
7274 - value merging:
7275 - every combination, but in particular
7276 - pairs of regions
7279 /* Views. */
7281 test_region_model_context ctxt;
7282 region_model model0 (&mgr);
7284 const region *x_reg = model0.get_lvalue (x, &ctxt);
7285 const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
7286 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
7288 region_model model1 (model0);
7289 ASSERT_EQ (model1, model0);
7291 /* They should be mergeable, and the result should be the same. */
7292 region_model merged (&mgr);
7293 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7296 /* Verify that we can merge a model in which a local in an older stack
7297 frame points to a local in a more recent stack frame. */
7299 region_model model0 (&mgr);
7300 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7301 const region *q_in_first_frame = model0.get_lvalue (q, NULL);
7303 /* Push a second frame. */
7304 const region *reg_2nd_frame
7305 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7307 /* Have a pointer in the older frame point to a local in the
7308 more recent frame. */
7309 const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
7310 model0.set_value (q_in_first_frame, sval_ptr, NULL);
7312 /* Verify that it's pointing at the newer frame. */
7313 const region *reg_pointee = sval_ptr->maybe_get_region ();
7314 ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
7316 model0.canonicalize ();
7318 region_model model1 (model0);
7319 ASSERT_EQ (model0, model1);
7321 /* They should be mergeable, and the result should be the same
7322 (after canonicalization, at least). */
7323 region_model merged (&mgr);
7324 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7325 merged.canonicalize ();
7326 ASSERT_EQ (model0, merged);
7329 /* Verify that we can merge a model in which a local points to a global. */
7331 region_model model0 (&mgr);
7332 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7333 model0.set_value (model0.get_lvalue (q, NULL),
7334 model0.get_rvalue (addr_of_y, NULL), NULL);
7336 region_model model1 (model0);
7337 ASSERT_EQ (model0, model1);
7339 /* They should be mergeable, and the result should be the same
7340 (after canonicalization, at least). */
7341 region_model merged (&mgr);
7342 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7343 ASSERT_EQ (model0, merged);
7347 /* Verify that constraints are correctly merged when merging region_model
7348 instances. */
7350 static void
7351 test_constraint_merging ()
7353 tree int_0 = build_int_cst (integer_type_node, 0);
7354 tree int_5 = build_int_cst (integer_type_node, 5);
7355 tree x = build_global_decl ("x", integer_type_node);
7356 tree y = build_global_decl ("y", integer_type_node);
7357 tree z = build_global_decl ("z", integer_type_node);
7358 tree n = build_global_decl ("n", integer_type_node);
7360 region_model_manager mgr;
7361 test_region_model_context ctxt;
7363 /* model0: 0 <= (x == y) < n. */
7364 region_model model0 (&mgr);
7365 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
7366 model0.add_constraint (x, GE_EXPR, int_0, NULL);
7367 model0.add_constraint (x, LT_EXPR, n, NULL);
7369 /* model1: z != 5 && (0 <= x < n). */
7370 region_model model1 (&mgr);
7371 model1.add_constraint (z, NE_EXPR, int_5, NULL);
7372 model1.add_constraint (x, GE_EXPR, int_0, NULL);
7373 model1.add_constraint (x, LT_EXPR, n, NULL);
7375 /* They should be mergeable; the merged constraints should
7376 be: (0 <= x < n). */
7377 program_point point (program_point::origin (mgr));
7378 region_model merged (&mgr);
7379 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7381 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
7382 tristate (tristate::TS_TRUE));
7383 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
7384 tristate (tristate::TS_TRUE));
7386 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
7387 tristate (tristate::TS_UNKNOWN));
7388 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
7389 tristate (tristate::TS_UNKNOWN));
7392 /* Verify that widening_svalue::eval_condition_without_cm works as
7393 expected. */
7395 static void
7396 test_widening_constraints ()
7398 region_model_manager mgr;
7399 function_point point (program_point::origin (mgr).get_function_point ());
7400 tree int_0 = build_int_cst (integer_type_node, 0);
7401 tree int_m1 = build_int_cst (integer_type_node, -1);
7402 tree int_1 = build_int_cst (integer_type_node, 1);
7403 tree int_256 = build_int_cst (integer_type_node, 256);
7404 test_region_model_context ctxt;
7405 const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
7406 const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
7407 const svalue *w_zero_then_one_sval
7408 = mgr.get_or_create_widening_svalue (integer_type_node, point,
7409 int_0_sval, int_1_sval);
7410 const widening_svalue *w_zero_then_one
7411 = w_zero_then_one_sval->dyn_cast_widening_svalue ();
7412 ASSERT_EQ (w_zero_then_one->get_direction (),
7413 widening_svalue::DIR_ASCENDING);
7414 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
7415 tristate::TS_FALSE);
7416 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
7417 tristate::TS_FALSE);
7418 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
7419 tristate::TS_UNKNOWN);
7420 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
7421 tristate::TS_UNKNOWN);
7423 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
7424 tristate::TS_FALSE);
7425 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
7426 tristate::TS_UNKNOWN);
7427 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
7428 tristate::TS_UNKNOWN);
7429 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
7430 tristate::TS_UNKNOWN);
7432 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
7433 tristate::TS_TRUE);
7434 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
7435 tristate::TS_UNKNOWN);
7436 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
7437 tristate::TS_UNKNOWN);
7438 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
7439 tristate::TS_UNKNOWN);
7441 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
7442 tristate::TS_TRUE);
7443 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
7444 tristate::TS_TRUE);
7445 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
7446 tristate::TS_UNKNOWN);
7447 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
7448 tristate::TS_UNKNOWN);
7450 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
7451 tristate::TS_FALSE);
7452 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
7453 tristate::TS_UNKNOWN);
7454 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
7455 tristate::TS_UNKNOWN);
7456 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
7457 tristate::TS_UNKNOWN);
7459 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
7460 tristate::TS_TRUE);
7461 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
7462 tristate::TS_UNKNOWN);
7463 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
7464 tristate::TS_UNKNOWN);
7465 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
7466 tristate::TS_UNKNOWN);
7469 /* Verify merging constraints for states simulating successive iterations
7470 of a loop.
7471 Simulate:
7472 for (i = 0; i < 256; i++)
7473 [...body...]
7474 i.e. this gimple:
7475 i_15 = 0;
7476 goto <bb 4>;
7478 <bb 4> :
7479 i_11 = PHI <i_15(2), i_23(3)>
7480 if (i_11 <= 255)
7481 goto <bb 3>;
7482 else
7483 goto [AFTER LOOP]
7485 <bb 3> :
7486 [LOOP BODY]
7487 i_23 = i_11 + 1;
7489 and thus these ops (and resultant states):
7490 i_11 = PHI()
7491 {i_11: 0}
7492 add_constraint (i_11 <= 255) [for the true edge]
7493 {i_11: 0} [constraint was a no-op]
7494 i_23 = i_11 + 1;
7495 {i_22: 1}
7496 i_11 = PHI()
7497 {i_11: WIDENED (at phi, 0, 1)}
7498 add_constraint (i_11 <= 255) [for the true edge]
7499 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
7500 i_23 = i_11 + 1;
7501 {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
7502 i_11 = PHI(); merge with state at phi above
7503 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
7504 [changing meaning of "WIDENED" here]
7505 if (i_11 <= 255)
7506 T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
7507 F: {i_11: 256}
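/* Note: "WIDENED (at phi, 0, 1)" above denotes a widening_svalue with
   direction DIR_ASCENDING (as exercised in the preceding test): a
   symbolic value first seen as 0, then as 1, and assumed to keep
   ascending on subsequent iterations.  */
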
static void
test_iteration_1 ()
{
  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree int_257 = build_int_cst (integer_type_node, 257);
  tree i = build_global_decl ("i", integer_type_node);

  test_region_model_context ctxt;

  /* model0: i: 0.  */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1.  */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Should merge "i" to a widened value.  */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256.  */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Try merging with the initial state.  */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges.  */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case.  */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged.  */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;"  */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state.  */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again.  */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);

  ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
}

/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
   all cast pointers to that region are also known to be non-NULL.  */
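/* A rough sketch of the C code being simulated (hypothetical names;
   "n" stands in for an unknown size):

     void *p = malloc (n);
     char *q = (char *)p;
     if (p != NULL)
       {
	 ... both "p" and "q" should be known to be non-NULL here ...
       }
 */
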
static void
test_malloc_constraints ()
{
  region_model_manager mgr;
  region_model model (&mgr);
  tree p = build_global_decl ("p", ptr_type_node);
  tree char_star = build_pointer_type (char_type_node);
  tree q = build_global_decl ("q", char_star);
  tree null_ptr = build_int_cst (ptr_type_node, 0);

  const svalue *size_in_bytes
    = mgr.get_or_create_unknown_svalue (size_type_node);
  const region *reg
    = model.get_or_create_region_for_heap_alloc (size_in_bytes, NULL);
  const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
  model.set_value (model.get_lvalue (p, NULL), sval, NULL);
  /* "q = (char *)p;": q now points to the same heap region as p.  */
  model.set_value (q, p, NULL);

  ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);

  model.add_constraint (p, NE_EXPR, null_ptr, NULL);

  ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
}

/* Smoketest of getting and setting the value of a variable.  */

static void
test_var ()
{
  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);

  const region *i_reg = model.get_lvalue (i, NULL);
  ASSERT_EQ (i_reg->get_kind (), RK_DECL);

  /* Reading "i" should give a symbolic "initial value".  */
  const svalue *sval_init = model.get_rvalue (i, NULL);
  ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
  ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
  /* ...and doing it again should give the same "initial value".  */
  ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);

  /* "i = 17;".  */
  model.set_value (i, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_17, NULL));

  /* "i = -3;".  */
  model.set_value (i, int_m3, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_m3, NULL));

  /* Verify get_offset for "i".  */
  {
    region_offset offset = i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), i_reg);
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }
}

static void
test_array_2 ()
{
  /* "int arr[10];"  */
  tree tlen = size_int (10);
  tree arr_type
    = build_array_type (integer_type_node, build_index_type (tlen));
  tree arr = build_global_decl ("arr", arr_type);

  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);

  tree arr_0 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_0, NULL_TREE, NULL_TREE);
  tree arr_1 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_1, NULL_TREE, NULL_TREE);
  tree arr_i = build4 (ARRAY_REF, integer_type_node,
		       arr, i, NULL_TREE, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* "arr[0] = 17;".  */
  model.set_value (arr_0, int_17, NULL);
  /* "arr[1] = -3;".  */
  model.set_value (arr_1, int_m3, NULL);

  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));

  /* Overwrite a pre-existing binding: "arr[1] = 42;".  */
  model.set_value (arr_1, int_42, NULL);
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));

  /* Verify get_offset for "arr[0]".  */
  {
    const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
    region_offset offset = arr_0_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "arr[1]".  */
  {
    const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
    region_offset offset = arr_1_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }

  /* Verify get_offset for "arr[i]": a symbolic index gives a symbolic
     byte offset.  */
  {
    const region *arr_i_reg = model.get_lvalue (arr_i, NULL);
    region_offset offset = arr_i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_symbolic_byte_offset ()->get_kind (), SK_BINOP);
  }

  /* "arr[i] = i;" - this should remove the earlier bindings.  */
  model.set_value (arr_i, i, NULL);
  ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
  ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);

  /* "arr[0] = 17;" - this should remove the arr[i] binding.  */
  model.set_value (arr_0, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
}

/* Smoketest of dereferencing a pointer via MEM_REF.  */
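/* At the gimple level, a dereference such as "*p" is expressed as a
   MEM_REF of the pointer with a byte offset of 0; the test builds
   exactly that tree below.  */
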
static void
test_mem_ref ()
{
  /*
    x = 17;
    p = &x;
  */

  tree x = build_global_decl ("x", integer_type_node);
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);

  region_model_manager mgr;
  region_model model (&mgr);

  /* "x = 17;".  */
  model.set_value (x, int_17, NULL);

  /* "p = &x;".  */
  model.set_value (p, addr_of_x, NULL);

  /* Reading "*p" should give back the constant 17.  */
  const svalue *sval = model.get_rvalue (star_p, NULL);
  ASSERT_EQ (sval->maybe_get_constant (), int_17);
}

/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
   Analogous to this code:
     void test_6 (int a[10])
     {
       __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       a[3] = 42;
       __analyzer_eval (a[3] == 42); [should be TRUE]
     }
   from data-model-1.c, which looks like this at the gimple level:
       # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       int *_1 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _2 = *_1;             # MEM_REF
       _Bool _3 = _2 == 42;
       int _4 = (int) _3;
       __analyzer_eval (_4);

       # a[3] = 42;
       int *_5 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       *_5 = 42;                 # MEM_REF

       # __analyzer_eval (a[3] == 42); [should be TRUE]
       int *_6 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _7 = *_6;             # MEM_REF
       _Bool _8 = _7 == 42;
       int _9 = (int) _8;
       __analyzer_eval (_9);  */
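/* The byte offset of 12 in the gimple above corresponds to a[3] with
   4-byte "int" elements (3 * 4 == 12); the test below builds the
   equivalent POINTER_PLUS_EXPR and MEM_REF trees directly.  */
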
static void
test_POINTER_PLUS_EXPR_then_MEM_REF ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree a = build_global_decl ("a", int_star);
  tree offset_12 = build_int_cst (size_type_node, 12);
  tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree mem_ref = build2 (MEM_REF, integer_type_node,
			 pointer_plus_expr, offset_0);
  region_model_manager mgr;
  region_model m (&mgr);

  tree int_42 = build_int_cst (integer_type_node, 42);
  m.set_value (mem_ref, int_42, NULL);
  ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
}

/* Verify that malloc works.  */
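/* This also verifies that get_capacity reports the symbolic size in
   bytes ("n * 4" below) that was passed to the heap allocation.  */
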
static void
test_malloc ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* "p = malloc (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg
    = model.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);
}

/* Verify that alloca works.  */
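/* A rough sketch of the code being simulated (hypothetical; "p" and "n"
   are globals):

     void test_fn (void)
     {
       p = alloca (n * 4);
     }

   Once test_fn's frame is popped, "p" points into a stale frame, so
   reading it should yield a poisoned value.  */
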
static void
test_alloca ()
{
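  /* An alloca-ed region lives within a stack frame, so the test first
     needs a function whose frame can be pushed (and later popped).  */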
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "test_fn",
			     param_types);
  allocate_struct_function (fndecl, true);

  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame.  */
  const region *frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
			NULL, &ctxt);
  /* "p = alloca (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
  ASSERT_EQ (reg->get_parent_region (), frame_reg);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);

  /* Verify that the pointers to the alloca region are replaced by
     poisoned values when the frame is popped.  */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
}

/* Verify that svalue::involves_p works.  */
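/* involves_p reports whether one symbolic value is built (transitively)
   in terms of another: e.g. below, the initial value of "*p" involves
   the initial value of "p", but not that of "q".  */
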
static void
test_involves_p ()
{
  region_model_manager mgr;
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree q = build_global_decl ("q", int_star);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *p_init = model.get_rvalue (p, &ctxt);
  const svalue *q_init = model.get_rvalue (q, &ctxt);

  ASSERT_TRUE (p_init->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (q_init));

  const region *star_p_reg = mgr.get_symbolic_region (p_init);
  const region *star_q_reg = mgr.get_symbolic_region (q_init);

  const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
  const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);

  ASSERT_TRUE (init_star_p->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (init_star_p));
  ASSERT_FALSE (init_star_p->involves_p (q_init));
  ASSERT_TRUE (init_star_q->involves_p (q_init));
  ASSERT_FALSE (init_star_q->involves_p (p_init));
}

/* Run all of the selftests within this file.  */

void
analyzer_region_model_cc_tests ()
{
  test_tree_cmp_on_constants ();
  test_dump ();
  test_struct ();
  test_array_1 ();
  test_get_representative_tree ();
  test_unique_constants ();
  test_unique_unknowns ();
  test_initial_svalue_folding ();
  test_unaryop_svalue_folding ();
  test_binop_svalue_folding ();
  test_sub_svalue_folding ();
  test_bits_within_svalue_folding ();
  test_descendent_of_p ();
  test_bit_range_regions ();
  test_assignment ();
  test_compound_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();
  test_equality_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  test_state_merging ();
  test_constraint_merging ();
  test_widening_constraints ();
  test_iteration_1 ();
  test_malloc_constraints ();
  test_var ();
  test_array_2 ();
  test_mem_ref ();
  test_POINTER_PLUS_EXPR_then_MEM_REF ();
  test_malloc ();
  test_alloca ();
  test_involves_p ();
}

} // namespace selftest

#endif /* CHECKING_P */

} // namespace ana

#endif /* #if ENABLE_ANALYZER */