d: Add test for PR d/108167 to the testsuite [PR108167]
[official-gcc.git] / gcc / analyzer / region-model.cc
blobbf07cec2884fb2e0f6a63a2909127d84d83224dd
1 /* Classes for modeling the state of memory.
2 Copyright (C) 2019-2023 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #define INCLUDE_MEMORY
23 #include "system.h"
24 #include "coretypes.h"
25 #include "make-unique.h"
26 #include "tree.h"
27 #include "function.h"
28 #include "basic-block.h"
29 #include "gimple.h"
30 #include "gimple-iterator.h"
31 #include "diagnostic-core.h"
32 #include "graphviz.h"
33 #include "options.h"
34 #include "cgraph.h"
35 #include "tree-dfa.h"
36 #include "stringpool.h"
37 #include "convert.h"
38 #include "target.h"
39 #include "fold-const.h"
40 #include "tree-pretty-print.h"
41 #include "diagnostic-color.h"
42 #include "diagnostic-metadata.h"
43 #include "bitmap.h"
44 #include "selftest.h"
45 #include "analyzer/analyzer.h"
46 #include "analyzer/analyzer-logging.h"
47 #include "ordered-hash-map.h"
48 #include "options.h"
49 #include "cgraph.h"
50 #include "cfg.h"
51 #include "analyzer/supergraph.h"
52 #include "sbitmap.h"
53 #include "analyzer/call-string.h"
54 #include "analyzer/program-point.h"
55 #include "analyzer/store.h"
56 #include "analyzer/region-model.h"
57 #include "analyzer/constraint-manager.h"
58 #include "diagnostic-event-id.h"
59 #include "analyzer/sm.h"
60 #include "diagnostic-event-id.h"
61 #include "analyzer/sm.h"
62 #include "analyzer/pending-diagnostic.h"
63 #include "analyzer/region-model-reachability.h"
64 #include "analyzer/analyzer-selftests.h"
65 #include "analyzer/program-state.h"
66 #include "analyzer/call-summary.h"
67 #include "stor-layout.h"
68 #include "attribs.h"
69 #include "tree-object-size.h"
70 #include "gimple-ssa.h"
71 #include "tree-phinodes.h"
72 #include "tree-ssa-operands.h"
73 #include "ssa-iterators.h"
74 #include "calls.h"
75 #include "is-a.h"
76 #include "gcc-rich-location.h"
77 #include "analyzer/checker-event.h"
78 #include "analyzer/checker-path.h"
79 #include "analyzer/feasible-graph.h"
81 #if ENABLE_ANALYZER
83 namespace ana {
85 /* Dump T to PP in language-independent form, for debugging/logging/dumping
86 purposes. */
88 void
89 dump_tree (pretty_printer *pp, tree t)
91 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
94 /* Dump T to PP in language-independent form in quotes, for
95 debugging/logging/dumping purposes. */
97 void
98 dump_quoted_tree (pretty_printer *pp, tree t)
100 pp_begin_quote (pp, pp_show_color (pp));
101 dump_tree (pp, t);
102 pp_end_quote (pp, pp_show_color (pp));
105 /* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
106 calls within other pp_printf calls.
108 default_tree_printer handles 'T' and some other codes by calling
109 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
110 dump_generic_node calls pp_printf in various places, leading to
111 garbled output.
113 Ideally pp_printf could be made to be reentrant, but in the meantime
114 this function provides a workaround. */
116 void
117 print_quoted_type (pretty_printer *pp, tree t)
119 pp_begin_quote (pp, pp_show_color (pp));
120 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
121 pp_end_quote (pp, pp_show_color (pp));
124 /* class region_to_value_map. */
126 /* Assignment operator for region_to_value_map. */
128 region_to_value_map &
129 region_to_value_map::operator= (const region_to_value_map &other)
131 m_hash_map.empty ();
132 for (auto iter : other.m_hash_map)
134 const region *reg = iter.first;
135 const svalue *sval = iter.second;
136 m_hash_map.put (reg, sval);
138 return *this;
141 /* Equality operator for region_to_value_map. */
143 bool
144 region_to_value_map::operator== (const region_to_value_map &other) const
146 if (m_hash_map.elements () != other.m_hash_map.elements ())
147 return false;
149 for (auto iter : *this)
151 const region *reg = iter.first;
152 const svalue *sval = iter.second;
153 const svalue * const *other_slot = other.get (reg);
154 if (other_slot == NULL)
155 return false;
156 if (sval != *other_slot)
157 return false;
160 return true;
163 /* Dump this object to PP. */
165 void
166 region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
167 bool multiline) const
169 auto_vec<const region *> regs;
170 for (iterator iter = begin (); iter != end (); ++iter)
171 regs.safe_push ((*iter).first);
172 regs.qsort (region::cmp_ptr_ptr);
173 if (multiline)
174 pp_newline (pp);
175 else
176 pp_string (pp, " {");
177 unsigned i;
178 const region *reg;
179 FOR_EACH_VEC_ELT (regs, i, reg)
181 if (multiline)
182 pp_string (pp, " ");
183 else if (i > 0)
184 pp_string (pp, ", ");
185 reg->dump_to_pp (pp, simple);
186 pp_string (pp, ": ");
187 const svalue *sval = *get (reg);
188 sval->dump_to_pp (pp, true);
189 if (multiline)
190 pp_newline (pp);
192 if (!multiline)
193 pp_string (pp, "}");
196 /* Dump this object to stderr. */
198 DEBUG_FUNCTION void
199 region_to_value_map::dump (bool simple) const
201 pretty_printer pp;
202 pp_format_decoder (&pp) = default_tree_printer;
203 pp_show_color (&pp) = pp_show_color (global_dc->printer);
204 pp.buffer->stream = stderr;
205 dump_to_pp (&pp, simple, true);
206 pp_newline (&pp);
207 pp_flush (&pp);
211 /* Attempt to merge THIS with OTHER, writing the result
212 to OUT.
214 For now, write (region, value) mappings that are in common between THIS
215 and OTHER to OUT, effectively taking the intersection.
217 Reject merger of different values. */
219 bool
220 region_to_value_map::can_merge_with_p (const region_to_value_map &other,
221 region_to_value_map *out) const
223 for (auto iter : *this)
225 const region *iter_reg = iter.first;
226 const svalue *iter_sval = iter.second;
227 const svalue * const * other_slot = other.get (iter_reg);
228 if (other_slot)
230 if (iter_sval == *other_slot)
231 out->put (iter_reg, iter_sval);
232 else
233 return false;
236 return true;
239 /* Purge any state involving SVAL. */
241 void
242 region_to_value_map::purge_state_involving (const svalue *sval)
244 auto_vec<const region *> to_purge;
245 for (auto iter : *this)
247 const region *iter_reg = iter.first;
248 const svalue *iter_sval = iter.second;
249 if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
250 to_purge.safe_push (iter_reg);
252 for (auto iter : to_purge)
253 m_hash_map.remove (iter);
256 /* class region_model. */
258 /* Ctor for region_model: construct an "empty" model. */
260 region_model::region_model (region_model_manager *mgr)
261 : m_mgr (mgr), m_store (), m_current_frame (NULL),
262 m_dynamic_extents ()
264 m_constraints = new constraint_manager (mgr);
267 /* region_model's copy ctor. */
269 region_model::region_model (const region_model &other)
270 : m_mgr (other.m_mgr), m_store (other.m_store),
271 m_constraints (new constraint_manager (*other.m_constraints)),
272 m_current_frame (other.m_current_frame),
273 m_dynamic_extents (other.m_dynamic_extents)
277 /* region_model's dtor. */
279 region_model::~region_model ()
281 delete m_constraints;
284 /* region_model's assignment operator. */
286 region_model &
287 region_model::operator= (const region_model &other)
289 /* m_mgr is const. */
290 gcc_assert (m_mgr == other.m_mgr);
292 m_store = other.m_store;
294 delete m_constraints;
295 m_constraints = new constraint_manager (*other.m_constraints);
297 m_current_frame = other.m_current_frame;
299 m_dynamic_extents = other.m_dynamic_extents;
301 return *this;
304 /* Equality operator for region_model.
306 Amongst other things this directly compares the stores and the constraint
307 managers, so for this to be meaningful both this and OTHER should
308 have been canonicalized. */
310 bool
311 region_model::operator== (const region_model &other) const
313 /* We can only compare instances that use the same manager. */
314 gcc_assert (m_mgr == other.m_mgr);
316 if (m_store != other.m_store)
317 return false;
319 if (*m_constraints != *other.m_constraints)
320 return false;
322 if (m_current_frame != other.m_current_frame)
323 return false;
325 if (m_dynamic_extents != other.m_dynamic_extents)
326 return false;
328 gcc_checking_assert (hash () == other.hash ());
330 return true;
333 /* Generate a hash value for this region_model. */
335 hashval_t
336 region_model::hash () const
338 hashval_t result = m_store.hash ();
339 result ^= m_constraints->hash ();
340 return result;
343 /* Dump a representation of this model to PP, showing the
344 stack, the store, and any constraints.
345 Use SIMPLE to control how svalues and regions are printed. */
347 void
348 region_model::dump_to_pp (pretty_printer *pp, bool simple,
349 bool multiline) const
351 /* Dump stack. */
352 pp_printf (pp, "stack depth: %i", get_stack_depth ());
353 if (multiline)
354 pp_newline (pp);
355 else
356 pp_string (pp, " {");
357 for (const frame_region *iter_frame = m_current_frame; iter_frame;
358 iter_frame = iter_frame->get_calling_frame ())
360 if (multiline)
361 pp_string (pp, " ");
362 else if (iter_frame != m_current_frame)
363 pp_string (pp, ", ");
364 pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
365 iter_frame->dump_to_pp (pp, simple);
366 if (multiline)
367 pp_newline (pp);
369 if (!multiline)
370 pp_string (pp, "}");
372 /* Dump store. */
373 if (!multiline)
374 pp_string (pp, ", {");
375 m_store.dump_to_pp (pp, simple, multiline,
376 m_mgr->get_store_manager ());
377 if (!multiline)
378 pp_string (pp, "}");
380 /* Dump constraints. */
381 pp_string (pp, "constraint_manager:");
382 if (multiline)
383 pp_newline (pp);
384 else
385 pp_string (pp, " {");
386 m_constraints->dump_to_pp (pp, multiline);
387 if (!multiline)
388 pp_string (pp, "}");
390 /* Dump sizes of dynamic regions, if any are known. */
391 if (!m_dynamic_extents.is_empty ())
393 pp_string (pp, "dynamic_extents:");
394 m_dynamic_extents.dump_to_pp (pp, simple, multiline);
398 /* Dump a representation of this model to FILE. */
400 void
401 region_model::dump (FILE *fp, bool simple, bool multiline) const
403 pretty_printer pp;
404 pp_format_decoder (&pp) = default_tree_printer;
405 pp_show_color (&pp) = pp_show_color (global_dc->printer);
406 pp.buffer->stream = fp;
407 dump_to_pp (&pp, simple, multiline);
408 pp_newline (&pp);
409 pp_flush (&pp);
412 /* Dump a multiline representation of this model to stderr. */
414 DEBUG_FUNCTION void
415 region_model::dump (bool simple) const
417 dump (stderr, simple, true);
420 /* Dump a multiline representation of this model to stderr. */
422 DEBUG_FUNCTION void
423 region_model::debug () const
425 dump (true);
428 /* Assert that this object is valid. */
430 void
431 region_model::validate () const
433 m_store.validate ();
436 /* Canonicalize the store and constraints, to maximize the chance of
437 equality between region_model instances. */
439 void
440 region_model::canonicalize ()
442 m_store.canonicalize (m_mgr->get_store_manager ());
443 m_constraints->canonicalize ();
446 /* Return true if this region_model is in canonical form. */
448 bool
449 region_model::canonicalized_p () const
451 region_model copy (*this);
452 copy.canonicalize ();
453 return *this == copy;
456 /* See the comment for store::loop_replay_fixup. */
458 void
459 region_model::loop_replay_fixup (const region_model *dst_state)
461 m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
464 /* A subclass of pending_diagnostic for complaining about uses of
465 poisoned values. */
467 class poisoned_value_diagnostic
468 : public pending_diagnostic_subclass<poisoned_value_diagnostic>
470 public:
471 poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
472 const region *src_region,
473 tree check_expr)
474 : m_expr (expr), m_pkind (pkind),
475 m_src_region (src_region),
476 m_check_expr (check_expr)
479 const char *get_kind () const final override { return "poisoned_value_diagnostic"; }
481 bool use_of_uninit_p () const final override
483 return m_pkind == POISON_KIND_UNINIT;
486 bool operator== (const poisoned_value_diagnostic &other) const
488 return (m_expr == other.m_expr
489 && m_pkind == other.m_pkind
490 && m_src_region == other.m_src_region);
493 int get_controlling_option () const final override
495 switch (m_pkind)
497 default:
498 gcc_unreachable ();
499 case POISON_KIND_UNINIT:
500 return OPT_Wanalyzer_use_of_uninitialized_value;
501 case POISON_KIND_FREED:
502 return OPT_Wanalyzer_use_after_free;
503 case POISON_KIND_POPPED_STACK:
504 return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
508 bool terminate_path_p () const final override { return true; }
510 bool emit (rich_location *rich_loc) final override
512 switch (m_pkind)
514 default:
515 gcc_unreachable ();
516 case POISON_KIND_UNINIT:
518 diagnostic_metadata m;
519 m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable". */
520 return warning_meta (rich_loc, m, get_controlling_option (),
521 "use of uninitialized value %qE",
522 m_expr);
524 break;
525 case POISON_KIND_FREED:
527 diagnostic_metadata m;
528 m.add_cwe (416); /* "CWE-416: Use After Free". */
529 return warning_meta (rich_loc, m, get_controlling_option (),
530 "use after %<free%> of %qE",
531 m_expr);
533 break;
534 case POISON_KIND_POPPED_STACK:
536 /* TODO: which CWE? */
537 return warning_at
538 (rich_loc, get_controlling_option (),
539 "dereferencing pointer %qE to within stale stack frame",
540 m_expr);
542 break;
546 label_text describe_final_event (const evdesc::final_event &ev) final override
548 switch (m_pkind)
550 default:
551 gcc_unreachable ();
552 case POISON_KIND_UNINIT:
553 return ev.formatted_print ("use of uninitialized value %qE here",
554 m_expr);
555 case POISON_KIND_FREED:
556 return ev.formatted_print ("use after %<free%> of %qE here",
557 m_expr);
558 case POISON_KIND_POPPED_STACK:
559 return ev.formatted_print
560 ("dereferencing pointer %qE to within stale stack frame",
561 m_expr);
565 void mark_interesting_stuff (interesting_t *interest) final override
567 if (m_src_region)
568 interest->add_region_creation (m_src_region);
571 /* Attempt to suppress false positives.
572 Reject paths where the value of the underlying region isn't poisoned.
573 This can happen due to state merging when exploring the exploded graph,
574 where the more precise analysis during feasibility analysis finds that
575 the region is in fact valid.
576 To do this we need to get the value from the fgraph. Unfortunately
577 we can't simply query the state of m_src_region (from the enode),
578 since it might be a different region in the fnode state (e.g. with
579 heap-allocated regions, the numbering could be different).
580 Hence we access m_check_expr, if available. */
582 bool check_valid_fpath_p (const feasible_node &fnode,
583 const gimple *emission_stmt)
584 const final override
586 if (!m_check_expr)
587 return true;
589 /* We've reached the enode, but not necessarily the right function_point.
590 Try to get the state at the correct stmt. */
591 region_model emission_model (fnode.get_model ().get_manager());
592 if (!fnode.get_state_at_stmt (emission_stmt, &emission_model))
593 /* Couldn't get state; accept this diagnostic. */
594 return true;
596 const svalue *fsval = emission_model.get_rvalue (m_check_expr, NULL);
597 /* Check to see if the expr is also poisoned in FNODE (and in the
598 same way). */
599 const poisoned_svalue * fspval = fsval->dyn_cast_poisoned_svalue ();
600 if (!fspval)
601 return false;
602 if (fspval->get_poison_kind () != m_pkind)
603 return false;
604 return true;
607 private:
608 tree m_expr;
609 enum poison_kind m_pkind;
610 const region *m_src_region;
611 tree m_check_expr;
614 /* A subclass of pending_diagnostic for complaining about shifts
615 by negative counts. */
617 class shift_count_negative_diagnostic
618 : public pending_diagnostic_subclass<shift_count_negative_diagnostic>
620 public:
621 shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
622 : m_assign (assign), m_count_cst (count_cst)
625 const char *get_kind () const final override
627 return "shift_count_negative_diagnostic";
630 bool operator== (const shift_count_negative_diagnostic &other) const
632 return (m_assign == other.m_assign
633 && same_tree_p (m_count_cst, other.m_count_cst));
636 int get_controlling_option () const final override
638 return OPT_Wanalyzer_shift_count_negative;
641 bool emit (rich_location *rich_loc) final override
643 return warning_at (rich_loc, get_controlling_option (),
644 "shift by negative count (%qE)", m_count_cst);
647 label_text describe_final_event (const evdesc::final_event &ev) final override
649 return ev.formatted_print ("shift by negative amount here (%qE)", m_count_cst);
652 private:
653 const gassign *m_assign;
654 tree m_count_cst;
657 /* A subclass of pending_diagnostic for complaining about shifts
658 by counts >= the width of the operand type. */
660 class shift_count_overflow_diagnostic
661 : public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
663 public:
664 shift_count_overflow_diagnostic (const gassign *assign,
665 int operand_precision,
666 tree count_cst)
667 : m_assign (assign), m_operand_precision (operand_precision),
668 m_count_cst (count_cst)
671 const char *get_kind () const final override
673 return "shift_count_overflow_diagnostic";
676 bool operator== (const shift_count_overflow_diagnostic &other) const
678 return (m_assign == other.m_assign
679 && m_operand_precision == other.m_operand_precision
680 && same_tree_p (m_count_cst, other.m_count_cst));
683 int get_controlling_option () const final override
685 return OPT_Wanalyzer_shift_count_overflow;
688 bool emit (rich_location *rich_loc) final override
690 return warning_at (rich_loc, get_controlling_option (),
691 "shift by count (%qE) >= precision of type (%qi)",
692 m_count_cst, m_operand_precision);
695 label_text describe_final_event (const evdesc::final_event &ev) final override
697 return ev.formatted_print ("shift by count %qE here", m_count_cst);
700 private:
701 const gassign *m_assign;
702 int m_operand_precision;
703 tree m_count_cst;
706 /* If ASSIGN is a stmt that can be modelled via
707 set_value (lhs_reg, SVALUE, CTXT)
708 for some SVALUE, get the SVALUE.
709 Otherwise return NULL. */
711 const svalue *
712 region_model::get_gassign_result (const gassign *assign,
713 region_model_context *ctxt)
715 tree lhs = gimple_assign_lhs (assign);
716 tree rhs1 = gimple_assign_rhs1 (assign);
717 enum tree_code op = gimple_assign_rhs_code (assign);
718 switch (op)
720 default:
721 return NULL;
723 case POINTER_PLUS_EXPR:
725 /* e.g. "_1 = a_10(D) + 12;" */
726 tree ptr = rhs1;
727 tree offset = gimple_assign_rhs2 (assign);
729 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
730 const svalue *offset_sval = get_rvalue (offset, ctxt);
731 /* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
732 is an integer of type sizetype". */
733 offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);
735 const svalue *sval_binop
736 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
737 ptr_sval, offset_sval);
738 return sval_binop;
740 break;
742 case POINTER_DIFF_EXPR:
744 /* e.g. "_1 = p_2(D) - q_3(D);". */
745 tree rhs2 = gimple_assign_rhs2 (assign);
746 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
747 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
749 // TODO: perhaps fold to zero if they're known to be equal?
751 const svalue *sval_binop
752 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
753 rhs1_sval, rhs2_sval);
754 return sval_binop;
756 break;
758 /* Assignments of the form
759 set_value (lvalue (LHS), rvalue (EXPR))
760 for various EXPR.
761 We already have the lvalue for the LHS above, as "lhs_reg". */
762 case ADDR_EXPR: /* LHS = &RHS; */
763 case BIT_FIELD_REF:
764 case COMPONENT_REF: /* LHS = op0.op1; */
765 case MEM_REF:
766 case REAL_CST:
767 case COMPLEX_CST:
768 case VECTOR_CST:
769 case INTEGER_CST:
770 case ARRAY_REF:
771 case SSA_NAME: /* LHS = VAR; */
772 case VAR_DECL: /* LHS = VAR; */
773 case PARM_DECL:/* LHS = VAR; */
774 case REALPART_EXPR:
775 case IMAGPART_EXPR:
776 return get_rvalue (rhs1, ctxt);
778 case ABS_EXPR:
779 case ABSU_EXPR:
780 case CONJ_EXPR:
781 case BIT_NOT_EXPR:
782 case FIX_TRUNC_EXPR:
783 case FLOAT_EXPR:
784 case NEGATE_EXPR:
785 case NOP_EXPR:
786 case VIEW_CONVERT_EXPR:
788 /* Unary ops. */
789 const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
790 const svalue *sval_unaryop
791 = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
792 return sval_unaryop;
795 case EQ_EXPR:
796 case GE_EXPR:
797 case LE_EXPR:
798 case NE_EXPR:
799 case GT_EXPR:
800 case LT_EXPR:
801 case UNORDERED_EXPR:
802 case ORDERED_EXPR:
804 tree rhs2 = gimple_assign_rhs2 (assign);
806 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
807 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
809 if (TREE_TYPE (lhs) == boolean_type_node)
811 /* Consider constraints between svalues. */
812 tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
813 if (t.is_known ())
814 return m_mgr->get_or_create_constant_svalue
815 (t.is_true () ? boolean_true_node : boolean_false_node);
818 /* Otherwise, generate a symbolic binary op. */
819 const svalue *sval_binop
820 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
821 rhs1_sval, rhs2_sval);
822 return sval_binop;
824 break;
826 case PLUS_EXPR:
827 case MINUS_EXPR:
828 case MULT_EXPR:
829 case MULT_HIGHPART_EXPR:
830 case TRUNC_DIV_EXPR:
831 case CEIL_DIV_EXPR:
832 case FLOOR_DIV_EXPR:
833 case ROUND_DIV_EXPR:
834 case TRUNC_MOD_EXPR:
835 case CEIL_MOD_EXPR:
836 case FLOOR_MOD_EXPR:
837 case ROUND_MOD_EXPR:
838 case RDIV_EXPR:
839 case EXACT_DIV_EXPR:
840 case LSHIFT_EXPR:
841 case RSHIFT_EXPR:
842 case LROTATE_EXPR:
843 case RROTATE_EXPR:
844 case BIT_IOR_EXPR:
845 case BIT_XOR_EXPR:
846 case BIT_AND_EXPR:
847 case MIN_EXPR:
848 case MAX_EXPR:
849 case COMPLEX_EXPR:
851 /* Binary ops. */
852 tree rhs2 = gimple_assign_rhs2 (assign);
854 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
855 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
857 if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
859 /* "INT34-C. Do not shift an expression by a negative number of bits
860 or by greater than or equal to the number of bits that exist in
861 the operand." */
862 if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
863 if (TREE_CODE (rhs2_cst) == INTEGER_CST)
865 if (tree_int_cst_sgn (rhs2_cst) < 0)
866 ctxt->warn
867 (make_unique<shift_count_negative_diagnostic>
868 (assign, rhs2_cst));
869 else if (compare_tree_int (rhs2_cst,
870 TYPE_PRECISION (TREE_TYPE (rhs1)))
871 >= 0)
872 ctxt->warn
873 (make_unique<shift_count_overflow_diagnostic>
874 (assign,
875 int (TYPE_PRECISION (TREE_TYPE (rhs1))),
876 rhs2_cst));
880 const svalue *sval_binop
881 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
882 rhs1_sval, rhs2_sval);
883 return sval_binop;
886 /* Vector expressions. In theory we could implement these elementwise,
887 but for now, simply return unknown values. */
888 case VEC_DUPLICATE_EXPR:
889 case VEC_SERIES_EXPR:
890 case VEC_COND_EXPR:
891 case VEC_PERM_EXPR:
892 case VEC_WIDEN_MULT_HI_EXPR:
893 case VEC_WIDEN_MULT_LO_EXPR:
894 case VEC_WIDEN_MULT_EVEN_EXPR:
895 case VEC_WIDEN_MULT_ODD_EXPR:
896 case VEC_UNPACK_HI_EXPR:
897 case VEC_UNPACK_LO_EXPR:
898 case VEC_UNPACK_FLOAT_HI_EXPR:
899 case VEC_UNPACK_FLOAT_LO_EXPR:
900 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
901 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
902 case VEC_PACK_TRUNC_EXPR:
903 case VEC_PACK_SAT_EXPR:
904 case VEC_PACK_FIX_TRUNC_EXPR:
905 case VEC_PACK_FLOAT_EXPR:
906 case VEC_WIDEN_LSHIFT_HI_EXPR:
907 case VEC_WIDEN_LSHIFT_LO_EXPR:
908 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
912 /* Workaround for discarding certain false positives from
913 -Wanalyzer-use-of-uninitialized-value
914 of the form:
915 ((A OR-IF B) OR-IF C)
916 and:
917 ((A AND-IF B) AND-IF C)
918 where evaluating B is redundant, but could involve simple accesses of
919 uninitialized locals.
921 When optimization is turned on the FE can immediately fold compound
922 conditionals. Specifically, c_parser_condition parses this condition:
923 ((A OR-IF B) OR-IF C)
924 and calls c_fully_fold on the condition.
925 Within c_fully_fold, fold_truth_andor is called, which bails when
926 optimization is off, but if any optimization is turned on can convert the
927 ((A OR-IF B) OR-IF C)
928 into:
929 ((A OR B) OR_IF C)
930 for sufficiently simple B
931 i.e. the inner OR-IF becomes an OR.
932 At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
933 giving this for the inner condition:
934 tmp = A | B;
935 if (tmp)
936 thus effectively synthesizing a redundant access of B when optimization
937 is turned on, when compared to:
938 if (A) goto L1; else goto L4;
939 L1: if (B) goto L2; else goto L4;
940 L2: if (C) goto L3; else goto L4;
941 for the unoptimized case.
943 Return true if CTXT appears to be handling such a short-circuitable stmt,
944 such as the def-stmt for B for the:
945 tmp = A | B;
946 case above, for the case where A is true and thus B would have been
947 short-circuited without optimization, using MODEL for the value of A. */
949 static bool
950 within_short_circuited_stmt_p (const region_model *model,
951 const gassign *assign_stmt)
953 /* We must have an assignment to a temporary of _Bool type. */
954 tree lhs = gimple_assign_lhs (assign_stmt);
955 if (TREE_TYPE (lhs) != boolean_type_node)
956 return false;
957 if (TREE_CODE (lhs) != SSA_NAME)
958 return false;
959 if (SSA_NAME_VAR (lhs) != NULL_TREE)
960 return false;
962 /* The temporary bool must be used exactly once: as the second arg of
963 a BIT_IOR_EXPR or BIT_AND_EXPR. */
964 use_operand_p use_op;
965 gimple *use_stmt;
966 if (!single_imm_use (lhs, &use_op, &use_stmt))
967 return false;
968 const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
969 if (!use_assign)
970 return false;
971 enum tree_code op = gimple_assign_rhs_code (use_assign);
972 if (!(op == BIT_IOR_EXPR ||op == BIT_AND_EXPR))
973 return false;
974 if (!(gimple_assign_rhs1 (use_assign) != lhs
975 && gimple_assign_rhs2 (use_assign) == lhs))
976 return false;
978 /* The first arg of the bitwise stmt must have a known value in MODEL
979 that implies that the value of the second arg doesn't matter, i.e.
980 1 for bitwise or, 0 for bitwise and. */
981 tree other_arg = gimple_assign_rhs1 (use_assign);
982 /* Use a NULL ctxt here to avoid generating warnings. */
983 const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
984 tree other_arg_cst = other_arg_sval->maybe_get_constant ();
985 if (!other_arg_cst)
986 return false;
987 switch (op)
989 default:
990 gcc_unreachable ();
991 case BIT_IOR_EXPR:
992 if (zerop (other_arg_cst))
993 return false;
994 break;
995 case BIT_AND_EXPR:
996 if (!zerop (other_arg_cst))
997 return false;
998 break;
1001 /* All tests passed. We appear to be in a stmt that generates a boolean
1002 temporary with a value that won't matter. */
1003 return true;
1006 /* Workaround for discarding certain false positives from
1007 -Wanalyzer-use-of-uninitialized-value
1008 seen with -ftrivial-auto-var-init=.
1010 -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.
1012 If the address of the var is taken, gimplification will give us
1013 something like:
1015 _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
1016 len = _1;
1018 The result of DEFERRED_INIT will be an uninit value; we don't
1019 want to emit a false positive for "len = _1;"
1021 Return true if ASSIGN_STMT is such a stmt. */
1023 static bool
1024 due_to_ifn_deferred_init_p (const gassign *assign_stmt)
1027 /* We must have an assignment to a decl from an SSA name that's the
1028 result of a IFN_DEFERRED_INIT call. */
1029 if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
1030 return false;
1031 tree lhs = gimple_assign_lhs (assign_stmt);
1032 if (TREE_CODE (lhs) != VAR_DECL)
1033 return false;
1034 tree rhs = gimple_assign_rhs1 (assign_stmt);
1035 if (TREE_CODE (rhs) != SSA_NAME)
1036 return false;
1037 const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1038 const gcall *call = dyn_cast <const gcall *> (def_stmt);
1039 if (!call)
1040 return false;
1041 if (gimple_call_internal_p (call)
1042 && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
1043 return true;
1044 return false;
1047 /* Check for SVAL being poisoned, adding a warning to CTXT.
1048 Return SVAL, or, if a warning is added, another value, to avoid
1049 repeatedly complaining about the same poisoned value in followup code.
1050 SRC_REGION is a hint about where SVAL came from, and can be NULL. */
1052 const svalue *
1053 region_model::check_for_poison (const svalue *sval,
1054 tree expr,
1055 const region *src_region,
1056 region_model_context *ctxt) const
1058 if (!ctxt)
1059 return sval;
1061 if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
1063 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
1065 /* Ignore uninitialized uses of empty types; there's nothing
1066 to initialize. */
1067 if (pkind == POISON_KIND_UNINIT
1068 && sval->get_type ()
1069 && is_empty_type (sval->get_type ()))
1070 return sval;
1072 if (pkind == POISON_KIND_UNINIT)
1073 if (const gimple *curr_stmt = ctxt->get_stmt ())
1074 if (const gassign *assign_stmt
1075 = dyn_cast <const gassign *> (curr_stmt))
1077 /* Special case to avoid certain false positives. */
1078 if (within_short_circuited_stmt_p (this, assign_stmt))
1079 return sval;
1081 /* Special case to avoid false positive on
1082 -ftrivial-auto-var-init=. */
1083 if (due_to_ifn_deferred_init_p (assign_stmt))
1084 return sval;
1087 /* If we have an SSA name for a temporary, we don't want to print
1088 '<unknown>'.
1089 Poisoned values are shared by type, and so we can't reconstruct
1090 the tree other than via the def stmts, using
1091 fixup_tree_for_diagnostic. */
1092 tree diag_arg = fixup_tree_for_diagnostic (expr);
1093 if (src_region == NULL && pkind == POISON_KIND_UNINIT)
1094 src_region = get_region_for_poisoned_expr (expr);
1096 /* Can we reliably get the poisoned value from "expr"?
1097 This is for use by poisoned_value_diagnostic::check_valid_fpath_p.
1098 Unfortunately, we might not have a reliable value for EXPR.
1099 Hence we only query its value now, and only use it if we get the
1100 poisoned value back again. */
1101 tree check_expr = expr;
1102 const svalue *foo_sval = get_rvalue (expr, NULL);
1103 if (foo_sval == sval)
1104 check_expr = expr;
1105 else
1106 check_expr = NULL;
1107 if (ctxt->warn (make_unique<poisoned_value_diagnostic> (diag_arg,
1108 pkind,
1109 src_region,
1110 check_expr)))
1112 /* We only want to report use of a poisoned value at the first
1113 place it gets used; return an unknown value to avoid generating
1114 a chain of followup warnings. */
1115 sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
1118 return sval;
1121 return sval;
1124 /* Attempt to get a region for describing EXPR, the source of region of
1125 a poisoned_svalue for use in a poisoned_value_diagnostic.
1126 Return NULL if there is no good region to use. */
1128 const region *
1129 region_model::get_region_for_poisoned_expr (tree expr) const
1131 if (TREE_CODE (expr) == SSA_NAME)
1133 tree decl = SSA_NAME_VAR (expr);
1134 if (decl && DECL_P (decl))
1135 expr = decl;
1136 else
1137 return NULL;
1139 return get_lvalue (expr, NULL);
/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      check_for_poison (sval, expr, NULL, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  /* get_gassign_result gave us nothing; handle the remaining rhs codes
     specially, falling back to an unknown value.  */
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
	/* Deliberately disabled: flip to "if (1)" locally to find
	   assignment ops that fall through to the unknown-value case.  */
	if (0)
	  sorry_at (assign->location, "unhandled assignment op: %qs",
		    get_tree_code_name (op));
	const svalue *unknown_sval
	  = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
	set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
	if (TREE_CLOBBER_P (rhs1))
	  {
	    /* e.g. "x ={v} {CLOBBER};"  */
	    clobber_region (lhs_reg);
	  }
	else
	  {
	    /* Any CONSTRUCTOR that survives to this point is either
	       just a zero-init of everything, or a vector.  */
	    if (!CONSTRUCTOR_NO_CLEARING (rhs1))
	      zero_fill_region (lhs_reg);
	    unsigned ix;
	    tree index;
	    tree val;
	    /* Bind each explicit element of the (vector) constructor
	       into the corresponding element region of the lhs.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
	      {
		gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
		/* Elements without an explicit index use their position.  */
		if (!index)
		  index = build_int_cst (integer_type_node, ix);
		gcc_assert (TREE_CODE (index) == INTEGER_CST);
		const svalue *index_sval
		  = m_mgr->get_or_create_constant_svalue (index);
		gcc_assert (index_sval);
		const region *sub_reg
		  = m_mgr->get_element_region (lhs_reg,
					       TREE_TYPE (val),
					       index_sval);
		const svalue *val_sval = get_rvalue (val, ctxt);
		set_value (sub_reg, val_sval, ctxt);
	      }
	  }
      }
      break;

    case STRING_CST:
      {
	/* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
			   ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}
/* Handle the pre-sm-state part of STMT, modifying this object in-place.
   Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
   side effects.
   Note: *OUT_UNKNOWN_SIDE_EFFECTS is only written to for GIMPLE_CALL;
   the caller is responsible for initializing it.  */

void
region_model::on_stmt_pre (const gimple *stmt,
			   bool *out_unknown_side_effects,
			   region_model_context *ctxt)
{
  switch (gimple_code (stmt))
    {
    default:
      /* No-op for now.  */
      break;

    case GIMPLE_ASSIGN:
      {
	const gassign *assign = as_a <const gassign *> (stmt);
	on_assignment (assign, ctxt);
      }
      break;

    case GIMPLE_ASM:
      {
	const gasm *asm_stmt = as_a <const gasm *> (stmt);
	on_asm_stmt (asm_stmt, ctxt);
      }
      break;

    case GIMPLE_CALL:
      {
	/* Track whether we have a gcall to a function that's not recognized by
	   anything, for which we don't have a function body, or for which we
	   don't know the fndecl.  */
	const gcall *call = as_a <const gcall *> (stmt);
	*out_unknown_side_effects = on_call_pre (call, ctxt);
      }
      break;

    case GIMPLE_RETURN:
      {
	const greturn *return_ = as_a <const greturn *> (stmt);
	on_return (return_, ctxt);
      }
      break;
    }
}
1273 /* Ensure that all arguments at the call described by CD are checked
1274 for poisoned values, by calling get_rvalue on each argument. */
1276 void
1277 region_model::check_call_args (const call_details &cd) const
1279 for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
1280 cd.get_arg_svalue (arg_idx);
1283 /* Return true if CD is known to be a call to a function with
1284 __attribute__((const)). */
1286 static bool
1287 const_fn_p (const call_details &cd)
1289 tree fndecl = cd.get_fndecl_for_call ();
1290 if (!fndecl)
1291 return false;
1292 gcc_assert (DECL_P (fndecl));
1293 return TREE_READONLY (fndecl);
1296 /* If this CD is known to be a call to a function with
1297 __attribute__((const)), attempt to get a const_fn_result_svalue
1298 based on the arguments, or return NULL otherwise. */
1300 static const svalue *
1301 maybe_get_const_fn_result (const call_details &cd)
1303 if (!const_fn_p (cd))
1304 return NULL;
1306 unsigned num_args = cd.num_args ();
1307 if (num_args > const_fn_result_svalue::MAX_INPUTS)
1308 /* Too many arguments. */
1309 return NULL;
1311 auto_vec<const svalue *> inputs (num_args);
1312 for (unsigned arg_idx = 0; arg_idx < num_args; arg_idx++)
1314 const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
1315 if (!arg_sval->can_have_associated_state_p ())
1316 return NULL;
1317 inputs.quick_push (arg_sval);
1320 region_model_manager *mgr = cd.get_manager ();
1321 const svalue *sval
1322 = mgr->get_or_create_const_fn_result_svalue (cd.get_lhs_type (),
1323 cd.get_fndecl_for_call (),
1324 inputs);
1325 return sval;
1328 /* Update this model for an outcome of a call that returns a specific
1329 integer constant.
1330 If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
1331 the state-merger code from merging success and failure outcomes. */
1333 void
1334 region_model::update_for_int_cst_return (const call_details &cd,
1335 int retval,
1336 bool unmergeable)
1338 if (!cd.get_lhs_type ())
1339 return;
1340 if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
1341 return;
1342 const svalue *result
1343 = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
1344 if (unmergeable)
1345 result = m_mgr->get_or_create_unmergeable (result);
1346 set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
/* Update this model for an outcome of a call that returns zero.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_zero_return (const call_details &cd,
				      bool unmergeable)
{
  /* Delegate to the general integer-constant-return case with retval 0.  */
  update_for_int_cst_return (cd, 0, unmergeable);
}
1360 /* Update this model for an outcome of a call that returns non-zero. */
1362 void
1363 region_model::update_for_nonzero_return (const call_details &cd)
1365 if (!cd.get_lhs_type ())
1366 return;
1367 if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
1368 return;
1369 const svalue *zero
1370 = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
1371 const svalue *result
1372 = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
1373 add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
1376 /* Subroutine of region_model::maybe_get_copy_bounds.
1377 The Linux kernel commonly uses
1378 min_t([unsigned] long, VAR, sizeof(T));
1379 to set an upper bound on the size of a copy_to_user.
1380 Attempt to simplify such sizes by trying to get the upper bound as a
1381 constant.
1382 Return the simplified svalue if possible, or NULL otherwise. */
1384 static const svalue *
1385 maybe_simplify_upper_bound (const svalue *num_bytes_sval,
1386 region_model_manager *mgr)
1388 tree type = num_bytes_sval->get_type ();
1389 while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
1390 num_bytes_sval = raw;
1391 if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
1392 if (binop_sval->get_op () == MIN_EXPR)
1393 if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
1395 return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
1396 /* TODO: we might want to also capture the constraint
1397 when recording the diagnostic, or note that we're using
1398 the upper bound. */
1400 return NULL;
/* Attempt to get an upper bound for the size of a copy when simulating a
   copy function.

   NUM_BYTES_SVAL is the symbolic value for the size of the copy.
   Use it if it's constant, otherwise try to simplify it.  Failing
   that, use the size of SRC_REG if constant.

   Return a symbolic value for an upper limit on the number of bytes
   copied, or NULL if no such value could be determined.  */

const svalue *
region_model::maybe_get_copy_bounds (const region *src_reg,
				     const svalue *num_bytes_sval)
{
  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* Attempt to simplify e.g. min_t(long, VAR, sizeof(T)) down to its
     constant upper bound.  */
  if (const svalue *simplified
	= maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
    num_bytes_sval = simplified;

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* For now, try just guessing the size as the capacity of the
     base region of the src.
     This is a hack; we might get too large a value.  */
  const region *src_base_reg = src_reg->get_base_region ();
  num_bytes_sval = get_capacity (src_base_reg);

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* Non-constant: give up.  */
  return NULL;
}
1440 /* Get any known_function for FNDECL for call CD.
1442 The call must match all assumptions made by the known_function (such as
1443 e.g. "argument 1's type must be a pointer type").
1445 Return NULL if no known_function is found, or it does not match the
1446 assumption(s). */
1448 const known_function *
1449 region_model::get_known_function (tree fndecl, const call_details &cd) const
1451 known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
1452 return known_fn_mgr->get_match (fndecl, cd);
1455 /* Get any known_function for IFN, or NULL. */
1457 const known_function *
1458 region_model::get_known_function (enum internal_fn ifn) const
1460 known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
1461 return known_fn_mgr->get_internal_fn (ifn);
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the first half.

   Updates to the region_model that should be made *before* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_post.

   Return true if the function call has unknown side effects (it wasn't
   recognized and we don't have a body for it, or are unable to tell which
   fndecl it is).  */

bool
region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
{
  call_details cd (call, this, ctxt);

  /* Special-case for IFN_DEFERRED_INIT.
     We want to report uninitialized variables with -fanalyzer (treating
     -ftrivial-auto-var-init= as purely a mitigation feature).
     Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
     lhs of the call, so that it is still uninitialized from the point of
     view of the analyzer.  */
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return false; /* No side effects.  */

  /* Get svalues for all of the arguments at the callsite, to ensure that we
     complain about any uninitialized arguments.  This might lead to
     duplicates if any of the handling below also looks up the svalues,
     but the deduplication code should deal with that.  */
  if (ctxt)
    check_call_args (cd);

  tree callee_fndecl = get_fndecl_for_call (call, ctxt);

  /* Some of the cases below update the lhs of the call based on the
     return value, but not all.  Provide a default value, which may
     get overwritten below.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const region *lhs_region = get_lvalue (lhs, ctxt);
      const svalue *sval = maybe_get_const_fn_result (cd);
      if (!sval)
	{
	  if (callee_fndecl
	      && lookup_attribute ("malloc", DECL_ATTRIBUTES (callee_fndecl)))
	    {
	      /* __attribute__((malloc)): the result points to freshly
		 allocated heap memory with unknown content.  */
	      const region *new_reg
		= get_or_create_region_for_heap_alloc (NULL, ctxt);
	      mark_region_as_unknown (new_reg, NULL);
	      sval = m_mgr->get_ptr_svalue (cd.get_lhs_type (), new_reg);
	    }
	  else
	    /* For the common case of functions without __attribute__((const)),
	       use a conjured value, and purge any prior state involving that
	       value (in case this is in a loop).  */
	    sval = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
							 lhs_region,
							 conjured_purge (this,
									 ctxt));
	}
      set_value (lhs_region, sval, ctxt);
    }

  /* Internal fns with a known_function handler are fully recognized.  */
  if (gimple_call_internal_p (call))
    if (const known_function *kf
	  = get_known_function (gimple_call_internal_fn (call)))
      {
	kf->impl_call_pre (cd);
	return false; /* No further side effects.  */
      }

  if (!callee_fndecl)
    return true; /* Unknown side effects.  */

  if (const known_function *kf = get_known_function (callee_fndecl, cd))
    {
      kf->impl_call_pre (cd);
      return false; /* No further side effects.  */
    }

  /* const/pure functions can't write to caller-visible state.  */
  const int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);
  if (callee_fndecl_flags & (ECF_CONST | ECF_PURE))
    return false; /* No side effects.  */

  if (fndecl_built_in_p (callee_fndecl))
    return true; /* Unknown side effects.  */

  if (!fndecl_has_gimple_body_p (callee_fndecl))
    return true; /* Unknown side effects.  */

  return false; /* No side effects.  */
}
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the second half.

   Updates to the region_model that should be made *after* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_pre.

   If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
   to purge state.  */

void
region_model::on_call_post (const gcall *call,
			    bool unknown_side_effects,
			    region_model_context *ctxt)
{
  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      call_details cd (call, this, ctxt);
      if (const known_function *kf = get_known_function (callee_fndecl, cd))
	{
	  kf->impl_call_post (cd);
	  return;
	}
      /* Was this fndecl referenced by
	 __attribute__((malloc(FOO)))?  */
      if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
	{
	  impl_deallocation_call (cd);
	  return;
	}
    }

  if (unknown_side_effects)
    handle_unrecognized_call (call, ctxt);
}
/* Purge state involving SVAL from this region_model, using CTXT
   (if non-NULL) to purge other state in a program_state.

   For example, if we're at the def-stmt of an SSA name, then we need to
   purge any state for svalues that involve that SSA name.  This avoids
   false positives in loops, since a symbolic value referring to the
   SSA name will be referring to the previous value of that SSA name.

   For example, in:
   while ((e = hashmap_iter_next(&iter))) {
     struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
     free (e_strbuf->value);
   at the def-stmt of e_8:
     e_8 = hashmap_iter_next (&iter);
   we should purge the "freed" state of:
     INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
   which is the "e_strbuf->value" value from the previous iteration,
   or we will erroneously report a double-free - the "e_8" within it
   refers to the previous value.  */

void
region_model::purge_state_involving (const svalue *sval,
				     region_model_context *ctxt)
{
  /* Values that can't have associated state have nothing to purge.  */
  if (!sval->can_have_associated_state_p ())
    return;
  /* Purge from the store, the constraints, and the dynamic extents,
     then let CTXT purge any other per-program_state state.  */
  m_store.purge_state_involving (sval, m_mgr);
  m_constraints->purge_state_involving (sval);
  m_dynamic_extents.purge_state_involving (sval);
  if (ctxt)
    ctxt->purge_state_involving (sval);
}
/* A pending_note subclass for adding a note about an
   __attribute__((access, ...)) to a diagnostic.  */

class reason_attr_access : public pending_note_subclass<reason_attr_access>
{
public:
  reason_attr_access (tree callee_fndecl, const attr_access &access)
  : m_callee_fndecl (callee_fndecl),
    m_ptr_argno (access.ptrarg),
    m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
  {
  }

  const char *get_kind () const final override { return "reason_attr_access"; }

  /* Emit a note at the callee's source location identifying which
     parameter carried the "access" attribute.  */
  void emit () const final override
  {
    inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
	    "parameter %i of %qD marked with attribute %qs",
	    m_ptr_argno + 1, m_callee_fndecl, m_access_str);
  }

  /* Deduplication: two notes are the same if they describe the same
     parameter of the same callee with the same attribute string.  */
  bool operator== (const reason_attr_access &other) const
  {
    return (m_callee_fndecl == other.m_callee_fndecl
	    && m_ptr_argno == other.m_ptr_argno
	    && !strcmp (m_access_str, other.m_access_str));
  }

private:
  tree m_callee_fndecl;
  /* 0-based index of the pointer argument the attribute describes.  */
  unsigned m_ptr_argno;
  /* Human-readable rendering of the attribute.  */
  const char *m_access_str;
};
/* Check CALL a call to external function CALLEE_FNDECL based on
   any __attribute__ ((access, ....) on the latter, complaining to
   CTXT about any issues.

   Currently we merely call check_region_for_write on any regions
   pointed to by arguments marked with a "write_only" or "read_write"
   attribute.

   NOTE: the loop below walks TYPE_ARG_TYPES, so only prototyped
   arguments are considered; read-only access modes and the size
   argument are not yet used.  */

void
region_model::
check_external_function_for_access_attr (const gcall *call,
					 tree callee_fndecl,
					 region_model_context *ctxt) const
{
  gcc_assert (call);
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  if (!fntype)
    return;

  if (!TYPE_ATTRIBUTES (fntype))
    return;

  /* Initialize a map of attribute access specifications for arguments
     to the function call.  */
  rdwr_map rdwr_idx;
  init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));

  unsigned argno = 0;

  for (tree iter = TYPE_ARG_TYPES (fntype); iter;
       iter = TREE_CHAIN (iter), ++argno)
    {
      const attr_access* access = rdwr_idx.get (argno);
      if (!access)
	continue;

      /* Ignore any duplicate entry in the map for the size argument.  */
      if (access->ptrarg != argno)
	continue;

      if (access->mode == access_write_only
	  || access->mode == access_read_write)
	{
	  /* Subclass of decorated_region_model_context that
	     adds a note about the attr access to any saved diagnostics.  */
	  class annotating_ctxt : public note_adding_context
	  {
	  public:
	    annotating_ctxt (tree callee_fndecl,
			     const attr_access &access,
			     region_model_context *ctxt)
	    : note_adding_context (ctxt),
	      m_callee_fndecl (callee_fndecl),
	      m_access (access)
	    {
	    }
	    std::unique_ptr<pending_note> make_note () final override
	    {
	      return make_unique<reason_attr_access>
		(m_callee_fndecl, m_access);
	    }
	  private:
	    tree m_callee_fndecl;
	    const attr_access &m_access;
	  };

	  /* Use this ctxt below so that any diagnostics get the
	     note added to them.  */
	  annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);

	  tree ptr_tree = gimple_call_arg (call, access->ptrarg);
	  const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
	  const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
	  check_region_for_write (reg, &my_ctxt);
	  /* We don't use the size arg for now.  */
	}
    }
}
/* Handle a call CALL to a function with unknown behavior.

   Traverse the regions in this model, determining what regions are
   reachable from pointer arguments to CALL and from global variables,
   recursively.

   Set all reachable regions to new unknown values and purge sm-state
   from their values, and from values that point to them.  */

void
region_model::handle_unrecognized_call (const gcall *call,
					region_model_context *ctxt)
{
  tree fndecl = get_fndecl_for_call (call, ctxt);

  if (fndecl && ctxt)
    check_external_function_for_access_attr (call, fndecl, ctxt);

  reachable_regions reachable_regs (this);

  /* Determine the reachable regions and their mutability.  */
  {
    /* Add globals and regions that already escaped in previous
       unknown calls.  */
    m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			      &reachable_regs);

    /* Params that are pointers.  */
    tree iter_param_types = NULL_TREE;
    if (fndecl)
      iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
    for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call);
	 arg_idx++)
      {
	/* Track expected param type, where available.  */
	tree param_type = NULL_TREE;
	if (iter_param_types)
	  {
	    param_type = TREE_VALUE (iter_param_types);
	    gcc_assert (param_type);
	    iter_param_types = TREE_CHAIN (iter_param_types);
	  }

	tree parm = gimple_call_arg (call, arg_idx);
	const svalue *parm_sval = get_rvalue (parm, ctxt);
	reachable_regs.handle_parm (parm_sval, param_type);
      }
  }

  uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;

  /* Purge sm-state for the svalues that were reachable,
     both in non-mutable and mutable form.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, false);
    }
  for (svalue_set::iterator iter
	 = reachable_regs.begin_mutable_svals ();
       iter != reachable_regs.end_mutable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, true);
      if (uncertainty)
	uncertainty->on_mutable_sval_at_unknown_call (sval);
    }

  /* Mark any clusters that have escaped.  */
  reachable_regs.mark_escaped_clusters (ctxt);

  /* Update bindings for all clusters that have escaped, whether above,
     or previously.  */
  m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
			     conjured_purge (this, ctxt));

  /* Purge dynamic extents from any regions that have escaped mutably:
     realloc could have been called on them.  */
  for (hash_set<const region *>::iterator
	 iter = reachable_regs.begin_mutable_base_regs ();
       iter != reachable_regs.end_mutable_base_regs ();
       ++iter)
    {
      const region *base_reg = (*iter);
      unset_dynamic_extents (base_reg);
    }
}
/* Traverse the regions in this model, determining what regions are
   reachable from the store and populating *OUT.

   If EXTRA_SVAL is non-NULL, treat it as an additional "root"
   for reachability (for handling return values from functions when
   analyzing return of the only function on the stack).

   If UNCERTAINTY is non-NULL, treat any svalues that were recorded
   within it as being maybe-bound as additional "roots" for reachability.

   Find svalues that haven't leaked.  */

void
region_model::get_reachable_svalues (svalue_set *out,
				     const svalue *extra_sval,
				     const uncertainty_t *uncertainty)
{
  reachable_regions reachable_regs (this);

  /* Add globals and regions that already escaped in previous
     unknown calls.  */
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);

  if (extra_sval)
    reachable_regs.handle_sval (extra_sval);

  if (uncertainty)
    for (uncertainty_t::iterator iter
	   = uncertainty->begin_maybe_bound_svals ();
	 iter != uncertainty->end_maybe_bound_svals (); ++iter)
      reachable_regs.handle_sval (*iter);

  /* Get regions for locals that have explicitly bound values.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      /* Only frame-local regions; globals were already added above.  */
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Populate *OUT based on the values that were reachable.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    out->add (*iter);
}
1886 /* Update this model for the RETURN_STMT, using CTXT to report any
1887 diagnostics. */
1889 void
1890 region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
1892 tree callee = get_current_function ()->decl;
1893 tree lhs = DECL_RESULT (callee);
1894 tree rhs = gimple_return_retval (return_stmt);
1896 if (lhs && rhs)
1898 const svalue *sval = get_rvalue (rhs, ctxt);
1899 const region *ret_reg = get_lvalue (lhs, ctxt);
1900 set_value (ret_reg, sval, ctxt);
/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
   ENODE, using CTXT to report any diagnostics.

   This is for the initial direct invocation of setjmp/sigsetjmp (which returns
   0), as opposed to any second return due to longjmp/sigsetjmp.  */

void
region_model::on_setjmp (const gcall *call, const exploded_node *enode,
			 region_model_context *ctxt)
{
  /* Argument 0 is the jmp_buf pointer.  */
  const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
  const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
					ctxt);

  /* Create a setjmp_svalue for this call and store it in BUF_REG's
     region.  */
  if (buf_reg)
    {
      setjmp_record r (enode, call);
      const svalue *sval
	= m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
      set_value (buf_reg, sval, ctxt);
    }

  /* Direct calls to setjmp return 0.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const svalue *new_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, new_sval, ctxt);
    }
}
/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
   to a "setjmp" at SETJMP_CALL where the final stack depth should be
   SETJMP_STACK_DEPTH.  Pop any stack frames.  Leak detection is *not*
   done, and should be done by the caller.  */

void
region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
			  int setjmp_stack_depth, region_model_context *ctxt)
{
  /* Evaluate the val, using the frame of the "longjmp".  */
  tree fake_retval = gimple_call_arg (longjmp_call, 1);
  const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);

  /* Pop any frames until we reach the stack depth of the function where
     setjmp was called.  */
  gcc_assert (get_stack_depth () >= setjmp_stack_depth);
  while (get_stack_depth () > setjmp_stack_depth)
    pop_frame (NULL, NULL, ctxt);

  gcc_assert (get_stack_depth () == setjmp_stack_depth);

  /* Assign to LHS of "setjmp" in new_state.  */
  if (tree lhs = gimple_call_lhs (setjmp_call))
    {
      /* Passing 0 as the val to longjmp leads to setjmp returning 1.  */
      const svalue *zero_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
      tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
      /* If we have 0, use 1.  */
      if (eq_zero.is_true ())
	{
	  const svalue *one_sval
	    = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
	  fake_retval_sval = one_sval;
	}
      else
	{
	  /* Otherwise note that the value is nonzero.  */
	  m_constraints->add_constraint (fake_retval_sval, NE_EXPR,
					 zero_sval);
	}

      /* Decorate the return value from setjmp as being unmergeable,
	 so that we don't attempt to merge states with it as zero
	 with states in which it's nonzero, leading to a clean distinction
	 in the exploded_graph between the first return and the second
	 return.  */
      fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);

      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, fake_retval_sval, ctxt);
    }
}
1991 /* Update this region_model for a phi stmt of the form
1992 LHS = PHI <...RHS...>.
1993 where RHS is for the appropriate edge.
1994 Get state from OLD_STATE so that all of the phi stmts for a basic block
1995 are effectively handled simultaneously. */
1997 void
1998 region_model::handle_phi (const gphi *phi,
1999 tree lhs, tree rhs,
2000 const region_model &old_state,
2001 region_model_context *ctxt)
2003 /* For now, don't bother tracking the .MEM SSA names. */
2004 if (tree var = SSA_NAME_VAR (lhs))
2005 if (TREE_CODE (var) == VAR_DECL)
2006 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
2007 return;
2009 const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
2010 const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
2012 set_value (dst_reg, src_sval, ctxt);
2014 if (ctxt)
2015 ctxt->on_phi (phi, rhs);
/* Implementation of region_model::get_lvalue; the latter adds type-checking.

   Get the id of the region for PV within this region_model,
   emitting any diagnostics to CTXT.  */

const region *
region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
{
  tree expr = pv.m_tree;

  gcc_assert (expr);

  switch (TREE_CODE (expr))
    {
    default:
      return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
							 dump_location_t ());

    case ARRAY_REF:
      {
	tree array = TREE_OPERAND (expr, 0);
	tree index = TREE_OPERAND (expr, 1);

	const region *array_reg = get_lvalue (array, ctxt);
	const svalue *index_sval = get_rvalue (index, ctxt);
	return m_mgr->get_element_region (array_reg,
					  TREE_TYPE (TREE_TYPE (array)),
					  index_sval);
      }
      break;

    case BIT_FIELD_REF:
      {
	tree inner_expr = TREE_OPERAND (expr, 0);
	const region *inner_reg = get_lvalue (inner_expr, ctxt);
	/* Operands 1 and 2 are the size and offset of the bit range,
	   in bits; both are required to be INTEGER_CSTs.  */
	tree num_bits = TREE_OPERAND (expr, 1);
	tree first_bit_offset = TREE_OPERAND (expr, 2);
	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
			TREE_INT_CST_LOW (num_bits));
	return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
      }
      break;

    case MEM_REF:
      {
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
	return m_mgr->get_offset_region (star_ptr,
					 TREE_TYPE (expr),
					 offset_sval);
      }
      break;

    case FUNCTION_DECL:
      return m_mgr->get_region_for_fndecl (expr);

    case LABEL_DECL:
      return m_mgr->get_region_for_label (expr);

    case VAR_DECL:
      /* Handle globals.  */
      if (is_global_var (expr))
	return m_mgr->get_region_for_global (expr);

      /* Fall through for frame-local VAR_DECLs.  */

    case SSA_NAME:
    case PARM_DECL:
    case RESULT_DECL:
      {
	gcc_assert (TREE_CODE (expr) == SSA_NAME
		    || TREE_CODE (expr) == PARM_DECL
		    || TREE_CODE (expr) == VAR_DECL
		    || TREE_CODE (expr) == RESULT_DECL);

	/* Locals live in the frame given by the path_var's stack depth.  */
	int stack_index = pv.m_stack_depth;
	const frame_region *frame = get_frame_at_index (stack_index);
	gcc_assert (frame);
	return frame->get_region_for_local (m_mgr, expr, ctxt);
      }

    case COMPONENT_REF:
      {
	/* obj.field  */
	tree obj = TREE_OPERAND (expr, 0);
	tree field = TREE_OPERAND (expr, 1);
	const region *obj_reg = get_lvalue (obj, ctxt);
	return m_mgr->get_field_region (obj_reg, field);
      }
      break;

    case STRING_CST:
      return m_mgr->get_region_for_string (expr);
    }
}
/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op.
   Only enforced in checking builds (CHECKING_P); a no-op otherwise.  */

static void
assert_compat_types (tree src_type, tree dst_type)
{
  /* Missing type info or a void destination is always acceptable.  */
  if (src_type && dst_type && !VOID_TYPE_P (dst_type))
    {
#if CHECKING_P
      if (!(useless_type_conversion_p (src_type, dst_type)))
	internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
#endif
    }
}
2133 /* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
2135 bool
2136 compat_types_p (tree src_type, tree dst_type)
2138 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2139 if (!(useless_type_conversion_p (src_type, dst_type)))
2140 return false;
2141 return true;
2144 /* Get the region for PV within this region_model,
2145 emitting any diagnostics to CTXT. */
2147 const region *
2148 region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
2150 if (pv.m_tree == NULL_TREE)
2151 return NULL;
2153 const region *result_reg = get_lvalue_1 (pv, ctxt);
2154 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
2155 return result_reg;
/* Get the region for EXPR within this region_model (assuming the most
   recent stack frame if it's a local).  */

const region *
region_model::get_lvalue (tree expr, region_model_context *ctxt) const
{
  /* Wrap EXPR in a path_var anchored at the innermost frame.  */
  return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
}
/* Implementation of region_model::get_rvalue; the latter adds type-checking.

   Get the value of PV within this region_model,
   emitting any diagnostics to CTXT.
   Dispatches on the tree code of PV's tree; unhandled codes yield an
   unknown svalue rather than failing.  */

const svalue *
region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
{
  gcc_assert (pv.m_tree);

  switch (TREE_CODE (pv.m_tree))
    {
    default:
      /* Unhandled tree codes: conservatively "unknown".  */
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));

    case ADDR_EXPR:
      {
	/* "&EXPR": the value is a pointer to the lvalue's region.  */
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *expr_reg = get_lvalue (op0, ctxt);
	return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
      }
      break;

    case BIT_FIELD_REF:
      {
	/* Extract a bit-range from the underlying region's value.
	   Operand 1 is the width in bits, operand 2 the start bit;
	   both must be compile-time constants.  */
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *reg = get_lvalue (op0, ctxt);
	tree num_bits = TREE_OPERAND (expr, 1);
	tree first_bit_offset = TREE_OPERAND (expr, 2);
	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
			TREE_INT_CST_LOW (num_bits));
	return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
      }

    case VAR_DECL:
      if (DECL_HARD_REGISTER (pv.m_tree))
	{
	  /* If it has a hard register, it doesn't have a memory region
	     and can't be referred to as an lvalue.  */
	  return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
	}
      /* Fall through. */
    case PARM_DECL:
    case SSA_NAME:
    case RESULT_DECL:
    case ARRAY_REF:
      {
	/* Decls and array refs: look the region up in the store.  */
	const region *reg = get_lvalue (pv, ctxt);
	return get_store_value (reg, ctxt);
      }

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary ops: wrap the operand's value in a unaryop_svalue.  */
	tree expr = pv.m_tree;
	tree arg = TREE_OPERAND (expr, 0);
	const svalue *arg_sval = get_rvalue (arg, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
					  arg_sval);
	return sval_unaryop;
      }

    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      /* Constants map directly to constant_svalues.  */
      return m_mgr->get_or_create_constant_svalue (pv.m_tree);

    case POINTER_PLUS_EXPR:
      {
	/* Pointer arithmetic: model as a binop on the pointer and
	   offset values.  */
	tree expr = pv.m_tree;
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
					ptr_sval, offset_sval);
	return sval_binop;
      }

    /* Binary ops.  */
    case PLUS_EXPR:
    case MULT_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree expr = pv.m_tree;
	tree arg0 = TREE_OPERAND (expr, 0);
	tree arg1 = TREE_OPERAND (expr, 1);
	const svalue *arg0_sval = get_rvalue (arg0, ctxt);
	const svalue *arg1_sval = get_rvalue (arg1, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
					arg0_sval, arg1_sval);
	return sval_binop;
      }

    case COMPONENT_REF:
    case MEM_REF:
      {
	/* Field and memory references: read via the store.  */
	const region *ref_reg = get_lvalue (pv, ctxt);
	return get_store_value (ref_reg, ctxt);
      }
    case OBJ_TYPE_REF:
      {
	/* Virtual-call reference: evaluate the underlying expression.  */
	tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
	return get_rvalue (expr, ctxt);
      }
    }
}
/* Get the value of PV within this region_model,
   emitting any diagnostics to CTXT.
   Returns NULL for a NULL tree.  Adds type-checking and
   poison-detection on top of get_rvalue_1.  */

const svalue *
region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
{
  if (pv.m_tree == NULL_TREE)
    return NULL;

  const svalue *result_sval = get_rvalue_1 (pv, ctxt);

  assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));

  /* Warn (via CTXT) about e.g. uninitialized reads.  */
  result_sval = check_for_poison (result_sval, pv.m_tree, NULL, ctxt);

  return result_sval;
}
2306 /* Get the value of EXPR within this region_model (assuming the most
2307 recent stack frame if it's a local). */
2309 const svalue *
2310 region_model::get_rvalue (tree expr, region_model_context *ctxt) const
2312 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
/* Return true if this model is on a path with "main" as the entrypoint
   (as opposed to one in which we're merely analyzing a subset of the
   path through the code).
   Returns false when there is no stack frame at all.  */

bool
region_model::called_from_main_p () const
{
  if (!m_current_frame)
    return false;
  /* Determine if the oldest stack frame in this model is for "main".  */
  const frame_region *frame0 = get_frame_at_index (0);
  gcc_assert (frame0);
  return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
}
/* Subroutine of region_model::get_store_value for when REG is (or is within)
   a global variable that hasn't been touched since the start of this path
   (or was implicitly touched due to a call to an unknown function).
   Falls back to INIT_VAL(REG) when nothing more precise is known.  */

const svalue *
region_model::get_initial_value_for_global (const region *reg) const
{
  /* Get the decl that REG is for (or is within).  */
  const decl_region *base_reg
    = reg->get_base_region ()->dyn_cast_decl_region ();
  gcc_assert (base_reg);
  tree decl = base_reg->get_decl ();

  /* Special-case: to avoid having to explicitly update all previously
     untracked globals when calling an unknown fn, they implicitly have
     an unknown value if an unknown call has occurred, unless this is
     static to-this-TU and hasn't escaped.  Globals that have escaped
     are explicitly tracked, so we shouldn't hit this case for them.  */
  if (m_store.called_unknown_fn_p ()
      && TREE_PUBLIC (decl)
      && !TREE_READONLY (decl))
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  /* If we are on a path from the entrypoint from "main" and we have a
     global decl defined in this TU that hasn't been touched yet, then
     the initial value of REG can be taken from the initialization value
     of the decl.  */
  if (called_from_main_p () || TREE_READONLY (decl))
    {
      /* Attempt to get the initializer value for base_reg.  */
      if (const svalue *base_reg_init
	    = base_reg->get_svalue_for_initializer (m_mgr))
	{
	  if (reg == base_reg)
	    return base_reg_init;
	  else
	    {
	      /* Get the value for REG within base_reg_init by binding
		 the initializer into a scratch cluster and reading REG
		 back out of it.  */
	      binding_cluster c (base_reg);
	      c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
	      const svalue *sval
		= c.get_any_binding (m_mgr->get_store_manager (), reg);
	      if (sval)
		{
		  if (reg->get_type ())
		    sval = m_mgr->get_or_create_cast (reg->get_type (),
						      sval);
		  return sval;
		}
	    }
	}
    }

  /* Otherwise, return INIT_VAL(REG).  */
  return m_mgr->get_or_create_initial_value (reg);
}
/* Get a value for REG, looking it up in the store, or otherwise falling
   back to "initial" or "unknown" values.
   Use CTXT to report any warnings associated with reading from REG.
   The special cases below are tried in order; the first match wins.  */

const svalue *
region_model::get_store_value (const region *reg,
			       region_model_context *ctxt) const
{
  /* Getting the value of an empty region gives an unknown_svalue.  */
  if (reg->empty_p ())
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  /* May emit diagnostics via CTXT (e.g. out-of-bounds reads).  */
  check_region_for_read (reg, ctxt);

  /* Special-case: handle var_decls in the constant pool.  */
  if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
    if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
      return sval;

  const svalue *sval
    = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
  if (sval)
    {
      if (reg->get_type ())
	sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
      return sval;
    }

  /* Special-case: read at a constant index within a STRING_CST.  */
  if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
    if (tree byte_offset_cst
	  = offset_reg->get_byte_offset ()->maybe_get_constant ())
      if (const string_region *str_reg
	    = reg->get_parent_region ()->dyn_cast_string_region ())
	{
	  tree string_cst = str_reg->get_string_cst ();
	  if (const svalue *char_sval
		= m_mgr->maybe_get_char_from_string_cst (string_cst,
							 byte_offset_cst))
	    return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
	}

  /* Special-case: read the initial char of a STRING_CST.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    if (const string_region *str_reg
	  = cast_reg->get_original_region ()->dyn_cast_string_region ())
      {
	tree string_cst = str_reg->get_string_cst ();
	tree byte_offset_cst = build_int_cst (integer_type_node, 0);
	if (const svalue *char_sval
	      = m_mgr->maybe_get_char_from_string_cst (string_cst,
						       byte_offset_cst))
	  return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
      }

  /* Otherwise we implicitly have the initial value of the region
     (if the cluster had been touched, binding_cluster::get_any_binding,
     would have returned UNKNOWN, and we would already have returned
     that above).  */

  /* Handle globals.  */
  if (reg->get_base_region ()->get_parent_region ()->get_kind ()
      == RK_GLOBALS)
    return get_initial_value_for_global (reg);

  return m_mgr->get_or_create_initial_value (reg);
}
2455 /* Return false if REG does not exist, true if it may do.
2456 This is for detecting regions within the stack that don't exist anymore
2457 after frames are popped. */
2459 bool
2460 region_model::region_exists_p (const region *reg) const
2462 /* If within a stack frame, check that the stack frame is live. */
2463 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
2465 /* Check that the current frame is the enclosing frame, or is called
2466 by it. */
2467 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
2468 iter_frame = iter_frame->get_calling_frame ())
2469 if (iter_frame == enclosing_frame)
2470 return true;
2471 return false;
2474 return true;
/* Get a region for referencing PTR_SVAL, creating a region if need be, and
   potentially generating warnings via CTXT.
   PTR_SVAL must be of pointer type.
   PTR_TREE if non-NULL can be used when emitting diagnostics.
   Falls back to a symbolic_region when the pointee can't be determined
   more precisely.  */

const region *
region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
			    region_model_context *ctxt) const
{
  gcc_assert (ptr_sval);
  gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));

  /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
     as a constraint.  This suppresses false positives from
     -Wanalyzer-null-dereference for the case where we later have an
     if (PTR_SVAL) that would occur if we considered the false branch
     and transitioned the malloc state machine from start->null.  */
  tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
  const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
  m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);

  switch (ptr_sval->get_kind ())
    {
    default:
      break;

    case SK_REGION:
      {
	/* A pointer to a known region: dereference to that region.  */
	const region_svalue *region_sval
	  = as_a <const region_svalue *> (ptr_sval);
	return region_sval->get_pointee ();
      }

    case SK_BINOP:
      {
	const binop_svalue *binop_sval
	  = as_a <const binop_svalue *> (ptr_sval);
	switch (binop_sval->get_op ())
	  {
	  case POINTER_PLUS_EXPR:
	    {
	      /* If we have a symbolic value expressing pointer arithmetic,
		 try to convert it to a suitable region.  */
	      const region *parent_region
		= deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
	      const svalue *offset = binop_sval->get_arg1 ();
	      tree type = TREE_TYPE (ptr_sval->get_type ());
	      return m_mgr->get_offset_region (parent_region, type, offset);
	    }
	  default:
	    break;
	  }
      }
      break;

    case SK_POISONED:
      {
	/* Dereferencing a poisoned (e.g. uninitialized) pointer:
	   warn if we have a context to warn through.  */
	if (ctxt)
	  {
	    tree ptr = get_representative_tree (ptr_sval);
	    /* If we can't get a representative tree for PTR_SVAL
	       (e.g. if it hasn't been bound into the store), then
	       fall back on PTR_TREE, if non-NULL.  */
	    if (!ptr)
	      ptr = ptr_tree;
	    if (ptr)
	      {
		const poisoned_svalue *poisoned_sval
		  = as_a <const poisoned_svalue *> (ptr_sval);
		enum poison_kind pkind = poisoned_sval->get_poison_kind ();
		ctxt->warn (make_unique<poisoned_value_diagnostic>
			      (ptr, pkind, NULL, NULL));
	      }
	  }
      }
      break;
    }

  return m_mgr->get_symbolic_region (ptr_sval);
}
2558 /* Attempt to get BITS within any value of REG, as TYPE.
2559 In particular, extract values from compound_svalues for the case
2560 where there's a concrete binding at BITS.
2561 Return an unknown svalue if we can't handle the given case.
2562 Use CTXT to report any warnings associated with reading from REG. */
2564 const svalue *
2565 region_model::get_rvalue_for_bits (tree type,
2566 const region *reg,
2567 const bit_range &bits,
2568 region_model_context *ctxt) const
2570 const svalue *sval = get_store_value (reg, ctxt);
2571 return m_mgr->get_or_create_bits_within (type, bits, sval);
/* A subclass of pending_diagnostic for complaining about writes to
   constant regions of memory (const objects, functions, labels).  */

class write_to_const_diagnostic
: public pending_diagnostic_subclass<write_to_const_diagnostic>
{
public:
  /* REG: the region being written to; DECL: the decl it refers to,
     used in the diagnostic text and for the "declared here" note.  */
  write_to_const_diagnostic (const region *reg, tree decl)
  : m_reg (reg), m_decl (decl)
  {}

  const char *get_kind () const final override
  {
    return "write_to_const_diagnostic";
  }

  /* Deduplication: two instances are the same diagnostic if they refer
     to the same region and decl.  */
  bool operator== (const write_to_const_diagnostic &other) const
  {
    return (m_reg == other.m_reg
	    && m_decl == other.m_decl);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_const;
  }

  /* Emit the warning, wording it by the kind of region written to,
     plus a "declared here" note if the warning was not suppressed.  */
  bool emit (rich_location *rich_loc) final override
  {
    auto_diagnostic_group d;
    bool warned;
    switch (m_reg->get_kind ())
      {
      default:
	warned = warning_at (rich_loc, get_controlling_option (),
			     "write to %<const%> object %qE", m_decl);
	break;
      case RK_FUNCTION:
	warned = warning_at (rich_loc, get_controlling_option (),
			     "write to function %qE", m_decl);
	break;
      case RK_LABEL:
	warned = warning_at (rich_loc, get_controlling_option (),
			     "write to label %qE", m_decl);
	break;
      }
    if (warned)
      inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
    return warned;
  }

  /* Describe the final event in the diagnostic path, with wording
     matching that of emit ().  */
  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_reg->get_kind ())
      {
      default:
	return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
      case RK_FUNCTION:
	return ev.formatted_print ("write to function %qE here", m_decl);
      case RK_LABEL:
	return ev.formatted_print ("write to label %qE here", m_decl);
      }
  }

private:
  const region *m_reg;	/* The region written to.  */
  tree m_decl;		/* The const decl/function/label involved.  */
};
/* A subclass of pending_diagnostic for complaining about writes to
   string literals.  */

class write_to_string_literal_diagnostic
: public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
{
public:
  write_to_string_literal_diagnostic (const region *reg)
  : m_reg (reg)
  {}

  const char *get_kind () const final override
  {
    return "write_to_string_literal_diagnostic";
  }

  /* Deduplicate per written-to region.  */
  bool operator== (const write_to_string_literal_diagnostic &other) const
  {
    return m_reg == other.m_reg;
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_string_literal;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "write to string literal");
    /* Ideally we would show the location of the STRING_CST as well,
       but it is not available at this point.  */
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("write to string literal here");
  }

private:
  const region *m_reg;	/* The string-literal region written to.  */
};
/* Use CTXT to warn if DEST_REG is a region that shouldn't be written to
   (a function, a label, a const global, or a string literal).  */

void
region_model::check_for_writable_region (const region* dest_reg,
					 region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return;

  /* Classify by the outermost region containing DEST_REG.  */
  const region *base_reg = dest_reg->get_base_region ();
  switch (base_reg->get_kind ())
    {
    default:
      break;
    case RK_FUNCTION:
      {
	const function_region *func_reg = as_a <const function_region *> (base_reg);
	tree fndecl = func_reg->get_fndecl ();
	ctxt->warn (make_unique<write_to_const_diagnostic>
		      (func_reg, fndecl));
      }
      break;
    case RK_LABEL:
      {
	const label_region *label_reg = as_a <const label_region *> (base_reg);
	tree label = label_reg->get_label ();
	ctxt->warn (make_unique<write_to_const_diagnostic>
		      (label_reg, label));
      }
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
	tree decl = decl_reg->get_decl ();
	/* Warn about writes to const globals.
	   Don't warn for writes to const locals, and params in particular,
	   since we would warn in push_frame when setting them up (e.g the
	   "this" param is "T* const").  */
	if (TREE_READONLY (decl)
	    && is_global_var (decl))
	  ctxt->warn (make_unique<write_to_const_diagnostic> (dest_reg, decl));
      }
      break;
    case RK_STRING:
      ctxt->warn (make_unique<write_to_string_literal_diagnostic> (dest_reg));
      break;
    }
}
/* Get the capacity of REG in bytes.
   Falls back to any recorded dynamic extent, then to "unknown".  */

const svalue *
region_model::get_capacity (const region *reg) const
{
  switch (reg->get_kind ())
    {
    default:
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	tree decl = decl_reg->get_decl ();
	if (TREE_CODE (decl) == SSA_NAME)
	  {
	    /* NOTE(review): TYPE_SIZE is in bits, whereas decl_init_size
	       below is in bytes — this looks inconsistent with the
	       "capacity in bytes" contract; confirm intent.  */
	    tree type = TREE_TYPE (decl);
	    tree size = TYPE_SIZE (type);
	    return get_rvalue (size, NULL);
	  }
	else
	  {
	    tree size = decl_init_size (decl, false);
	    if (size)
	      return get_rvalue (size, NULL);
	  }
      }
      break;
    case RK_SIZED:
      /* Look through sized regions to get at the capacity
	 of the underlying regions.  */
      return get_capacity (reg->get_parent_region ());
    }

  /* A capacity recorded at runtime (e.g. from malloc) takes
     precedence over giving up.  */
  if (const svalue *recorded = get_dynamic_extents (reg))
    return recorded;

  return m_mgr->get_or_create_unknown_svalue (sizetype);
}
2775 /* Return the string size, including the 0-terminator, if SVAL is a
2776 constant_svalue holding a string. Otherwise, return an unknown_svalue. */
2778 const svalue *
2779 region_model::get_string_size (const svalue *sval) const
2781 tree cst = sval->maybe_get_constant ();
2782 if (!cst || TREE_CODE (cst) != STRING_CST)
2783 return m_mgr->get_or_create_unknown_svalue (size_type_node);
2785 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
2786 return m_mgr->get_or_create_constant_svalue (out);
2789 /* Return the string size, including the 0-terminator, if REG is a
2790 string_region. Otherwise, return an unknown_svalue. */
2792 const svalue *
2793 region_model::get_string_size (const region *reg) const
2795 const string_region *str_reg = dyn_cast <const string_region *> (reg);
2796 if (!str_reg)
2797 return m_mgr->get_or_create_unknown_svalue (size_type_node);
2799 tree cst = str_reg->get_string_cst ();
2800 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
2801 return m_mgr->get_or_create_constant_svalue (out);
/* If CTXT is non-NULL, use it to warn about any problems accessing REG,
   using DIR to determine if this access is a read or write.
   Runs taint and bounds checks for both directions; writability checks
   for writes only.  */

void
region_model::check_region_access (const region *reg,
				   enum access_direction dir,
				   region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return;

  check_region_for_taint (reg, dir, ctxt);
  check_region_bounds (reg, dir, ctxt);

  switch (dir)
    {
    default:
      gcc_unreachable ();
    case DIR_READ:
      /* Currently a no-op.  */
      break;
    case DIR_WRITE:
      check_for_writable_region (reg, ctxt);
      break;
    }
}
/* If CTXT is non-NULL, use it to warn about any problems writing to REG.
   Thin wrapper around check_region_access with DIR_WRITE.  */

void
region_model::check_region_for_write (const region *dest_reg,
				      region_model_context *ctxt) const
{
  check_region_access (dest_reg, DIR_WRITE, ctxt);
}
/* If CTXT is non-NULL, use it to warn about any problems reading from REG.
   Thin wrapper around check_region_access with DIR_READ.  */

void
region_model::check_region_for_read (const region *src_reg,
				     region_model_context *ctxt) const
{
  check_region_access (src_reg, DIR_READ, ctxt);
}
/* Concrete subclass for casts of pointers that lead to trailing bytes:
   -Wanalyzer-allocation-size, for when an allocated buffer's size is
   not a multiple of the pointed-to type's size.  */

class dubious_allocation_size
: public pending_diagnostic_subclass<dubious_allocation_size>
{
public:
  /* LHS: region of the pointer being assigned to;
     RHS: the allocated region it points at.  */
  dubious_allocation_size (const region *lhs, const region *rhs)
  : m_lhs (lhs), m_rhs (rhs), m_expr (NULL_TREE),
    m_has_allocation_event (false)
  {}

  /* As above, but with EXPR: a tree for the allocation size, used in
     the fallback wording of describe_final_event.  */
  dubious_allocation_size (const region *lhs, const region *rhs,
			   tree expr)
  : m_lhs (lhs), m_rhs (rhs), m_expr (expr),
    m_has_allocation_event (false)
  {}

  const char *get_kind () const final override
  {
    return "dubious_allocation_size";
  }

  bool operator== (const dubious_allocation_size &other) const
  {
    return m_lhs == other.m_lhs && m_rhs == other.m_rhs
	   && pending_diagnostic::same_tree_p (m_expr, other.m_expr);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_allocation_size;
  }

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (131);	/* CWE-131: Incorrect Calculation of Buffer Size.  */

    return warning_meta (rich_loc, m, get_controlling_option (),
			 "allocated buffer size is not a multiple"
			 " of the pointee's size");
  }

  /* Word the final event; prefer referencing the allocation event when
     one was added, otherwise fall back on M_EXPR if present.  */
  label_text describe_final_event (const evdesc::final_event &ev) final
  override
  {
    tree pointee_type = TREE_TYPE (m_lhs->get_type ());
    if (m_has_allocation_event)
      return ev.formatted_print ("assigned to %qT here;"
				 " %<sizeof (%T)%> is %qE",
				 m_lhs->get_type (), pointee_type,
				 size_in_bytes (pointee_type));
    /* Fallback: Typically, we should always see an allocation_event
       before.  */
    if (m_expr)
      {
	if (TREE_CODE (m_expr) == INTEGER_CST)
	  return ev.formatted_print ("allocated %E bytes and assigned to"
				    " %qT here; %<sizeof (%T)%> is %qE",
				    m_expr, m_lhs->get_type (), pointee_type,
				    size_in_bytes (pointee_type));
	else
	  return ev.formatted_print ("allocated %qE bytes and assigned to"
				    " %qT here; %<sizeof (%T)%> is %qE",
				    m_expr, m_lhs->get_type (), pointee_type,
				    size_in_bytes (pointee_type));
      }

    return ev.formatted_print ("allocated and assigned to %qT here;"
			       " %<sizeof (%T)%> is %qE",
			       m_lhs->get_type (), pointee_type,
			       size_in_bytes (pointee_type));
  }

  /* Record the allocation event so describe_final_event can refer
     back to it.  */
  void
  add_region_creation_events (const region *,
			      tree capacity,
			      const event_loc_info &loc_info,
			      checker_path &emission_path) final override
  {
    emission_path.add_event
      (make_unique<region_creation_event_allocation_size> (capacity, loc_info));

    m_has_allocation_event = true;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_rhs);
  }

private:
  const region *m_lhs;		/* Pointer lhs of the assignment.  */
  const region *m_rhs;		/* The allocated region.  */
  const tree m_expr;		/* Optional allocation-size expr.  */
  bool m_has_allocation_event;	/* Whether an allocation event was added.  */
};
2948 /* Return true on dubious allocation sizes for constant sizes. */
2950 static bool
2951 capacity_compatible_with_type (tree cst, tree pointee_size_tree,
2952 bool is_struct)
2954 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
2955 gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);
2957 unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
2958 unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);
2960 if (is_struct)
2961 return alloc_size == 0 || alloc_size >= pointee_size;
2962 return alloc_size % pointee_size == 0;
/* Overload of the above for non-struct pointee types: the allocation
   must be an exact multiple of the pointee size.  */

static bool
capacity_compatible_with_type (tree cst, tree pointee_size_tree)
{
  return capacity_compatible_with_type (cst, pointee_size_tree, false);
}
/* Checks whether SVAL could be a multiple of SIZE_CST.

   It works by visiting all svalues inside SVAL until it reaches
   atomic nodes.  From those, it goes back up again and adds each
   node that might be a multiple of SIZE_CST to the RESULT_SET.
   get_result () then reports whether the root itself made it into
   the set.  */

class size_visitor : public visitor
{
public:
  size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
  : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
  {
    /* The traversal happens in the constructor; get_result () just
       reads the outcome.  */
    m_root_sval->accept (this);
  }

  bool get_result ()
  {
    return result_set.contains (m_root_sval);
  }

  void visit_constant_svalue (const constant_svalue *sval) final override
  {
    check_constant (sval->get_constant (), sval);
  }

  /* Unknown values might be anything, including a multiple.  */
  void visit_unknown_svalue (const unknown_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    result_set.add (sval);
  }

  /* Likewise for poisoned values.  */
  void visit_poisoned_svalue (const poisoned_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    result_set.add (sval);
  }

  /* A unary op is compatible iff its operand is.  */
  void visit_unaryop_svalue (const unaryop_svalue *sval) final override
  {
    const svalue *arg = sval->get_arg ();
    if (result_set.contains (arg))
      result_set.add (sval);
  }

  void visit_binop_svalue (const binop_svalue *sval) final override
  {
    const svalue *arg0 = sval->get_arg0 ();
    const svalue *arg1 = sval->get_arg1 ();

    if (sval->get_op () == MULT_EXPR)
      {
	/* A product is a multiple if either factor is.  */
	if (result_set.contains (arg0) || result_set.contains (arg1))
	  result_set.add (sval);
      }
    else
      {
	/* For other binops (e.g. sums), require both operands.  */
	if (result_set.contains (arg0) && result_set.contains (arg1))
	  result_set.add (sval);
      }
  }

  void visit_repeated_svalue (const repeated_svalue *sval) final override
  {
    sval->get_inner_svalue ()->accept (this);
    if (result_set.contains (sval->get_inner_svalue ()))
      result_set.add (sval);
  }

  void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
  {
    sval->get_arg ()->accept (this);
    if (result_set.contains (sval->get_arg ()))
      result_set.add (sval);
  }

  void visit_widening_svalue (const widening_svalue *sval) final override
  {
    const svalue *base = sval->get_base_svalue ();
    const svalue *iter = sval->get_iter_svalue ();

    if (result_set.contains (base) && result_set.contains (iter))
      result_set.add (sval);
  }

  /* For conjured values, consult the constraint manager: a known
     constant equivalence lets us check exactly; otherwise assume
     compatible.  */
  void visit_conjured_svalue (const conjured_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    equiv_class_id id (-1);
    if (m_cm->get_equiv_class_by_svalue (sval, &id))
      {
	if (tree cst = id.get_obj (*m_cm).get_any_constant ())
	  check_constant (cst, sval);
	else
	  result_set.add (sval);
      }
  }

  void visit_asm_output_svalue (const asm_output_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    result_set.add (sval);
  }

  void visit_const_fn_result_svalue (const const_fn_result_svalue
				      *sval ATTRIBUTE_UNUSED) final override
  {
    result_set.add (sval);
  }

private:
  /* Add SVAL to the result set if CST is compatible with m_size_cst.  */
  void check_constant (tree cst, const svalue *sval)
  {
    switch (TREE_CODE (cst))
      {
      default:
	/* Assume all unhandled operands are compatible.  */
	result_set.add (sval);
	break;
      case INTEGER_CST:
	if (capacity_compatible_with_type (cst, m_size_cst))
	  result_set.add (sval);
	break;
      }
  }

  tree m_size_cst;		/* The pointee size being checked against.  */
  const svalue *m_root_sval;	/* The capacity value being analyzed.  */
  constraint_manager *m_cm;	/* For resolving conjured values.  */
  svalue_set result_set; /* Used as a mapping of svalue*->bool.  */
};
3102 /* Return true if a struct or union either uses the inheritance pattern,
3103 where the first field is a base struct, or the flexible array member
3104 pattern, where the last field is an array without a specified size. */
3106 static bool
3107 struct_or_union_with_inheritance_p (tree struc)
3109 tree iter = TYPE_FIELDS (struc);
3110 if (iter == NULL_TREE)
3111 return false;
3112 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
3113 return true;
3115 tree last_field;
3116 while (iter != NULL_TREE)
3118 last_field = iter;
3119 iter = DECL_CHAIN (iter);
3122 if (last_field != NULL_TREE
3123 && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
3124 return true;
3126 return false;
/* Return true if the lhs and rhs of an assignment have different types.
   For calls, compares the lhs type against the call's return type.
   Statements that are neither assignments nor calls with an lhs
   return false.  */

static bool
is_any_cast_p (const gimple *stmt)
{
  if (const gassign *assign = dyn_cast <const gassign *> (stmt))
    /* Either an explicit cast, or an implicit type change between
       lhs and rhs1.  */
    return gimple_assign_cast_p (assign)
	   || !pending_diagnostic::same_tree_p (
		  TREE_TYPE (gimple_assign_lhs (assign)),
		  TREE_TYPE (gimple_assign_rhs1 (assign)));
  else if (const gcall *call = dyn_cast <const gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call);
      return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
				    TREE_TYPE (gimple_call_lhs (call)),
				    gimple_call_return_type (call));
    }

  return false;
}
/* On pointer assignments, check whether the buffer size of
   RHS_SVAL is compatible with the type of the LHS_REG.
   Use a non-null CTXT to report allocation size warnings
   (-Wanalyzer-allocation-size).  */

void
region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
				 region_model_context *ctxt) const
{
  if (!ctxt || ctxt->get_stmt () == NULL)
    return;
  /* Only report warnings on assignments that actually change the type.  */
  if (!is_any_cast_p (ctxt->get_stmt ()))
    return;

  /* We need a pointer to a concrete region on the rhs.  */
  const region_svalue *reg_sval = dyn_cast <const region_svalue *> (rhs_sval);
  if (!reg_sval)
    return;

  tree pointer_type = lhs_reg->get_type ();
  if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
    return;

  tree pointee_type = TREE_TYPE (pointer_type);
  /* Make sure that the type on the left-hand side actually has a size.  */
  if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
      || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
    return;

  /* Bail out early on pointers to structs where we can
     not deduce whether the buffer size is compatible.  */
  bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
  if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
    return;

  tree pointee_size_tree = size_in_bytes (pointee_type);
  /* We give up if the type size is not known at compile-time or the
     type size is always compatible regardless of the buffer size.  */
  if (TREE_CODE (pointee_size_tree) != INTEGER_CST
      || integer_zerop (pointee_size_tree)
      || integer_onep (pointee_size_tree))
    return;

  const region *rhs_reg = reg_sval->get_pointee ();
  const svalue *capacity = get_capacity (rhs_reg);
  switch (capacity->get_kind ())
    {
    case svalue_kind::SK_CONSTANT:
      {
	/* Concrete capacity: check divisibility directly.  */
	const constant_svalue *cst_cap_sval
	  = as_a <const constant_svalue *> (capacity);
	tree cst_cap = cst_cap_sval->get_constant ();
	if (TREE_CODE (cst_cap) == INTEGER_CST
	    && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
					       is_struct))
	  ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
							     cst_cap));
      }
      break;
    default:
      {
	/* Symbolic capacity: use size_visitor to decide whether it
	   could be a multiple of the pointee size.  */
	if (!is_struct)
	  {
	    size_visitor v (pointee_size_tree, capacity, m_constraints);
	    if (!v.get_result ())
	      {
		tree expr = get_representative_tree (capacity);
		ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg,
								   rhs_reg,
								   expr));
	      }
	  }
	break;
      }
    }
}
/* Set the value of the region given by LHS_REG to the value given
   by RHS_SVAL.
   Use CTXT to report any warnings associated with writing to LHS_REG
   (allocation-size and writability checks).  */

void
region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
			 region_model_context *ctxt)
{
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);

  /* Setting the value of an empty region is a no-op.  */
  if (lhs_reg->empty_p ())
    return;

  check_region_size (lhs_reg, rhs_sval, ctxt);

  check_region_for_write (lhs_reg, ctxt);

  m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
		     ctxt ? ctxt->get_uncertainty () : NULL);
}
3249 /* Set the value of the region given by LHS to the value given by RHS. */
3251 void
3252 region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
3254 const region *lhs_reg = get_lvalue (lhs, ctxt);
3255 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
3256 gcc_assert (lhs_reg);
3257 gcc_assert (rhs_sval);
3258 set_value (lhs_reg, rhs_sval, ctxt);
/* Remove all bindings overlapping REG within the store.
   Delegates to store::clobber_region.  */

void
region_model::clobber_region (const region *reg)
{
  m_store.clobber_region (m_mgr->get_store_manager(), reg);
}
/* Remove any bindings for REG within the store.
   Delegates to store::purge_region.  */

void
region_model::purge_region (const region *reg)
{
  m_store.purge_region (m_mgr->get_store_manager(), reg);
}
/* Fill REG with SVAL (e.g. for memset-like operations).
   Thin wrapper around store::fill_region.  */

void
region_model::fill_region (const region *reg, const svalue *sval)
{
  m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
}
/* Zero-fill REG.
   Thin wrapper around store::zero_fill_region.  */

void
region_model::zero_fill_region (const region *reg)
{
  m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
}
/* Mark REG as having unknown content.
   UNCERTAINTY, if non-NULL, is passed through to the store — presumably
   to record which svalues became unknowable; confirm against
   store::mark_region_as_unknown.  */

void
region_model::mark_region_as_unknown (const region *reg,
				      uncertainty_t *uncertainty)
{
  m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
				  uncertainty);
}
/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
   this model.
   The checks below are attempted in order; each either returns a known
   tristate or falls through to the next, ending with a (read-only) query
   of the constraint manager.  */

tristate
region_model::eval_condition (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs) const
{
  gcc_assert (lhs);
  gcc_assert (rhs);

  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
      || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
    return tristate::unknown ();

  /* See what we know based on the values.  */

  /* Unwrap any unmergeable values.  */
  lhs = lhs->unwrap_any_unmergeable ();
  rhs = rhs->unwrap_any_unmergeable ();

  if (lhs == rhs)
    {
      /* If we have the same svalue, then we have equality
	 (apart from NaN-handling).
	 TODO: should this definitely be the case for poisoned values?  */
      /* Poisoned and unknown values are "unknowable".  */
      if (lhs->get_kind () == SK_POISONED
	  || lhs->get_kind () == SK_UNKNOWN)
	return tristate::TS_UNKNOWN;

      switch (op)
	{
	case EQ_EXPR:
	case GE_EXPR:
	case LE_EXPR:
	  return tristate::TS_TRUE;

	case NE_EXPR:
	case GT_EXPR:
	case LT_EXPR:
	  return tristate::TS_FALSE;

	default:
	  /* For other ops, use the logic below.  */
	  break;
	}
    }

  /* If we have a pair of region_svalues, compare them.  */
  if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
	if (res.is_known ())
	  return res;
	/* Otherwise, only known through constraints.  */
      }

  if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
    {
      /* If we have a pair of constants, compare them.  */
      if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
	return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
      else
	{
	  /* When we have one constant, put it on the RHS.  */
	  std::swap (lhs, rhs);
	  op = swap_tree_comparison (op);
	}
    }
  /* After canonicalization, any constant operand is on the RHS.  */
  gcc_assert (lhs->get_kind () != SK_CONSTANT);

  /* Handle comparison against zero.  */
  if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
    if (zerop (cst_rhs->get_constant ()))
      {
	if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
	  {
	    /* A region_svalue is a non-NULL pointer, except in certain
	       special cases (see the comment for region::non_null_p).  */
	    const region *pointee = ptr->get_pointee ();
	    if (pointee->non_null_p ())
	      {
		switch (op)
		  {
		  default:
		    gcc_unreachable ();

		  case EQ_EXPR:
		  case GE_EXPR:
		  case LE_EXPR:
		    return tristate::TS_FALSE;

		  case NE_EXPR:
		  case GT_EXPR:
		  case LT_EXPR:
		    return tristate::TS_TRUE;
		  }
	      }
	  }
	else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
	  {
	    /* Treat offsets from a non-NULL pointer as being non-NULL.  This
	       isn't strictly true, in that eventually ptr++ will wrap
	       around and be NULL, but it won't occur in practice and thus
	       can be used to suppress effectively false positives that we
	       shouldn't warn for.  */
	    if (binop->get_op () == POINTER_PLUS_EXPR)
	      {
		tristate lhs_ts = eval_condition (binop->get_arg0 (), op, rhs);
		if (lhs_ts.is_known ())
		  return lhs_ts;
	      }
	  }
	else if (const unaryop_svalue *unaryop
		   = lhs->dyn_cast_unaryop_svalue ())
	  {
	    if (unaryop->get_op () == NEGATE_EXPR)
	      {
		/* e.g. "-X <= 0" is equivalent to "X >= 0".  */
		tristate lhs_ts = eval_condition (unaryop->get_arg (),
						  swap_tree_comparison (op),
						  rhs);
		if (lhs_ts.is_known ())
		  return lhs_ts;
	      }
	  }
      }

  /* Handle rejection of equality for comparisons of the initial values of
     "external" values (such as params) with the address of locals.  */
  if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
	if (res.is_known ())
	  return res;
      }
  if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
    if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
	if (res.is_known ())
	  return res;
      }

  if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
    if (tree rhs_cst = rhs->maybe_get_constant ())
      {
	tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
	if (res.is_known ())
	  return res;
      }

  /* Handle comparisons between two svalues with more than one operand.  */
  if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
    {
      switch (op)
	{
	default:
	  break;
	case EQ_EXPR:
	  {
	    /* TODO: binops can be equal even if they are not structurally
	       equal in case of commutative operators.  */
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case LE_EXPR:
	  {
	    /* Structural equality implies LHS <= RHS.  */
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case GE_EXPR:
	  {
	    /* LHS >= RHS holds if they are equal or if LHS > RHS.  */
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	    res = symbolic_greater_than (binop, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case GT_EXPR:
	  {
	    tristate res = symbolic_greater_than (binop, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	}
    }

  /* Otherwise, try constraints.
     Cast to const to ensure we don't change the constraint_manager as we
     do this (e.g. by creating equivalence classes).  */
  const constraint_manager *constraints = m_constraints;
  return constraints->eval_condition (lhs, op, rhs);
}
3510 /* Subroutine of region_model::eval_condition, for rejecting
3511 equality of INIT_VAL(PARM) with &LOCAL. */
3513 tristate
3514 region_model::compare_initial_and_pointer (const initial_svalue *init,
3515 const region_svalue *ptr) const
3517 const region *pointee = ptr->get_pointee ();
3519 /* If we have a pointer to something within a stack frame, it can't be the
3520 initial value of a param. */
3521 if (pointee->maybe_get_frame_region ())
3522 if (init->initial_value_of_param_p ())
3523 return tristate::TS_FALSE;
3525 return tristate::TS_UNKNOWN;
/* Return true if SVAL is definitely positive (i.e. non-zero and
   non-negative).  */

static bool
is_positive_svalue (const svalue *sval)
{
  /* A constant is positive iff it is non-zero and its value range is
     known to be non-negative.  */
  if (tree cst = sval->maybe_get_constant ())
    return !zerop (cst) && get_range_pos_neg (cst) == 1;
  tree type = sval->get_type ();
  if (!type)
    return false;
  /* Consider a binary operation size_t + int.  The analyzer wraps the int in
     an unaryop_svalue, converting it to a size_t, but in the dynamic execution
     the result is smaller than the first operand.  Thus, we have to look if
     the argument of the unaryop_svalue is also positive.  */
  if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
    return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
	   && is_positive_svalue (un_op->get_arg ());
  /* Otherwise, fall back on the signedness of the type.  */
  return TYPE_UNSIGNED (type);
}
/* Return TS_TRUE if A is definitely larger than B, TS_UNKNOWN otherwise.

   Limitation: does not account for integer overflows and does not try to
   return false, so it can not be used negated.  */

tristate
region_model::symbolic_greater_than (const binop_svalue *bin_a,
				     const svalue *b) const
{
  if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
    {
      /* Eliminate the right-hand side of both svalues: for matching ops,
	 (a0 OP a1) > (b0 OP b1) if a1 > b1 and a0 >= b0 (ignoring
	 overflow, per the limitation above).  */
      if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
	if (bin_a->get_op () == bin_b->get_op ()
	    && eval_condition (bin_a->get_arg1 (),
			       GT_EXPR,
			       bin_b->get_arg1 ()).is_true ()
	    && eval_condition (bin_a->get_arg0 (),
			       GE_EXPR,
			       bin_b->get_arg0 ()).is_true ())
	  return tristate (tristate::TS_TRUE);

      /* Otherwise, try to remove a positive offset or factor from BIN_A:
	 (a0 + pos) >= a0 and (a0 * pos) >= a0, so a0 >= b suffices.  */
      if (is_positive_svalue (bin_a->get_arg1 ())
	  && eval_condition (bin_a->get_arg0 (),
			     GE_EXPR, b).is_true ())
	return tristate (tristate::TS_TRUE);
    }
  return tristate::unknown ();
}
/* Return true if A and B are equal structurally.

   Structural equality means that A and B are equal if the svalues A and B have
   the same nodes at the same positions in the tree and the leafs are equal.
   Equality for conjured_svalues and initial_svalues is determined by comparing
   the pointers while constants are compared by value.  That behavior is useful
   to check for binop_svalues that evaluate to the same concrete value but
   might use one operand with a different type but the same constant value.

   For example,
     binop_svalue (mult_expr,
		   initial_svalue (‘size_t’, decl_region (..., 'some_var')),
		   constant_svalue (‘size_t’, 4))
   and
     binop_svalue (mult_expr,
		   initial_svalue (‘size_t’, decl_region (..., 'some_var'),
		   constant_svalue (‘sizetype’, 4))
   are structurally equal.  A concrete C code example, where this occurs, can
   be found in test7 of out-of-bounds-5.c.  */

tristate
region_model::structural_equality (const svalue *a, const svalue *b) const
{
  /* If A and B are referentially equal, they are also structurally equal.  */
  if (a == b)
    return tristate (tristate::TS_TRUE);

  switch (a->get_kind ())
    {
    default:
      return tristate::unknown ();
    /* SK_CONJURED and SK_INITIAL are already handled
       by the referential equality above.  */
    case SK_CONSTANT:
      {
	/* Constants are compared by value, so differing types with the
	   same value (e.g. size_t 4 vs sizetype 4) still compare equal.  */
	tree a_cst = a->maybe_get_constant ();
	tree b_cst = b->maybe_get_constant ();
	if (a_cst && b_cst)
	  return tristate (tree_int_cst_equal (a_cst, b_cst));
      }
      return tristate (tristate::TS_FALSE);
    case SK_UNARYOP:
      {
	/* Unary ops match if op, type and (recursively) argument match.  */
	const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
	if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
	  return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
							    un_b->get_type ())
			   && un_a->get_op () == un_b->get_op ()
			   && structural_equality (un_a->get_arg (),
						   un_b->get_arg ()));
      }
      return tristate (tristate::TS_FALSE);
    case SK_BINOP:
      {
	/* Binary ops match if op and both operands (recursively) match.  */
	const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
	if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
	  return tristate (bin_a->get_op () == bin_b->get_op ()
			   && structural_equality (bin_a->get_arg0 (),
						   bin_b->get_arg0 ())
			   && structural_equality (bin_a->get_arg1 (),
						   bin_b->get_arg1 ()));
      }
      return tristate (tristate::TS_FALSE);
    }
}
/* Handle various constraints of the form:
     LHS: ((bool)INNER_LHS INNER_OP INNER_RHS))
     OP : == or !=
     RHS: zero
   and (with a cast):
     LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
     OP : == or !=
     RHS: zero
   by adding constraints for INNER_LHS INNEROP INNER_RHS.

   Return true if this function can fully handle the constraint; if
   so, add the implied constraint(s) and write true to *OUT if they
   are consistent with existing constraints, or write false to *OUT
   if they contradict existing constraints.

   Return false for cases that this function doesn't know how to handle.

   For example, if we're checking a stored conditional, we'll have
   something like:
     LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
     OP : NE_EXPR
     RHS: zero
   which this function can turn into an add_constraint of:
     (&HEAP_ALLOCATED_REGION(8) != (int *)0B)

   Similarly, optimized && and || conditionals lead to e.g.
     if (p && q)
   becoming gimple like this:
     _1 = p_6 == 0B;
     _2 = q_8 == 0B
     _3 = _1 | _2
   On the "_3 is false" branch we can have constraints of the form:
     ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
      | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
     == 0
   which implies that both _1 and _2 are false,
   which this function can turn into a pair of add_constraints of
     (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
   and:
     (&HEAP_ALLOCATED_REGION(10)!=(int *)0B).  */

bool
region_model::add_constraints_from_binop (const svalue *outer_lhs,
					  enum tree_code outer_op,
					  const svalue *outer_rhs,
					  bool *out,
					  region_model_context *ctxt)
{
  /* Strip all casts so we see the underlying comparison/bitwise op.  */
  while (const svalue *cast = outer_lhs->maybe_undo_cast ())
    outer_lhs = cast;
  const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
  if (!binop_sval)
    return false;
  if (!outer_rhs->all_zeroes_p ())
    return false;

  const svalue *inner_lhs = binop_sval->get_arg0 ();
  enum tree_code inner_op = binop_sval->get_op ();
  const svalue *inner_rhs = binop_sval->get_arg1 ();

  if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
    return false;

  /* We have either
     - "OUTER_LHS != false" (i.e. OUTER is true), or
     - "OUTER_LHS == false" (i.e. OUTER is false).  */
  bool is_true = outer_op == NE_EXPR;

  switch (inner_op)
    {
    default:
      return false;

    case EQ_EXPR:
    case NE_EXPR:
      {
	/* ...and "(inner_lhs OP inner_rhs) == 0"
	   then (inner_lhs OP inner_rhs) must have the same
	   logical value as LHS.  */
	if (!is_true)
	  inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
	*out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
	return true;
      }
      break;

    case BIT_AND_EXPR:
      if (is_true)
	{
	  /* ...and "(inner_lhs & inner_rhs) != 0"
	     then both inner_lhs and inner_rhs must be true.  */
	  const svalue *false_sval
	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
	  bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
	  bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
	  *out = sat1 && sat2;
	  return true;
	}
      return false;

    case BIT_IOR_EXPR:
      if (!is_true)
	{
	  /* ...and "(inner_lhs | inner_rhs) == 0"
	     i.e. "(inner_lhs | inner_rhs)" is false
	     then both inner_lhs and inner_rhs must be false.  */
	  const svalue *false_sval
	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
	  bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
	  bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
	  *out = sat1 && sat2;
	  return true;
	}
      return false;
    }
}
/* Attempt to add the constraint "LHS OP RHS" to this region_model.
   If it is consistent with existing constraints, add it, and return true.
   Return false if it contradicts existing constraints.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

bool
region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
			      region_model_context *ctxt)
{
  /* For now, make no attempt to capture constraints on floating-point
     values; report "consistent" so the path is not rejected.  */
  if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
    return true;

  /* Evaluating the operands may itself emit diagnostics via CTXT.  */
  const svalue *lhs_sval = get_rvalue (lhs, ctxt);
  const svalue *rhs_sval = get_rvalue (rhs, ctxt);

  return add_constraint (lhs_sval, op, rhs_sval, ctxt);
}
/* Attempt to add the constraint "LHS OP RHS" to this region_model.
   If it is consistent with existing constraints, add it, and return true.
   Return false if it contradicts existing constraints.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

bool
region_model::add_constraint (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs,
			      region_model_context *ctxt)
{
  tristate t_cond = eval_condition (lhs, op, rhs);

  /* If we already have the condition, do nothing.  */
  if (t_cond.is_true ())
    return true;

  /* Reject a constraint that would contradict existing knowledge, as
     unsatisfiable.  */
  if (t_cond.is_false ())
    return false;

  /* Decompose e.g. "(a == b) != 0" into constraints on the inner
     comparison, where possible.  */
  bool out;
  if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
    return out;

  /* Attempt to store the constraint.  */
  if (!m_constraints->add_constraint (lhs, op, rhs))
    return false;

  /* Notify the context, if any.  This exists so that the state machines
     in a program_state can be notified about the condition, and so can
     set sm-state for e.g. unchecked->checked, both for cfg-edges, and
     when synthesizing constraints as above.  */
  if (ctxt)
    ctxt->on_condition (lhs, op, rhs);

  /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
     the case where REGION is heap-allocated and thus could be NULL).  */
  if (tree rhs_cst = rhs->maybe_get_constant ())
    if (op == EQ_EXPR && zerop (rhs_cst))
      if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
	unset_dynamic_extents (region_sval->get_pointee ());

  return true;
}
/* As above, but when returning false, if OUT is non-NULL, write a
   new rejected_constraint to *OUT.  The caller takes ownership of
   the heap-allocated rejected_op_constraint.  */

bool
region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
			      region_model_context *ctxt,
			      rejected_constraint **out)
{
  bool sat = add_constraint (lhs, op, rhs, ctxt);
  if (!sat && out)
    *out = new rejected_op_constraint (*this, lhs, op, rhs);
  return sat;
}
/* Determine what is known about the condition "LHS OP RHS" within
   this model.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

tristate
region_model::eval_condition (tree lhs,
			      enum tree_code op,
			      tree rhs,
			      region_model_context *ctxt) const
{
  /* For now, make no attempt to model constraints on floating-point
     values.  */
  if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
    return tristate::unknown ();

  return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
}
/* Implementation of region_model::get_representative_path_var.

   Attempt to return a path_var that represents SVAL, or return NULL_TREE.
   Use VISITED to prevent infinite mutual recursion with the overload for
   regions.  */

path_var
region_model::get_representative_path_var_1 (const svalue *sval,
					     svalue_set *visited) const
{
  gcc_assert (sval);

  /* Prevent infinite recursion.  */
  if (visited->contains (sval))
    return path_var (NULL_TREE, 0);
  visited->add (sval);

  /* Handle casts by recursion into get_representative_path_var.  */
  if (const svalue *cast_sval = sval->maybe_undo_cast ())
    {
      path_var result = get_representative_path_var (cast_sval, visited);
      tree orig_type = sval->get_type ();
      /* If necessary, wrap the result in a cast.  */
      if (result.m_tree && orig_type)
	result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
      return result;
    }

  /* Collect candidate path_vars; the best one is chosen at the end.  */
  auto_vec<path_var> pvs;
  m_store.get_representative_path_vars (this, visited, sval, &pvs);

  if (tree cst = sval->maybe_get_constant ())
    pvs.safe_push (path_var (cst, 0));

  /* Handle string literals and various other pointers.  */
  if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
    {
      const region *reg = ptr_sval->get_pointee ();
      if (path_var pv = get_representative_path_var (reg, visited))
	return path_var (build1 (ADDR_EXPR,
				 sval->get_type (),
				 pv.m_tree),
			 pv.m_stack_depth);
    }

  /* If we have a sub_svalue, look for ways to represent the parent.  */
  if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
    {
      const svalue *parent_sval = sub_sval->get_parent ();
      const region *subreg = sub_sval->get_subregion ();
      if (path_var parent_pv
	    = get_representative_path_var (parent_sval, visited))
	if (const field_region *field_reg = subreg->dyn_cast_field_region ())
	  return path_var (build3 (COMPONENT_REF,
				   sval->get_type (),
				   parent_pv.m_tree,
				   field_reg->get_field (),
				   NULL_TREE),
			   parent_pv.m_stack_depth);
    }

  /* Handle binops.  */
  if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
    if (path_var lhs_pv
	  = get_representative_path_var (binop_sval->get_arg0 (), visited))
      if (path_var rhs_pv
	    = get_representative_path_var (binop_sval->get_arg1 (), visited))
	return path_var (build2 (binop_sval->get_op (),
				 sval->get_type (),
				 lhs_pv.m_tree, rhs_pv.m_tree),
			 lhs_pv.m_stack_depth);

  if (pvs.length () < 1)
    return path_var (NULL_TREE, 0);

  /* Pick the most human-readable candidate.  */
  pvs.qsort (readability_comparator);
  return pvs[0];
}
/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
   Use VISITED to prevent infinite mutual recursion with the overload for
   regions.

   This function defers to get_representative_path_var_1 to do the work;
   it adds verification that get_representative_path_var_1 returned a tree
   of the correct type.  */

path_var
region_model::get_representative_path_var (const svalue *sval,
					   svalue_set *visited) const
{
  if (sval == NULL)
    return path_var (NULL_TREE, 0);

  tree orig_type = sval->get_type ();

  path_var result = get_representative_path_var_1 (sval, visited);

  /* Verify that the result has the same type as SVAL, if any.  */
  if (result.m_tree && orig_type)
    gcc_assert (TREE_TYPE (result.m_tree) == orig_type);

  return result;
}
3965 /* Attempt to return a tree that represents SVAL, or return NULL_TREE.
3967 Strip off any top-level cast, to avoid messages like
3968 double-free of '(void *)ptr'
3969 from analyzer diagnostics. */
3971 tree
3972 region_model::get_representative_tree (const svalue *sval) const
3974 svalue_set visited;
3975 tree expr = get_representative_path_var (sval, &visited).m_tree;
3977 /* Strip off any top-level cast. */
3978 if (expr && TREE_CODE (expr) == NOP_EXPR)
3979 expr = TREE_OPERAND (expr, 0);
3981 return fixup_tree_for_diagnostic (expr);
3984 tree
3985 region_model::get_representative_tree (const region *reg) const
3987 svalue_set visited;
3988 tree expr = get_representative_path_var (reg, &visited).m_tree;
3990 /* Strip off any top-level cast. */
3991 if (expr && TREE_CODE (expr) == NOP_EXPR)
3992 expr = TREE_OPERAND (expr, 0);
3994 return fixup_tree_for_diagnostic (expr);
/* Implementation of region_model::get_representative_path_var.

   Attempt to return a path_var that represents REG, or return
   the NULL path_var.
   For example, a region for a field of a local would be a path_var
   wrapping a COMPONENT_REF.
   Use VISITED to prevent infinite mutual recursion with the overload for
   svalues.  */

path_var
region_model::get_representative_path_var_1 (const region *reg,
					     svalue_set *visited) const
{
  switch (reg->get_kind ())
    {
    default:
      gcc_unreachable ();

    case RK_FRAME:
    case RK_GLOBALS:
    case RK_CODE:
    case RK_HEAP:
    case RK_STACK:
    case RK_THREAD_LOCAL:
    case RK_ROOT:
      /* Regions that represent memory spaces are not expressible as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_FUNCTION:
      {
	const function_region *function_reg
	  = as_a <const function_region *> (reg);
	return path_var (function_reg->get_fndecl (), 0);
      }
    case RK_LABEL:
      {
	const label_region *label_reg = as_a <const label_region *> (reg);
	return path_var (label_reg->get_label (), 0);
      }

    case RK_SYMBOLIC:
      {
	/* Express *PTR as a MEM_REF with a zero offset.  */
	const symbolic_region *symbolic_reg
	  = as_a <const symbolic_region *> (reg);
	const svalue *pointer = symbolic_reg->get_pointer ();
	path_var pointer_pv = get_representative_path_var (pointer, visited);
	if (!pointer_pv)
	  return path_var (NULL_TREE, 0);
	tree offset = build_int_cst (pointer->get_type (), 0);
	return path_var (build2 (MEM_REF,
				 reg->get_type (),
				 pointer_pv.m_tree,
				 offset),
			 pointer_pv.m_stack_depth);
      }
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
      }
    case RK_FIELD:
      {
	/* Express the field as PARENT.FIELD via a COMPONENT_REF.  */
	const field_region *field_reg = as_a <const field_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build3 (COMPONENT_REF,
				 reg->get_type (),
				 parent_pv.m_tree,
				 field_reg->get_field (),
				 NULL_TREE),
			 parent_pv.m_stack_depth);
      }

    case RK_ELEMENT:
      {
	/* Express the element as PARENT[INDEX] via an ARRAY_REF.  */
	const element_region *element_reg
	  = as_a <const element_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var index_pv
	  = get_representative_path_var (element_reg->get_index (), visited);
	if (!index_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build4 (ARRAY_REF,
				 reg->get_type (),
				 parent_pv.m_tree, index_pv.m_tree,
				 NULL_TREE, NULL_TREE),
			 parent_pv.m_stack_depth);
      }

    case RK_OFFSET:
      {
	/* Express the region as *(&PARENT + OFFSET); only concrete
	   (INTEGER_CST) offsets are representable here.  */
	const offset_region *offset_reg
	  = as_a <const offset_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var offset_pv
	  = get_representative_path_var (offset_reg->get_byte_offset (),
					 visited);
	if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
	  return path_var (NULL_TREE, 0);
	tree addr_parent = build1 (ADDR_EXPR,
				   build_pointer_type (reg->get_type ()),
				   parent_pv.m_tree);
	return path_var (build2 (MEM_REF,
				 reg->get_type (),
				 addr_parent, offset_pv.m_tree),
			 parent_pv.m_stack_depth);
      }

    case RK_SIZED:
      return path_var (NULL_TREE, 0);

    case RK_CAST:
      {
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build1 (NOP_EXPR,
				 reg->get_type (),
				 parent_pv.m_tree),
			 parent_pv.m_stack_depth);
      }

    case RK_HEAP_ALLOCATED:
    case RK_ALLOCA:
      /* No good way to express heap-allocated/alloca regions as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_STRING:
      {
	const string_region *string_reg = as_a <const string_region *> (reg);
	return path_var (string_reg->get_string_cst (), 0);
      }

    case RK_VAR_ARG:
    case RK_ERRNO:
    case RK_UNKNOWN:
      return path_var (NULL_TREE, 0);
    }
}
/* Attempt to return a path_var that represents REG, or return
   the NULL path_var.
   For example, a region for a field of a local would be a path_var
   wrapping a COMPONENT_REF.
   Use VISITED to prevent infinite mutual recursion with the overload for
   svalues.

   This function defers to get_representative_path_var_1 to do the work;
   it adds verification that get_representative_path_var_1 returned a tree
   of the correct type.  */

path_var
region_model::get_representative_path_var (const region *reg,
					   svalue_set *visited) const
{
  path_var result = get_representative_path_var_1 (reg, visited);

  /* Verify that the result has the same type as REG, if any.  */
  if (result.m_tree && reg->get_type ())
    gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());

  return result;
}
/* Update this model for any phis in SNODE, assuming we came from
   LAST_CFG_SUPEREDGE.  */

void
region_model::update_for_phis (const supernode *snode,
			       const cfg_superedge *last_cfg_superedge,
			       region_model_context *ctxt)
{
  gcc_assert (last_cfg_superedge);

  /* Copy this state and pass it to handle_phi so that all of the phi stmts
     are effectively handled simultaneously: each phi reads from the
     snapshot, not from the partially-updated *this.  */
  const region_model old_state (*this);

  for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
       !gsi_end_p (gpi); gsi_next (&gpi))
    {
      gphi *phi = gpi.phi ();

      /* The phi argument corresponding to the incoming CFG edge.  */
      tree src = last_cfg_superedge->get_phi_arg (phi);
      tree lhs = gimple_phi_result (phi);

      /* Update next_state based on phi and old_state.  */
      handle_phi (phi, lhs, src, old_state, ctxt);
    }
}
/* Attempt to update this model for taking EDGE (where the last statement
   was LAST_STMT), returning true if the edge can be taken, false
   otherwise.
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to it.

   For CFG superedges where LAST_STMT is a conditional or a switch
   statement, attempt to add the relevant conditions for EDGE to this
   model, returning true if they are feasible, or false if they are
   impossible.

   For call superedges, push frame information and store arguments
   into parameters.

   For return superedges, pop frame information and store return
   values into any lhs.

   Rejection of call/return superedges happens elsewhere, in
   program_point::on_edge (i.e. based on program point, rather
   than program state).  */

bool
region_model::maybe_update_for_edge (const superedge &edge,
				     const gimple *last_stmt,
				     region_model_context *ctxt,
				     rejected_constraint **out)
{
  /* Handle frame updates for interprocedural edges.  */
  switch (edge.m_kind)
    {
    default:
      break;

    case SUPEREDGE_CALL:
      {
	const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
	update_for_call_superedge (*call_edge, ctxt);
      }
      break;

    case SUPEREDGE_RETURN:
      {
	const return_superedge *return_edge
	  = as_a <const return_superedge *> (&edge);
	update_for_return_superedge (*return_edge, ctxt);
      }
      break;

    case SUPEREDGE_INTRAPROCEDURAL_CALL:
      /* This is a no-op for call summaries; we should already
	 have handled the effect of the call summary at the call stmt.  */
      break;
    }

  if (last_stmt == NULL)
    return true;

  /* Apply any constraints for conditionals/switch statements.  */

  if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
    {
      const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
      return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
    }

  if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
    {
      const switch_cfg_superedge *switch_sedge
	= as_a <const switch_cfg_superedge *> (&edge);
      return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
					    ctxt, out);
    }

  /* Apply any constraints due to an exception being thrown.  */
  if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
    if (cfg_sedge->get_flags () & EDGE_EH)
      return apply_constraints_for_exception (last_stmt, ctxt, out);

  return true;
}
4278 /* Push a new frame_region on to the stack region.
4279 Populate the frame_region with child regions for the function call's
4280 parameters, using values from the arguments at the callsite in the
4281 caller's frame. */
4283 void
4284 region_model::update_for_gcall (const gcall *call_stmt,
4285 region_model_context *ctxt,
4286 function *callee)
4288 /* Build a vec of argument svalues, using the current top
4289 frame for resolving tree expressions. */
4290 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
4292 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
4294 tree arg = gimple_call_arg (call_stmt, i);
4295 arg_svals.quick_push (get_rvalue (arg, ctxt));
4298 if(!callee)
4300 /* Get the function * from the gcall. */
4301 tree fn_decl = get_fndecl_for_call (call_stmt,ctxt);
4302 callee = DECL_STRUCT_FUNCTION (fn_decl);
4305 push_frame (callee, &arg_svals, ctxt);
4308 /* Pop the top-most frame_region from the stack, and copy the return
4309 region's values (if any) into the region for the lvalue of the LHS of
4310 the call (if any). */
4312 void
4313 region_model::update_for_return_gcall (const gcall *call_stmt,
4314 region_model_context *ctxt)
4316 /* Get the lvalue for the result of the call, passing it to pop_frame,
4317 so that pop_frame can determine the region with respect to the
4318 *caller* frame. */
4319 tree lhs = gimple_call_lhs (call_stmt);
4320 pop_frame (lhs, NULL, ctxt);
4323 /* Extract calling information from the superedge and update the model for the
4324 call */
4326 void
4327 region_model::update_for_call_superedge (const call_superedge &call_edge,
4328 region_model_context *ctxt)
4330 const gcall *call_stmt = call_edge.get_call_stmt ();
4331 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
4334 /* Extract calling information from the return superedge and update the model
4335 for the returning call */
4337 void
4338 region_model::update_for_return_superedge (const return_superedge &return_edge,
4339 region_model_context *ctxt)
4341 const gcall *call_stmt = return_edge.get_call_stmt ();
4342 update_for_return_gcall (call_stmt, ctxt);
/* Attempt to use R to replay SUMMARY into this object.
   Return true if it is possible.  */

bool
region_model::replay_call_summary (call_summary_replay &r,
				   const region_model &summary)
{
  /* A call summary is always built for a single frame.  */
  gcc_assert (summary.get_stack_depth () == 1);

  m_store.replay_call_summary (r, summary.m_store);

  /* Replaying the constraints can fail (e.g. on contradiction); if so,
     the summary is not applicable here.  */
  if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
    return false;

  /* Copy across any dynamic extents, mapping both the region and its
     size svalue from the summary's frame into the caller's context.
     Entries that can't be mapped are silently skipped.  */
  for (auto kv : summary.m_dynamic_extents)
    {
      const region *summary_reg = kv.first;
      const region *caller_reg = r.convert_region_from_summary (summary_reg);
      if (!caller_reg)
	continue;
      const svalue *summary_sval = kv.second;
      const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
      if (!caller_sval)
	continue;
      m_dynamic_extents.put (caller_reg, caller_sval);
    }

  return true;
}
4375 /* Given a true or false edge guarded by conditional statement COND_STMT,
4376 determine appropriate constraints for the edge to be taken.
4378 If they are feasible, add the constraints and return true.
4380 Return false if the constraints contradict existing knowledge
4381 (and so the edge should not be taken).
4382 When returning false, if OUT is non-NULL, write a new rejected_constraint
4383 to it. */
4385 bool
4386 region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
4387 const gcond *cond_stmt,
4388 region_model_context *ctxt,
4389 rejected_constraint **out)
4391 ::edge cfg_edge = sedge.get_cfg_edge ();
4392 gcc_assert (cfg_edge != NULL);
4393 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
4395 enum tree_code op = gimple_cond_code (cond_stmt);
4396 tree lhs = gimple_cond_lhs (cond_stmt);
4397 tree rhs = gimple_cond_rhs (cond_stmt);
4398 if (cfg_edge->flags & EDGE_FALSE_VALUE)
4399 op = invert_tree_comparison (op, false /* honor_nans */);
4400 return add_constraint (lhs, op, rhs, ctxt, out);
/* Return true iff SWITCH_STMT has a non-default label that contains
   INT_CST.  */

static bool
has_nondefault_case_for_value_p (const gswitch *switch_stmt, tree int_cst)
{
  /* We expect the initial label to be the default; skip it.  */
  gcc_assert (CASE_LOW (gimple_switch_label (switch_stmt, 0)) == NULL);
  unsigned min_idx = 1;
  unsigned max_idx = gimple_switch_num_labels (switch_stmt) - 1;

  /* Binary search: try to find the label containing INT_CST.
     This requires the cases to be sorted by CASE_LOW (done by the
     gimplifier).  */
  while (max_idx >= min_idx)
    {
      unsigned case_idx = (min_idx + max_idx) / 2;
      tree label = gimple_switch_label (switch_stmt, case_idx);
      tree low = CASE_LOW (label);
      gcc_assert (low);
      tree high = CASE_HIGH (label);
      if (!high)
	/* A single-value case: treat it as the degenerate range [low, low].  */
	high = low;
      if (tree_int_cst_compare (int_cst, low) < 0)
	{
	  /* INT_CST is below the range of this label.  */
	  gcc_assert (case_idx > 0);
	  max_idx = case_idx - 1;
	}
      else if (tree_int_cst_compare (int_cst, high) > 0)
	{
	  /* INT_CST is above the range of this case.  */
	  min_idx = case_idx + 1;
	}
      else
	/* This case contains INT_CST.  */
	return true;
    }
  /* Not found.  */
  return false;
}
4445 /* Return true iff SWITCH_STMT (which must be on an enum value)
4446 has nondefault cases handling all values in the enum. */
4448 static bool
4449 has_nondefault_cases_for_all_enum_values_p (const gswitch *switch_stmt)
4451 gcc_assert (switch_stmt);
4452 tree type = TREE_TYPE (gimple_switch_index (switch_stmt));
4453 gcc_assert (TREE_CODE (type) == ENUMERAL_TYPE);
4455 for (tree enum_val_iter = TYPE_VALUES (type);
4456 enum_val_iter;
4457 enum_val_iter = TREE_CHAIN (enum_val_iter))
4459 tree enum_val = TREE_VALUE (enum_val_iter);
4460 gcc_assert (TREE_CODE (enum_val) == CONST_DECL);
4461 gcc_assert (TREE_CODE (DECL_INITIAL (enum_val)) == INTEGER_CST);
4462 if (!has_nondefault_case_for_value_p (switch_stmt,
4463 DECL_INITIAL (enum_val)))
4464 return false;
4466 return true;
/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
   for the edge to be taken.

   If they are feasible, add the constraints and return true.

   Return false if the constraints contradict existing knowledge
   (and so the edge should not be taken).
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to it.  */

bool
region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
					     const gswitch *switch_stmt,
					     region_model_context *ctxt,
					     rejected_constraint **out)
{
  tree index  = gimple_switch_index (switch_stmt);
  const svalue *index_sval = get_rvalue (index, ctxt);

  /* If we're switching based on an enum type, assume that the user is only
     working with values from the enum.  Hence if this is an
     implicitly-created "default", assume it doesn't get followed.
     This fixes numerous "uninitialized" false positives where we otherwise
     consider jumping past the initialization cases.  */

  if (/* Don't check during feasibility-checking (when ctxt is NULL).  */
      ctxt
      /* Must be an enum value.  */
      && index_sval->get_type ()
      && TREE_CODE (TREE_TYPE (index)) == ENUMERAL_TYPE
      && TREE_CODE (index_sval->get_type ()) == ENUMERAL_TYPE
      /* If we have a constant, then we can check it directly.  */
      && index_sval->get_kind () != SK_CONSTANT
      && edge.implicitly_created_default_p ()
      && has_nondefault_cases_for_all_enum_values_p (switch_stmt)
      /* Don't do this if there's a chance that the index is
	 attacker-controlled.  */
      && !ctxt->possibly_tainted_p (index_sval))
    {
      /* Reject following the implicit default edge.  */
      if (out)
	*out = new rejected_default_case (*this);
      return false;
    }

  /* Constrain the index to the union of value-ranges of the cases
     leading to this edge.  */
  bounded_ranges_manager *ranges_mgr = get_range_manager ();
  const bounded_ranges *all_cases_ranges
    = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
  bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
  if (!sat && out)
    *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
  if (sat && ctxt && !all_cases_ranges->empty_p ())
    ctxt->on_bounded_ranges (*index_sval, *all_cases_ranges);
  return sat;
}
4524 /* Apply any constraints due to an exception being thrown at LAST_STMT.
4526 If they are feasible, add the constraints and return true.
4528 Return false if the constraints contradict existing knowledge
4529 (and so the edge should not be taken).
4530 When returning false, if OUT is non-NULL, write a new rejected_constraint
4531 to it. */
4533 bool
4534 region_model::apply_constraints_for_exception (const gimple *last_stmt,
4535 region_model_context *ctxt,
4536 rejected_constraint **out)
4538 gcc_assert (last_stmt);
4539 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
4540 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
4541 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
4542 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
4544 /* We have an exception thrown from operator new.
4545 Add a constraint that the result was NULL, to avoid a false
4546 leak report due to the result being lost when following
4547 the EH edge. */
4548 if (tree lhs = gimple_call_lhs (call))
4549 return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
4550 return true;
4552 return true;
4555 /* For use with push_frame when handling a top-level call within the analysis.
4556 PARAM has a defined but unknown initial value.
4557 Anything it points to has escaped, since the calling context "knows"
4558 the pointer, and thus calls to unknown functions could read/write into
4559 the region.
4560 If NONNULL is true, then assume that PARAM must be non-NULL. */
4562 void
4563 region_model::on_top_level_param (tree param,
4564 bool nonnull,
4565 region_model_context *ctxt)
4567 if (POINTER_TYPE_P (TREE_TYPE (param)))
4569 const region *param_reg = get_lvalue (param, ctxt);
4570 const svalue *init_ptr_sval
4571 = m_mgr->get_or_create_initial_value (param_reg);
4572 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
4573 m_store.mark_as_escaped (pointee_reg);
4574 if (nonnull)
4576 const svalue *null_ptr_sval
4577 = m_mgr->get_or_create_null_ptr (TREE_TYPE (param));
4578 add_constraint (init_ptr_sval, NE_EXPR, null_ptr_sval, ctxt);
/* Update this region_model to reflect pushing a frame onto the stack
   for a call to FUN.

   If ARG_SVALS is non-NULL, use it to populate the parameters
   in the new frame.
   Otherwise, the params have their initial_svalues.

   Return the frame_region for the new frame.  */

const region *
region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
			  region_model_context *ctxt)
{
  m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
  if (arg_svals)
    {
      /* Arguments supplied from a caller frame.  */
      tree fndecl = fun->decl;
      unsigned idx = 0;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
	   iter_parm = DECL_CHAIN (iter_parm), ++idx)
	{
	  /* If there's a mismatching declaration, the call stmt might
	     not have enough args.  Handle this case by leaving the
	     rest of the params as uninitialized.  */
	  if (idx >= arg_svals->length ())
	    break;
	  /* Prefer the SSA default-def for the param, if any, since
	     that's what reads of the param within the body will use.  */
	  tree parm_lval = iter_parm;
	  if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
	    parm_lval = parm_default_ssa;
	  const region *parm_reg = get_lvalue (parm_lval, ctxt);
	  const svalue *arg_sval = (*arg_svals)[idx];
	  set_value (parm_reg, arg_sval, ctxt);
	}

      /* Handle any variadic args: any remaining svalues go into
	 numbered var-arg regions within the new frame.  */
      unsigned va_arg_idx = 0;
      for (; idx < arg_svals->length (); idx++, va_arg_idx++)
	{
	  const svalue *arg_sval = (*arg_svals)[idx];
	  const region *var_arg_reg
	    = m_mgr->get_var_arg_region (m_current_frame,
					 va_arg_idx);
	  set_value (var_arg_reg, arg_sval, ctxt);
	}
    }
  else
    {
      /* Otherwise we have a top-level call within the analysis.  The params
	 have defined but unknown initial values.
	 Anything they point to has escaped.  */
      tree fndecl = fun->decl;

      /* Handle "__attribute__((nonnull))".
	 An empty (but non-NULL) bitmap from get_nonnull_args means *all*
	 pointer args are nonnull; otherwise the bitmap has a bit per
	 nonnull param index.  */
      tree fntype = TREE_TYPE (fndecl);
      bitmap nonnull_args = get_nonnull_args (fntype);

      unsigned parm_idx = 0;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
	   iter_parm = DECL_CHAIN (iter_parm))
	{
	  bool non_null = (nonnull_args
			   ? (bitmap_empty_p (nonnull_args)
			      || bitmap_bit_p (nonnull_args, parm_idx))
			   : false);
	  if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
	    on_top_level_param (parm_default_ssa, non_null, ctxt);
	  else
	    on_top_level_param (iter_parm, non_null, ctxt);
	  parm_idx++;
	}

      /* get_nonnull_args allocates; release it.  */
      BITMAP_FREE (nonnull_args);
    }

  return m_current_frame;
}
4661 /* Get the function of the top-most frame in this region_model's stack.
4662 There must be such a frame. */
4664 function *
4665 region_model::get_current_function () const
4667 const frame_region *frame = get_current_frame ();
4668 gcc_assert (frame);
4669 return frame->get_function ();
/* Pop the topmost frame_region from this region_model's stack;

   If RESULT_LVALUE is non-null, copy any return value from the frame
   into the corresponding region (evaluated with respect to the *caller*
   frame, rather than the called frame).
   If OUT_RESULT is non-null, copy any return value from the frame
   into *OUT_RESULT.

   Purge the frame region and all its descendent regions.
   Convert any pointers that point into such regions into
   POISON_KIND_POPPED_STACK svalues.  */

void
region_model::pop_frame (tree result_lvalue,
			 const svalue **out_result,
			 region_model_context *ctxt)
{
  gcc_assert (m_current_frame);

  const frame_region *frame_reg = m_current_frame;

  /* Notify state machines.  */
  if (ctxt)
    ctxt->on_pop_frame (frame_reg);

  /* Evaluate the result, within the callee frame (i.e. before popping).  */
  tree fndecl = m_current_frame->get_function ()->decl;
  tree result = DECL_RESULT (fndecl);
  const svalue *retval = NULL;
  if (result && TREE_TYPE (result) != void_type_node)
    {
      retval = get_rvalue (result, ctxt);
      if (out_result)
	*out_result = retval;
    }

  /* Pop the frame.  */
  m_current_frame = m_current_frame->get_calling_frame ();

  if (result_lvalue && retval)
    {
      /* Compute result_dst_reg using RESULT_LVALUE *after* popping
	 the frame, but before poisoning pointers into the old frame.  */
      const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
      set_value (result_dst_reg, retval, ctxt);
    }

  /* Finally, purge the popped frame's bindings and poison any pointers
     into it.  */
  unbind_region_and_descendents (frame_reg,POISON_KIND_POPPED_STACK);
}
4722 /* Get the number of frames in this region_model's stack. */
4725 region_model::get_stack_depth () const
4727 const frame_region *frame = get_current_frame ();
4728 if (frame)
4729 return frame->get_stack_depth ();
4730 else
4731 return 0;
4734 /* Get the frame_region with the given index within the stack.
4735 The frame_region must exist. */
4737 const frame_region *
4738 region_model::get_frame_at_index (int index) const
4740 const frame_region *frame = get_current_frame ();
4741 gcc_assert (frame);
4742 gcc_assert (index >= 0);
4743 gcc_assert (index <= frame->get_index ());
4744 while (index != frame->get_index ())
4746 frame = frame->get_calling_frame ();
4747 gcc_assert (frame);
4749 return frame;
/* Unbind svalues for any regions in REG and below.
   Find any pointers to such regions; convert them to
   poisoned values of kind PKIND.
   Also purge any dynamic extents.  */

void
region_model::unbind_region_and_descendents (const region *reg,
					     enum poison_kind pkind)
{
  /* Gather a set of base regions to be unbound.
     Done as a separate pass so that we don't mutate the store's
     cluster map whilst iterating over it.  */
  hash_set<const region *> base_regs;
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *iter_base_reg = (*iter).first;
      if (iter_base_reg->descendent_of_p (reg))
	base_regs.add (iter_base_reg);
    }
  for (hash_set<const region *>::iterator iter = base_regs.begin ();
       iter != base_regs.end (); ++iter)
    m_store.purge_cluster (*iter);

  /* Find any pointers to REG or its descendents; convert to poisoned.  */
  poison_any_pointers_to_descendents (reg, pkind);

  /* Purge dynamic extents of any base regions in REG and below
     (e.g. VLAs and alloca stack regions).  */
  for (auto iter : m_dynamic_extents)
    {
      const region *iter_reg = iter.first;
      if (iter_reg->descendent_of_p (reg))
	unset_dynamic_extents (iter_reg);
    }
}
/* Implementation of BindingVisitor.
   Update the bound svalues for regions below REG to use poisoned
   values instead.  */

struct bad_pointer_finder
{
  bad_pointer_finder (const region *reg, enum poison_kind pkind,
		      region_model_manager *mgr)
  : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
  {}

  /* Called for each binding; replace SVAL in place if it's a pointer
     into (but not to) M_REG.  */
  void on_binding (const binding_key *, const svalue *&sval)
  {
    if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
      {
	const region *ptr_dst = ptr_sval->get_pointee ();
	/* Poison ptrs to descendents of REG, but not to REG itself,
	   otherwise double-free detection doesn't work (since sm-state
	   for "free" is stored on the original ptr svalue).  */
	if (ptr_dst->descendent_of_p (m_reg)
	    && ptr_dst != m_reg)
	  {
	    sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
							 sval->get_type ());
	    ++m_count;
	  }
      }
  }

  const region *m_reg;			/* Root of region being unbound.  */
  enum poison_kind m_pkind;		/* Kind of poison to apply.  */
  region_model_manager *const m_mgr;	/* For creating poisoned svalues.  */
  int m_count;				/* Number of pointers poisoned.  */
};
4822 /* Find any pointers to REG or its descendents; convert them to
4823 poisoned values of kind PKIND.
4824 Return the number of pointers that were poisoned. */
4827 region_model::poison_any_pointers_to_descendents (const region *reg,
4828 enum poison_kind pkind)
4830 bad_pointer_finder bv (reg, pkind, m_mgr);
4831 m_store.for_each_binding (bv);
4832 return bv.m_count;
/* Attempt to merge THIS with OTHER_MODEL, writing the result
   to OUT_MODEL.  Use POINT to distinguish values created as a
   result of merging.  */

bool
region_model::can_merge_with_p (const region_model &other_model,
				const program_point &point,
				region_model *out_model,
				const extrinsic_state *ext_state,
				const program_state *state_a,
				const program_state *state_b) const
{
  gcc_assert (out_model);
  gcc_assert (m_mgr == other_model.m_mgr);
  gcc_assert (m_mgr == out_model->m_mgr);

  /* Models with different call stacks can never be merged.  */
  if (m_current_frame != other_model.m_current_frame)
    return false;
  out_model->m_current_frame = m_current_frame;

  model_merger m (this, &other_model, point, out_model,
		  ext_state, state_a, state_b);

  /* The stores must be mergeable...  */
  if (!store::can_merge_p (&m_store, &other_model.m_store,
			   &out_model->m_store, m_mgr->get_store_manager (),
			   &m))
    return false;

  /* ...as must the dynamic extents.  */
  if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
					   &out_model->m_dynamic_extents))
    return false;

  /* Merge constraints.  */
  constraint_manager::merge (*m_constraints,
			     *other_model.m_constraints,
			     out_model->m_constraints);

  return true;
}
4875 /* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
4876 otherwise. */
4878 tree
4879 region_model::get_fndecl_for_call (const gcall *call,
4880 region_model_context *ctxt)
4882 tree fn_ptr = gimple_call_fn (call);
4883 if (fn_ptr == NULL_TREE)
4884 return NULL_TREE;
4885 const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
4886 if (const region_svalue *fn_ptr_ptr
4887 = fn_ptr_sval->dyn_cast_region_svalue ())
4889 const region *reg = fn_ptr_ptr->get_pointee ();
4890 if (const function_region *fn_reg = reg->dyn_cast_function_region ())
4892 tree fn_decl = fn_reg->get_fndecl ();
4893 cgraph_node *node = cgraph_node::get (fn_decl);
4894 if (!node)
4895 return NULL_TREE;
4896 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
4897 if (ultimate_node)
4898 return ultimate_node->decl;
4902 return NULL_TREE;
/* Closure data for region_model::append_regions_cb.
   Would be much simpler to use a lambda here, if it were supported.  */

struct append_regions_cb_data
{
  /* The model whose current frame's regions are of interest.  */
  const region_model *model;
  /* Where to accumulate the matching decl_regions.  */
  auto_vec<const decl_region *> *out;
};
4913 /* Populate *OUT with all decl_regions in the current
4914 frame that have clusters within the store. */
4916 void
4917 region_model::
4918 get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
4920 append_regions_cb_data data;
4921 data.model = this;
4922 data.out = out;
4923 m_store.for_each_cluster (append_regions_cb, &data);
4926 /* Implementation detail of get_regions_for_current_frame. */
4928 void
4929 region_model::append_regions_cb (const region *base_reg,
4930 append_regions_cb_data *cb_data)
4932 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
4933 return;
4934 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
4935 cb_data->out->safe_push (decl_reg);
/* Abstract class for diagnostics related to the use of
   floating-point arithmetic where precision is needed.
   Subclasses share the -Wanalyzer-imprecise-fp-arithmetic option.  */

class imprecise_floating_point_arithmetic : public pending_diagnostic
{
public:
  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_imprecise_fp_arithmetic;
  }
};
/* Concrete diagnostic to complain about uses of floating-point arithmetic
   in the size argument of malloc etc.  */

class float_as_size_arg : public imprecise_floating_point_arithmetic
{
public:
  /* ARG is the specific floating-point operand to report, or NULL_TREE.  */
  float_as_size_arg (tree arg) : m_arg (arg)
  {}

  const char *get_kind () const final override
  {
    return "float_as_size_arg_diagnostic";
  }

  /* Two instances are equal iff they report the same operand.  */
  bool subclass_equal_p (const pending_diagnostic &other) const final override
  {
    return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
  }

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    bool warned = warning_meta (rich_loc, m, get_controlling_option (),
				"use of floating-point arithmetic here might"
				" yield unexpected results");
    if (warned)
      inform (rich_loc->get_loc (), "only use operands of an integer type"
	      " inside the size argument");
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) final
  override
  {
    if (m_arg)
      return ev.formatted_print ("operand %qE is of type %qT",
				 m_arg, TREE_TYPE (m_arg));
    return ev.formatted_print ("at least one operand of the size argument is"
			       " of a floating-point type");
  }

private:
  /* The floating-point operand being reported, or NULL_TREE if none
     could be determined.  */
  tree m_arg;
};
/* Visitor to find uses of floating-point variables/constants in an svalue.  */

class contains_floating_point_visitor : public visitor
{
public:
  /* Visit ROOT_SVAL and all of its descendents on construction.  */
  contains_floating_point_visitor (const svalue *root_sval) : m_result (NULL)
  {
    root_sval->accept (this);
  }

  /* Return a floating-point svalue found within the root, or NULL.  */
  const svalue *get_svalue_to_report ()
  {
    return m_result;
  }

  void visit_constant_svalue (const constant_svalue *sval) final override
  {
    /* At the point the analyzer runs, constant integer operands in a floating
       point expression are already implicitly converted to floating-points.
       Thus, we do prefer to report non-constants such that the diagnostic
       always reports a floating-point operand.  */
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type) && !m_result)
      m_result = sval;
  }

  void visit_conjured_svalue (const conjured_svalue *sval) final override
  {
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type))
      m_result = sval;
  }

  void visit_initial_svalue (const initial_svalue *sval) final override
  {
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type))
      m_result = sval;
  }

private:
  /* Non-null if at least one floating-point operand was found.  */
  const svalue *m_result;
};
5041 /* May complain about uses of floating-point operands in SIZE_IN_BYTES. */
5043 void
5044 region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
5045 region_model_context *ctxt) const
5047 gcc_assert (ctxt);
5049 contains_floating_point_visitor v (size_in_bytes);
5050 if (const svalue *float_sval = v.get_svalue_to_report ())
5052 tree diag_arg = get_representative_tree (float_sval);
5053 ctxt->warn (make_unique<float_as_size_arg> (diag_arg));
/* Return a region describing a heap-allocated block of memory.
   Use CTXT to complain about tainted sizes.

   Reuse an existing heap_allocated_region if it's not being referenced by
   this region_model; otherwise create a new one.  */

const region *
region_model::get_or_create_region_for_heap_alloc (const svalue *size_in_bytes,
						   region_model_context *ctxt)
{
  /* Determine which regions are referenced in this region_model, so that
     we can reuse an existing heap_allocated_region if it's not in use on
     this path.  */
  auto_bitmap base_regs_in_use;
  get_referenced_base_regions (base_regs_in_use);

  /* Don't reuse regions that are marked as TOUCHED.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    if ((*iter).second->touched_p ())
      {
	const region *base_reg = (*iter).first;
	bitmap_set_bit (base_regs_in_use, base_reg->get_id ());
      }

  const region *reg
    = m_mgr->get_or_create_region_for_heap_alloc (base_regs_in_use);
  /* Only record the size if its type is usable as a size.  */
  if (size_in_bytes)
    if (compat_types_p (size_in_bytes->get_type (), size_type_node))
      set_dynamic_extents (reg, size_in_bytes, ctxt);
  return reg;
}
/* Populate OUT_IDS with the set of IDs of those base regions which are
   reachable in this region_model.  */

void
region_model::get_referenced_base_regions (auto_bitmap &out_ids) const
{
  /* Seed reachability from every cluster in the store.  */
  reachable_regions reachable_regs (const_cast<region_model *> (this));
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);
  /* Get regions for locals that have explicitly bound values.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Convert the reachable set into a bitmap of region IDs.  */
  bitmap_clear (out_ids);
  for (auto iter_reg : reachable_regs)
    bitmap_set_bit (out_ids, iter_reg->get_id ());
}
5114 /* Return a new region describing a block of memory allocated within the
5115 current frame.
5116 Use CTXT to complain about tainted sizes. */
5118 const region *
5119 region_model::create_region_for_alloca (const svalue *size_in_bytes,
5120 region_model_context *ctxt)
5122 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
5123 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
5124 set_dynamic_extents (reg, size_in_bytes, ctxt);
5125 return reg;
/* Record that the size of REG is SIZE_IN_BYTES.
   Use CTXT to complain about tainted sizes.  */

void
region_model::set_dynamic_extents (const region *reg,
				   const svalue *size_in_bytes,
				   region_model_context *ctxt)
{
  assert_compat_types (size_in_bytes->get_type (), size_type_node);
  if (ctxt)
    {
      /* Check the size for problems before recording it, so that any
	 diagnostics are reported at the point where the size is set.  */
      check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
				    ctxt);
      check_dynamic_size_for_floats (size_in_bytes, ctxt);
    }
  m_dynamic_extents.put (reg, size_in_bytes);
}
5146 /* Get the recording of REG in bytes, or NULL if no dynamic size was
5147 recorded. */
5149 const svalue *
5150 region_model::get_dynamic_extents (const region *reg) const
5152 if (const svalue * const *slot = m_dynamic_extents.get (reg))
5153 return *slot;
5154 return NULL;
/* Unset any recorded dynamic size of REG.  */

void
region_model::unset_dynamic_extents (const region *reg)
{
  /* Removing a nonexistent entry is a no-op.  */
  m_dynamic_extents.remove (reg);
}
/* Information of the layout of a RECORD_TYPE, capturing it as a vector
   of items, where each item is either a field or padding.  */

class record_layout
{
public:
  /* An item within a record; either a field, or padding after a field.  */
  struct item
  {
  public:
    item (const bit_range &br,
	  tree field,
	  bool is_padding)
    : m_bit_range (br),
      m_field (field),
      m_is_padding (is_padding)
    {
    }

    bit_offset_t get_start_bit_offset () const
    {
      return m_bit_range.get_start_bit_offset ();
    }
    bit_offset_t get_next_bit_offset () const
    {
      return m_bit_range.get_next_bit_offset ();
    }

    /* Return true iff OFFSET falls within this item's bit range.  */
    bool contains_p (bit_offset_t offset) const
    {
      return m_bit_range.contains_p (offset);
    }

    void dump_to_pp (pretty_printer *pp) const
    {
      if (m_is_padding)
	pp_printf (pp, "padding after %qD", m_field);
      else
	pp_printf (pp, "%qD", m_field);
      pp_string (pp, ", ");
      m_bit_range.dump_to_pp (pp);
    }

    bit_range m_bit_range;
    /* The FIELD_DECL; for padding, the field preceding the padding.  */
    tree m_field;
    bool m_is_padding;
  };

  /* Build the layout of RECORD_TYPE: one item per FIELD_DECL, with
     padding items interposed wherever there are gaps.  */
  record_layout (tree record_type)
  {
    gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);

    for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
	 iter = DECL_CHAIN (iter))
      {
	if (TREE_CODE (iter) == FIELD_DECL)
	  {
	    int iter_field_offset = int_bit_position (iter);
	    bit_size_t size_in_bits;
	    if (!int_size_in_bits (TREE_TYPE (iter), &size_in_bits))
	      size_in_bits = 0;

	    /* Add padding between the previous item and this field,
	       if any.  */
	    maybe_pad_to (iter_field_offset);

	    /* Add field.  */
	    m_items.safe_push (item (bit_range (iter_field_offset,
						size_in_bits),
				     iter, false));
	  }
      }

    /* Add any trailing padding.  */
    bit_size_t size_in_bits;
    if (int_size_in_bits (record_type, &size_in_bits))
      maybe_pad_to (size_in_bits);
  }

  void dump_to_pp (pretty_printer *pp) const
  {
    unsigned i;
    item *it;
    FOR_EACH_VEC_ELT (m_items, i, it)
      {
	it->dump_to_pp (pp);
	pp_newline (pp);
      }
  }

  DEBUG_FUNCTION void dump () const
  {
    pretty_printer pp;
    pp_format_decoder (&pp) = default_tree_printer;
    pp.buffer->stream = stderr;
    dump_to_pp (&pp);
    pp_flush (&pp);
  }

  /* Return the item containing bit OFFSET, or NULL if none does.  */
  const record_layout::item *get_item_at (bit_offset_t offset) const
  {
    unsigned i;
    item *it;
    FOR_EACH_VEC_ELT (m_items, i, it)
      if (it->contains_p (offset))
	return it;
    return NULL;
  }

private:
  /* Subroutine of ctor.  Add padding item to NEXT_OFFSET if necessary.  */

  void maybe_pad_to (bit_offset_t next_offset)
  {
    if (m_items.length () > 0)
      {
	const item &last_item = m_items[m_items.length () - 1];
	bit_offset_t offset_after_last_item
	  = last_item.get_next_bit_offset ();
	if (next_offset > offset_after_last_item)
	  {
	    bit_size_t padding_size
	      = next_offset - offset_after_last_item;
	    m_items.safe_push (item (bit_range (offset_after_last_item,
						padding_size),
				     last_item.m_field, true));
	  }
      }
  }

  /* The items, in increasing bit-offset order.  */
  auto_vec<item> m_items;
};
5296 /* A subclass of pending_diagnostic for complaining about uninitialized data
5297 being copied across a trust boundary to an untrusted output
5298 (e.g. copy_to_user infoleaks in the Linux kernel). */
5300 class exposure_through_uninit_copy
5301 : public pending_diagnostic_subclass<exposure_through_uninit_copy>
5303 public:
5304 exposure_through_uninit_copy (const region *src_region,
5305 const region *dest_region,
5306 const svalue *copied_sval)
5307 : m_src_region (src_region),
5308 m_dest_region (dest_region),
5309 m_copied_sval (copied_sval)
5311 gcc_assert (m_copied_sval->get_kind () == SK_POISONED
5312 || m_copied_sval->get_kind () == SK_COMPOUND);
5315 const char *get_kind () const final override
5317 return "exposure_through_uninit_copy";
5320 bool operator== (const exposure_through_uninit_copy &other) const
5322 return (m_src_region == other.m_src_region
5323 && m_dest_region == other.m_dest_region
5324 && m_copied_sval == other.m_copied_sval);
5327 int get_controlling_option () const final override
5329 return OPT_Wanalyzer_exposure_through_uninit_copy;
5332 bool emit (rich_location *rich_loc) final override
5334 diagnostic_metadata m;
5335 /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor. */
5336 m.add_cwe (200);
5337 enum memory_space mem_space = get_src_memory_space ();
5338 bool warned;
5339 switch (mem_space)
5341 default:
5342 warned = warning_meta
5343 (rich_loc, m, get_controlling_option (),
5344 "potential exposure of sensitive information"
5345 " by copying uninitialized data across trust boundary");
5346 break;
5347 case MEMSPACE_STACK:
5348 warned = warning_meta
5349 (rich_loc, m, get_controlling_option (),
5350 "potential exposure of sensitive information"
5351 " by copying uninitialized data from stack across trust boundary");
5352 break;
5353 case MEMSPACE_HEAP:
5354 warned = warning_meta
5355 (rich_loc, m, get_controlling_option (),
5356 "potential exposure of sensitive information"
5357 " by copying uninitialized data from heap across trust boundary");
5358 break;
5360 if (warned)
5362 location_t loc = rich_loc->get_loc ();
5363 inform_number_of_uninit_bits (loc);
5364 complain_about_uninit_ranges (loc);
5366 if (mem_space == MEMSPACE_STACK)
5367 maybe_emit_fixit_hint ();
5369 return warned;
5372 label_text describe_final_event (const evdesc::final_event &) final override
5374 enum memory_space mem_space = get_src_memory_space ();
5375 switch (mem_space)
5377 default:
5378 return label_text::borrow ("uninitialized data copied here");
5380 case MEMSPACE_STACK:
5381 return label_text::borrow ("uninitialized data copied from stack here");
5383 case MEMSPACE_HEAP:
5384 return label_text::borrow ("uninitialized data copied from heap here");
5388 void mark_interesting_stuff (interesting_t *interest) final override
5390 if (m_src_region)
5391 interest->add_region_creation (m_src_region);
5394 private:
5395 enum memory_space get_src_memory_space () const
5397 return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
5400 bit_size_t calc_num_uninit_bits () const
5402 switch (m_copied_sval->get_kind ())
5404 default:
5405 gcc_unreachable ();
5406 break;
5407 case SK_POISONED:
5409 const poisoned_svalue *poisoned_sval
5410 = as_a <const poisoned_svalue *> (m_copied_sval);
5411 gcc_assert (poisoned_sval->get_poison_kind () == POISON_KIND_UNINIT);
5413 /* Give up if don't have type information. */
5414 if (m_copied_sval->get_type () == NULL_TREE)
5415 return 0;
5417 bit_size_t size_in_bits;
5418 if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
5419 return size_in_bits;
5421 /* Give up if we can't get the size of the type. */
5422 return 0;
5424 break;
5425 case SK_COMPOUND:
5427 const compound_svalue *compound_sval
5428 = as_a <const compound_svalue *> (m_copied_sval);
5429 bit_size_t result = 0;
5430 /* Find keys for uninit svals. */
5431 for (auto iter : *compound_sval)
5433 const svalue *sval = iter.second;
5434 if (const poisoned_svalue *psval
5435 = sval->dyn_cast_poisoned_svalue ())
5436 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
5438 const binding_key *key = iter.first;
5439 const concrete_binding *ckey
5440 = key->dyn_cast_concrete_binding ();
5441 gcc_assert (ckey);
5442 result += ckey->get_size_in_bits ();
5445 return result;
5450 void inform_number_of_uninit_bits (location_t loc) const
5452 bit_size_t num_uninit_bits = calc_num_uninit_bits ();
5453 if (num_uninit_bits <= 0)
5454 return;
5455 if (num_uninit_bits % BITS_PER_UNIT == 0)
5457 /* Express in bytes. */
5458 byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT;
5459 if (num_uninit_bytes == 1)
5460 inform (loc, "1 byte is uninitialized");
5461 else
5462 inform (loc,
5463 "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
5465 else
5467 /* Express in bits. */
5468 if (num_uninit_bits == 1)
5469 inform (loc, "1 bit is uninitialized");
5470 else
5471 inform (loc,
5472 "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
5476 void complain_about_uninit_ranges (location_t loc) const
5478 if (const compound_svalue *compound_sval
5479 = m_copied_sval->dyn_cast_compound_svalue ())
5481 /* Find keys for uninit svals. */
5482 auto_vec<const concrete_binding *> uninit_keys;
5483 for (auto iter : *compound_sval)
5485 const svalue *sval = iter.second;
5486 if (const poisoned_svalue *psval
5487 = sval->dyn_cast_poisoned_svalue ())
5488 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
5490 const binding_key *key = iter.first;
5491 const concrete_binding *ckey
5492 = key->dyn_cast_concrete_binding ();
5493 gcc_assert (ckey);
5494 uninit_keys.safe_push (ckey);
5497 /* Complain about them in sorted order. */
5498 uninit_keys.qsort (concrete_binding::cmp_ptr_ptr);
5500 std::unique_ptr<record_layout> layout;
5502 tree type = m_copied_sval->get_type ();
5503 if (type && TREE_CODE (type) == RECORD_TYPE)
5505 // (std::make_unique is C++14)
5506 layout = std::unique_ptr<record_layout> (new record_layout (type));
5508 if (0)
5509 layout->dump ();
5512 unsigned i;
5513 const concrete_binding *ckey;
5514 FOR_EACH_VEC_ELT (uninit_keys, i, ckey)
5516 bit_offset_t start_bit = ckey->get_start_bit_offset ();
5517 bit_offset_t next_bit = ckey->get_next_bit_offset ();
5518 complain_about_uninit_range (loc, start_bit, next_bit,
5519 layout.get ());
5524 void complain_about_uninit_range (location_t loc,
5525 bit_offset_t start_bit,
5526 bit_offset_t next_bit,
5527 const record_layout *layout) const
5529 if (layout)
5531 while (start_bit < next_bit)
5533 if (const record_layout::item *item
5534 = layout->get_item_at (start_bit))
5536 gcc_assert (start_bit >= item->get_start_bit_offset ());
5537 gcc_assert (start_bit < item->get_next_bit_offset ());
5538 if (item->get_start_bit_offset () == start_bit
5539 && item->get_next_bit_offset () <= next_bit)
5540 complain_about_fully_uninit_item (*item);
5541 else
5542 complain_about_partially_uninit_item (*item);
5543 start_bit = item->get_next_bit_offset ();
5544 continue;
5546 else
5547 break;
5551 if (start_bit >= next_bit)
5552 return;
5554 if (start_bit % 8 == 0 && next_bit % 8 == 0)
5556 /* Express in bytes. */
5557 byte_offset_t start_byte = start_bit / 8;
5558 byte_offset_t last_byte = (next_bit / 8) - 1;
5559 if (last_byte == start_byte)
5560 inform (loc,
5561 "byte %wu is uninitialized",
5562 start_byte.to_uhwi ());
5563 else
5564 inform (loc,
5565 "bytes %wu - %wu are uninitialized",
5566 start_byte.to_uhwi (),
5567 last_byte.to_uhwi ());
5569 else
5571 /* Express in bits. */
5572 bit_offset_t last_bit = next_bit - 1;
5573 if (last_bit == start_bit)
5574 inform (loc,
5575 "bit %wu is uninitialized",
5576 start_bit.to_uhwi ());
5577 else
5578 inform (loc,
5579 "bits %wu - %wu are uninitialized",
5580 start_bit.to_uhwi (),
5581 last_bit.to_uhwi ());
5585 static void
5586 complain_about_fully_uninit_item (const record_layout::item &item)
5588 tree field = item.m_field;
5589 bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
5590 if (item.m_is_padding)
5592 if (num_bits % 8 == 0)
5594 /* Express in bytes. */
5595 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
5596 if (num_bytes == 1)
5597 inform (DECL_SOURCE_LOCATION (field),
5598 "padding after field %qD is uninitialized (1 byte)",
5599 field);
5600 else
5601 inform (DECL_SOURCE_LOCATION (field),
5602 "padding after field %qD is uninitialized (%wu bytes)",
5603 field, num_bytes.to_uhwi ());
5605 else
5607 /* Express in bits. */
5608 if (num_bits == 1)
5609 inform (DECL_SOURCE_LOCATION (field),
5610 "padding after field %qD is uninitialized (1 bit)",
5611 field);
5612 else
5613 inform (DECL_SOURCE_LOCATION (field),
5614 "padding after field %qD is uninitialized (%wu bits)",
5615 field, num_bits.to_uhwi ());
5618 else
5620 if (num_bits % 8 == 0)
5622 /* Express in bytes. */
5623 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
5624 if (num_bytes == 1)
5625 inform (DECL_SOURCE_LOCATION (field),
5626 "field %qD is uninitialized (1 byte)", field);
5627 else
5628 inform (DECL_SOURCE_LOCATION (field),
5629 "field %qD is uninitialized (%wu bytes)",
5630 field, num_bytes.to_uhwi ());
5632 else
5634 /* Express in bits. */
5635 if (num_bits == 1)
5636 inform (DECL_SOURCE_LOCATION (field),
5637 "field %qD is uninitialized (1 bit)", field);
5638 else
5639 inform (DECL_SOURCE_LOCATION (field),
5640 "field %qD is uninitialized (%wu bits)",
5641 field, num_bits.to_uhwi ());
5646 static void
5647 complain_about_partially_uninit_item (const record_layout::item &item)
5649 tree field = item.m_field;
5650 if (item.m_is_padding)
5651 inform (DECL_SOURCE_LOCATION (field),
5652 "padding after field %qD is partially uninitialized",
5653 field);
5654 else
5655 inform (DECL_SOURCE_LOCATION (field),
5656 "field %qD is partially uninitialized",
5657 field);
5658 /* TODO: ideally we'd describe what parts are uninitialized. */
5661 void maybe_emit_fixit_hint () const
5663 if (tree decl = m_src_region->maybe_get_decl ())
5665 gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl));
5666 hint_richloc.add_fixit_insert_after (" = {0}");
5667 inform (&hint_richloc,
5668 "suggest forcing zero-initialization by"
5669 " providing a %<{0}%> initializer");
5673 private:
5674 const region *m_src_region;
5675 const region *m_dest_region;
5676 const svalue *m_copied_sval;
5679 /* Return true if any part of SVAL is uninitialized. */
5681 static bool
5682 contains_uninit_p (const svalue *sval)
5684 struct uninit_finder : public visitor
5686 public:
5687 uninit_finder () : m_found_uninit (false) {}
5688 void visit_poisoned_svalue (const poisoned_svalue *sval)
5690 if (sval->get_poison_kind () == POISON_KIND_UNINIT)
5691 m_found_uninit = true;
5693 bool m_found_uninit;
5696 uninit_finder v;
5697 sval->accept (&v);
5699 return v.m_found_uninit;
5702 /* Function for use by plugins when simulating writing data through a
5703 pointer to an "untrusted" region DST_REG (and thus crossing a security
5704 boundary), such as copying data to user space in an OS kernel.
5706 Check that COPIED_SVAL is fully initialized. If not, complain about
5707 an infoleak to CTXT.
5709 SRC_REG can be NULL; if non-NULL it is used as a hint in the diagnostic
5710 as to where COPIED_SVAL came from. */
5712 void
5713 region_model::maybe_complain_about_infoleak (const region *dst_reg,
5714 const svalue *copied_sval,
5715 const region *src_reg,
5716 region_model_context *ctxt)
5718 /* Check for exposure. */
5719 if (contains_uninit_p (copied_sval))
5720 ctxt->warn (make_unique<exposure_through_uninit_copy> (src_reg,
5721 dst_reg,
5722 copied_sval));
5725 /* Set errno to a positive symbolic int, as if some error has occurred. */
5727 void
5728 region_model::set_errno (const call_details &cd)
5730 const region *errno_reg = m_mgr->get_errno_region ();
5731 conjured_purge p (this, cd.get_ctxt ());
5732 const svalue *new_errno_sval
5733 = m_mgr->get_or_create_conjured_svalue (integer_type_node,
5734 cd.get_call_stmt (),
5735 errno_reg, p);
5736 const svalue *zero
5737 = m_mgr->get_or_create_int_cst (integer_type_node, 0);
5738 add_constraint (new_errno_sval, GT_EXPR, zero, cd.get_ctxt ());
5739 set_value (errno_reg, new_errno_sval, cd.get_ctxt ());
/* class noop_region_model_context : public region_model_context.  */

/* Implementation of region_model_context::add_note vfunc:
   no-op; discard the note.  */

void
noop_region_model_context::add_note (std::unique_ptr<pending_note>)
{
}

/* Implementation of region_model_context::bifurcate vfunc:
   no-op; discard the edge info.  */

void
noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
{
}

/* Implementation of region_model_context::terminate_path vfunc: no-op.  */

void
noop_region_model_context::terminate_path ()
{
}
5759 /* struct model_merger. */
5761 /* Dump a multiline representation of this merger to PP. */
5763 void
5764 model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
5766 pp_string (pp, "model A:");
5767 pp_newline (pp);
5768 m_model_a->dump_to_pp (pp, simple, true);
5769 pp_newline (pp);
5771 pp_string (pp, "model B:");
5772 pp_newline (pp);
5773 m_model_b->dump_to_pp (pp, simple, true);
5774 pp_newline (pp);
5776 pp_string (pp, "merged model:");
5777 pp_newline (pp);
5778 m_merged_model->dump_to_pp (pp, simple, true);
5779 pp_newline (pp);
5782 /* Dump a multiline representation of this merger to FILE. */
5784 void
5785 model_merger::dump (FILE *fp, bool simple) const
5787 pretty_printer pp;
5788 pp_format_decoder (&pp) = default_tree_printer;
5789 pp_show_color (&pp) = pp_show_color (global_dc->printer);
5790 pp.buffer->stream = fp;
5791 dump_to_pp (&pp, simple);
5792 pp_flush (&pp);
/* Dump a multiline representation of this merger to stderr.  */

DEBUG_FUNCTION void
model_merger::dump (bool simple) const
{
  /* Forward to the FILE* overload.  */
  dump (stderr, simple);
}
5803 /* Return true if it's OK to merge SVAL with other svalues. */
5805 bool
5806 model_merger::mergeable_svalue_p (const svalue *sval) const
5808 if (m_ext_state)
5810 /* Reject merging svalues that have non-purgable sm-state,
5811 to avoid falsely reporting memory leaks by merging them
5812 with something else. For example, given a local var "p",
5813 reject the merger of a:
5814 store_a mapping "p" to a malloc-ed ptr
5815 with:
5816 store_b mapping "p" to a NULL ptr. */
5817 if (m_state_a)
5818 if (!m_state_a->can_purge_p (*m_ext_state, sval))
5819 return false;
5820 if (m_state_b)
5821 if (!m_state_b->can_purge_p (*m_ext_state, sval))
5822 return false;
5824 return true;
5827 } // namespace ana
/* Dump RMODEL fully to stderr (i.e. without summarization).  */

DEBUG_FUNCTION void
debug (const region_model &rmodel)
{
  /* "simple" == false: full, non-summarized dump.  */
  rmodel.dump (false);
}
5837 /* class rejected_op_constraint : public rejected_constraint. */
5839 void
5840 rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
5842 region_model m (m_model);
5843 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
5844 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
5845 lhs_sval->dump_to_pp (pp, true);
5846 pp_printf (pp, " %s ", op_symbol_code (m_op));
5847 rhs_sval->dump_to_pp (pp, true);
/* class rejected_default_case : public rejected_constraint.  */

/* Describe the rejected constraint: the implicit "default:" of a
   switch on an enum.  */

void
rejected_default_case::dump_to_pp (pretty_printer *pp) const
{
  pp_string (pp, "implicit default for enum");
}
5858 /* class rejected_ranges_constraint : public rejected_constraint. */
5860 void
5861 rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
5863 region_model m (m_model);
5864 const svalue *sval = m.get_rvalue (m_expr, NULL);
5865 sval->dump_to_pp (pp, true);
5866 pp_string (pp, " in ");
5867 m_ranges->dump_to_pp (pp, true);
/* class engine.  */

/* engine's ctor.
   SG is the supergraph being analyzed; LOGGER (which may be NULL)
   is passed to the region_model_manager.  */

engine::engine (const supergraph *sg, logger *logger)
: m_sg (sg), m_mgr (logger)
{
}
/* Dump the managed objects by class to LOGGER, and the per-class totals.  */

void
engine::log_stats (logger *logger) const
{
  /* "true" requests the per-class totals as well.  */
  m_mgr.log_stats (logger, true);
}
5887 namespace ana {
5889 #if CHECKING_P
5891 namespace selftest {
5893 /* Build a constant tree of the given type from STR. */
5895 static tree
5896 build_real_cst_from_string (tree type, const char *str)
5898 REAL_VALUE_TYPE real;
5899 real_from_string (&real, str);
5900 return build_real (type, real);
5903 /* Append various "interesting" constants to OUT (e.g. NaN). */
5905 static void
5906 append_interesting_constants (auto_vec<tree> *out)
5908 out->safe_push (build_int_cst (integer_type_node, 0));
5909 out->safe_push (build_int_cst (integer_type_node, 42));
5910 out->safe_push (build_int_cst (unsigned_type_node, 0));
5911 out->safe_push (build_int_cst (unsigned_type_node, 42));
5912 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
5913 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
5914 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
5915 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
5916 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
5917 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
5918 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
5919 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
5922 /* Verify that tree_cmp is a well-behaved comparator for qsort, even
5923 if the underlying constants aren't comparable. */
5925 static void
5926 test_tree_cmp_on_constants ()
5928 auto_vec<tree> csts;
5929 append_interesting_constants (&csts);
5931 /* Try sorting every triple. */
5932 const unsigned num = csts.length ();
5933 for (unsigned i = 0; i < num; i++)
5934 for (unsigned j = 0; j < num; j++)
5935 for (unsigned k = 0; k < num; k++)
5937 auto_vec<tree> v (3);
5938 v.quick_push (csts[i]);
5939 v.quick_push (csts[j]);
5940 v.quick_push (csts[k]);
5941 v.qsort (tree_cmp);
5945 /* Implementation detail of the ASSERT_CONDITION_* macros. */
5947 void
5948 assert_condition (const location &loc,
5949 region_model &model,
5950 const svalue *lhs, tree_code op, const svalue *rhs,
5951 tristate expected)
5953 tristate actual = model.eval_condition (lhs, op, rhs);
5954 ASSERT_EQ_AT (loc, actual, expected);
5957 /* Implementation detail of the ASSERT_CONDITION_* macros. */
5959 void
5960 assert_condition (const location &loc,
5961 region_model &model,
5962 tree lhs, tree_code op, tree rhs,
5963 tristate expected)
5965 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
5966 ASSERT_EQ_AT (loc, actual, expected);
5969 /* Implementation detail of ASSERT_DUMP_TREE_EQ. */
5971 static void
5972 assert_dump_tree_eq (const location &loc, tree t, const char *expected)
5974 auto_fix_quotes sentinel;
5975 pretty_printer pp;
5976 pp_format_decoder (&pp) = default_tree_printer;
5977 dump_tree (&pp, t);
5978 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
5981 /* Assert that dump_tree (T) is EXPECTED. */
5983 #define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
5984 SELFTEST_BEGIN_STMT \
5985 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
5986 SELFTEST_END_STMT
5988 /* Implementation detail of ASSERT_DUMP_EQ. */
5990 static void
5991 assert_dump_eq (const location &loc,
5992 const region_model &model,
5993 bool summarize,
5994 const char *expected)
5996 auto_fix_quotes sentinel;
5997 pretty_printer pp;
5998 pp_format_decoder (&pp) = default_tree_printer;
6000 model.dump_to_pp (&pp, summarize, true);
6001 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
6004 /* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
6006 #define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
6007 SELFTEST_BEGIN_STMT \
6008 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
6009 SELFTEST_END_STMT
6011 /* Smoketest for region_model::dump_to_pp. */
6013 static void
6014 test_dump ()
6016 region_model_manager mgr;
6017 region_model model (&mgr);
6019 ASSERT_DUMP_EQ (model, false,
6020 "stack depth: 0\n"
6021 "m_called_unknown_fn: FALSE\n"
6022 "constraint_manager:\n"
6023 " equiv classes:\n"
6024 " constraints:\n");
6025 ASSERT_DUMP_EQ (model, true,
6026 "stack depth: 0\n"
6027 "m_called_unknown_fn: FALSE\n"
6028 "constraint_manager:\n"
6029 " equiv classes:\n"
6030 " constraints:\n");
6033 /* Helper function for selftests. Create a struct or union type named NAME,
6034 with the fields given by the FIELD_DECLS in FIELDS.
6035 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
6036 create a UNION_TYPE. */
6038 static tree
6039 make_test_compound_type (const char *name, bool is_struct,
6040 const auto_vec<tree> *fields)
6042 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
6043 TYPE_NAME (t) = get_identifier (name);
6044 TYPE_SIZE (t) = 0;
6046 tree fieldlist = NULL;
6047 int i;
6048 tree field;
6049 FOR_EACH_VEC_ELT (*fields, i, field)
6051 gcc_assert (TREE_CODE (field) == FIELD_DECL);
6052 DECL_CONTEXT (field) = t;
6053 fieldlist = chainon (field, fieldlist);
6055 fieldlist = nreverse (fieldlist);
6056 TYPE_FIELDS (t) = fieldlist;
6058 layout_type (t);
6059 return t;
/* Selftest fixture for creating the type "struct coord {int x; int y; };".  */

struct coord_test
{
  coord_test ()
  {
    /* Build the two int fields "x" and "y", then lay out the struct.  */
    auto_vec<tree> fields;
    m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
                            get_identifier ("x"), integer_type_node);
    fields.safe_push (m_x_field);
    m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
                            get_identifier ("y"), integer_type_node);
    fields.safe_push (m_y_field);
    m_coord_type = make_test_compound_type ("coord", true, &fields);
  }

  /* FIELD_DECL for "x".  */
  tree m_x_field;
  /* FIELD_DECL for "y".  */
  tree m_y_field;
  /* The laid-out RECORD_TYPE.  */
  tree m_coord_type;
};
/* Verify usage of a struct.  */

static void
test_struct ()
{
  coord_test ct;

  /* Build "c", a global of type "struct coord", plus COMPONENT_REFs
     for "c.x" and "c.y".  */
  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
                     c, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  model.set_value (c_x, int_17, NULL);
  model.set_value (c_y, int_m3, NULL);

  /* Verify get_offset for "c.x".  */
  {
    const region *c_x_reg = model.get_lvalue (c_x, NULL);
    region_offset offset = c_x_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "c.y": the second int field starts one int
     (INT_TYPE_SIZE bits) into the struct.  */
  {
    const region *c_y_reg = model.get_lvalue (c_y, NULL);
    region_offset offset = c_y_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }
}
/* Verify usage of an array element.  */

static void
test_array_1 ()
{
  /* Build "char a[10];" as a global.  */
  tree tlen = size_int (10);
  tree arr_type = build_array_type (char_type_node, build_index_type (tlen));

  tree a = build_global_decl ("a", arr_type);

  region_model_manager mgr;
  region_model model (&mgr);
  /* Smoketest: store 'A' into "a[0]" via an ARRAY_REF lvalue.  */
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree a_0 = build4 (ARRAY_REF, char_type_node,
                     a, int_0, NULL_TREE, NULL_TREE);
  tree char_A = build_int_cst (char_type_node, 'A');
  model.set_value (a_0, char_A, NULL);
}
/* Verify that region_model::get_representative_tree works as expected.  */

static void
test_get_representative_tree ()
{
  region_model_manager mgr;

  /* STRING_CST: the representative of a string constant's value is the
     STRING_CST itself.  */
  {
    tree string_cst = build_string (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_EQ (rep, string_cst);
  }

  /* String literal (pointer to the first char).  */
  {
    tree string_cst_ptr = build_string_literal (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
  }

  /* Value of an element within an array.  */
  {
    tree tlen = size_int (10);
    tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
    tree a = build_global_decl ("a", arr_type);
    /* A placeholder value we can store and then look up.  */
    placeholder_svalue test_sval (char_type_node, "test value");

    /* Value of a[3].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree int_3 = build_int_cst (integer_type_node, 3);
      tree a_3 = build4 (ARRAY_REF, char_type_node,
                         a, int_3, NULL_TREE, NULL_TREE);
      const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
      model.set_value (a_3_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[3]");
    }

    /* Value of a[0].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree idx = build_int_cst (integer_type_node, 0);
      tree a_0 = build4 (ARRAY_REF, char_type_node,
                         a, idx, NULL_TREE, NULL_TREE);
      const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
      model.set_value (a_0_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[0]");
    }
  }

  /* Value of a field within a struct.  */
  {
    coord_test ct;

    tree c = build_global_decl ("c", ct.m_coord_type);
    tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                       c, ct.m_x_field, NULL_TREE);
    tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
                       c, ct.m_y_field, NULL_TREE);

    test_region_model_context ctxt;

    /* Value of initial field.  */
    {
      region_model m (&mgr);
      const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
      placeholder_svalue test_sval_x (integer_type_node, "test x val");
      m.set_value (c_x_reg, &test_sval_x, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_x);
      ASSERT_DUMP_TREE_EQ (rep, "c.x");
    }

    /* Value of non-initial field.  */
    {
      region_model m (&mgr);
      const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
      placeholder_svalue test_sval_y (integer_type_node, "test y val");
      m.set_value (c_y_reg, &test_sval_y, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_y);
      ASSERT_DUMP_TREE_EQ (rep, "c.y");
    }
  }
}
/* Verify that calling region_model::get_rvalue repeatedly on the same
   tree constant retrieves the same svalue *.  */

static void
test_unique_constants ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_42 = build_int_cst (integer_type_node, 42);

  test_region_model_context ctxt;
  region_model_manager mgr;
  region_model model (&mgr);
  /* Same constant -> same svalue; distinct constants -> distinct svalues.  */
  ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
  ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
             model.get_rvalue (int_42, &ctxt));
  ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
  ASSERT_EQ (ctxt.get_num_diagnostics (), 0);

  /* A "(const int)42" will be a different tree from "(int)42)"...  */
  tree const_int_type_node
    = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  tree const_int_42 = build_int_cst (const_int_type_node, 42);
  ASSERT_NE (int_42, const_int_42);
  /* It should have a different const_svalue.  */
  const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
  const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
  ASSERT_NE (int_42_sval, const_int_42_sval);
  /* But they should compare as equal.  */
  ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
  ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
}
/* Verify that each type gets its own singleton unknown_svalue within a
   region_model_manager, and that NULL_TREE gets its own singleton.  */

static void
test_unique_unknowns ()
{
  region_model_manager mgr;
  const svalue *unknown_int
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  /* Repeated calls with the same type should get the same "unknown"
     svalue.  */
  const svalue *unknown_int_2
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  ASSERT_EQ (unknown_int, unknown_int_2);

  /* Different types (or the NULL type) should have different
     unknown_svalues.  */
  const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_NE (unknown_NULL_type, unknown_int);

  /* Repeated calls with NULL for the type should get the same "unknown"
     svalue.  */
  const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
}
/* Verify that initial_svalue are handled as expected.  */

static void
test_initial_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  /* Distinct globals get distinct initial values...  */
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  ASSERT_NE (x_init, y_init);
  /* ...and the rvalue of an untouched global is its region's
     initial value.  */
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
}
/* Verify that unary ops are folded as expected.  */

static void
test_unaryop_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

  /* "(int)x" -> "x": a cast to the same type folds away.  */
  ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));

  /* "(void *)x" -> something other than "x".  */
  ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));

  /* "!(x == y)" -> "x != y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
               (boolean_type_node, TRUTH_NOT_EXPR,
                mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
                                         x_init, y_init)),
             mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
                                      x_init, y_init));
  /* "!(x > y)" -> "x <= y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
               (boolean_type_node, TRUTH_NOT_EXPR,
                mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
                                         x_init, y_init)),
             mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
                                      x_init, y_init));
}
/* Verify that binops on constant svalues are folded.  */

static void
test_binop_svalue_folding ()
{
#define NUM_CSTS 10
  tree cst_int[NUM_CSTS];
  region_model_manager mgr;
  const svalue *cst_sval[NUM_CSTS];
  /* Build constant svalues for 0..9 and sanity-check them.  */
  for (int i = 0; i < NUM_CSTS; i++)
    {
      cst_int[i] = build_int_cst (integer_type_node, i);
      cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
      ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
      ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
    }

  /* Check that constant folding of +, -, *, ==, != on every pair gives
     the expected constant svalue (where the result is in range).  */
  for (int i = 0; i < NUM_CSTS; i++)
    for (int j = 0; j < NUM_CSTS; j++)
      {
        if (i != j)
          ASSERT_NE (cst_sval[i], cst_sval[j]);
        if (i + j < NUM_CSTS)
          {
            const svalue *sum
              = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                                         cst_sval[i], cst_sval[j]);
            ASSERT_EQ (sum, cst_sval[i + j]);
          }
        if (i - j >= 0)
          {
            const svalue *difference
              = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
                                         cst_sval[i], cst_sval[j]);
            ASSERT_EQ (difference, cst_sval[i - j]);
          }
        if (i * j < NUM_CSTS)
          {
            const svalue *product
              = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                                         cst_sval[i], cst_sval[j]);
            ASSERT_EQ (product, cst_sval[i * j]);
          }
        const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
                                                    cst_sval[i], cst_sval[j]);
        ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
        const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
                                                     cst_sval[i], cst_sval[j]);
        ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
        // etc
      }

  tree x = build_global_decl ("x", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);

  /* PLUS_EXPR folding: x + 0 and 0 + x both fold to x.  */
  const svalue *x_init_plus_zero
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               x_init, cst_sval[0]);
  ASSERT_EQ (x_init_plus_zero, x_init);
  const svalue *zero_plus_x_init
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               cst_sval[0], x_init);
  ASSERT_EQ (zero_plus_x_init, x_init);

  /* MULT_EXPR folding: x * 0 -> 0; x * 1 -> x (in either operand order).  */
  const svalue *x_init_times_zero
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               x_init, cst_sval[0]);
  ASSERT_EQ (x_init_times_zero, cst_sval[0]);
  const svalue *zero_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               cst_sval[0], x_init);
  ASSERT_EQ (zero_times_x_init, cst_sval[0]);

  const svalue *x_init_times_one
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               x_init, cst_sval[1]);
  ASSERT_EQ (x_init_times_one, x_init);
  const svalue *one_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               cst_sval[1], x_init);
  ASSERT_EQ (one_times_x_init, x_init);

  // etc
  // TODO: do we want to use the match-and-simplify DSL for this?

  /* Verify that binops put any constants on the RHS.  */
  const svalue *four_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               cst_sval[4], x_init);
  const svalue *x_init_times_four
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               x_init, cst_sval[4]);
  ASSERT_EQ (four_times_x_init, x_init_times_four);
  const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
  ASSERT_EQ (binop->get_op (), MULT_EXPR);
  ASSERT_EQ (binop->get_arg0 (), x_init);
  ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);

  /* Verify that ((x + 1) + 1) == (x + 2).  */
  const svalue *x_init_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               x_init, cst_sval[1]);
  const svalue *x_init_plus_two
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               x_init, cst_sval[2]);
  const svalue *x_init_plus_one_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               x_init_plus_one, cst_sval[1]);
  ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);

  /* Verify various binops on booleans.  */
  {
    const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
    const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
    const svalue *sval_unknown
      = mgr.get_or_create_unknown_svalue (boolean_type_node);
    const placeholder_svalue sval_placeholder (boolean_type_node, "v");
    /* OR: true dominates; false is the identity.  */
    for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
      {
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_true, sval_unknown),
                   sval_true);
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_false, sval_unknown),
                   sval_unknown);
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_false, &sval_placeholder),
                   &sval_placeholder);
      }
    /* AND: false dominates; true is the identity.  */
    for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
      {
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_false, sval_unknown),
                   sval_false);
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_true, sval_unknown),
                   sval_unknown);
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_true, &sval_placeholder),
                   &sval_placeholder);
      }
  }
}
6497 /* Verify that sub_svalues are folded as expected. */
6499 static void
6500 test_sub_svalue_folding ()
6502 coord_test ct;
6503 tree c = build_global_decl ("c", ct.m_coord_type);
6504 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6505 c, ct.m_x_field, NULL_TREE);
6507 region_model_manager mgr;
6508 region_model model (&mgr);
6509 test_region_model_context ctxt;
6510 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
6512 /* Verify that sub_svalue of "unknown" simply
6513 yields an unknown. */
6515 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
6516 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
6517 unknown, c_x_reg);
6518 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
6519 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
6522 /* Get BIT within VAL as a symbolic value within MGR. */
6524 static const svalue *
6525 get_bit (region_model_manager *mgr,
6526 bit_offset_t bit,
6527 unsigned HOST_WIDE_INT val)
6529 const svalue *inner_svalue
6530 = mgr->get_or_create_int_cst (unsigned_type_node, val);
6531 return mgr->get_or_create_bits_within (boolean_type_node,
6532 bit_range (bit, 1),
6533 inner_svalue);
6536 /* Verify that bits_within_svalues are folded as expected. */
6538 static void
6539 test_bits_within_svalue_folding ()
6541 region_model_manager mgr;
6543 const svalue *zero = mgr.get_or_create_int_cst (boolean_type_node, 0);
6544 const svalue *one = mgr.get_or_create_int_cst (boolean_type_node, 1);
6547 const unsigned val = 0x0000;
6548 for (unsigned bit = 0; bit < 16; bit++)
6549 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6553 const unsigned val = 0x0001;
6554 ASSERT_EQ (get_bit (&mgr, 0, val), one);
6555 for (unsigned bit = 1; bit < 16; bit++)
6556 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6560 const unsigned val = 0x8000;
6561 for (unsigned bit = 0; bit < 15; bit++)
6562 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6563 ASSERT_EQ (get_bit (&mgr, 15, val), one);
6567 const unsigned val = 0xFFFF;
6568 for (unsigned bit = 0; bit < 16; bit++)
6569 ASSERT_EQ (get_bit (&mgr, bit, val), one);
6573 /* Test that region::descendent_of_p works as expected. */
6575 static void
6576 test_descendent_of_p ()
6578 region_model_manager mgr;
6579 const region *stack = mgr.get_stack_region ();
6580 const region *heap = mgr.get_heap_region ();
6581 const region *code = mgr.get_code_region ();
6582 const region *globals = mgr.get_globals_region ();
6584 /* descendent_of_p should return true when used on the region itself. */
6585 ASSERT_TRUE (stack->descendent_of_p (stack));
6586 ASSERT_FALSE (stack->descendent_of_p (heap));
6587 ASSERT_FALSE (stack->descendent_of_p (code));
6588 ASSERT_FALSE (stack->descendent_of_p (globals));
6590 tree x = build_global_decl ("x", integer_type_node);
6591 const region *x_reg = mgr.get_region_for_global (x);
6592 ASSERT_TRUE (x_reg->descendent_of_p (globals));
6594 /* A cast_region should be a descendent of the original region. */
6595 const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
6596 ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
6599 /* Verify that bit_range_region works as expected. */
6601 static void
6602 test_bit_range_regions ()
6604 tree x = build_global_decl ("x", integer_type_node);
6605 region_model_manager mgr;
6606 const region *x_reg = mgr.get_region_for_global (x);
6607 const region *byte0
6608 = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
6609 const region *byte1
6610 = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
6611 ASSERT_TRUE (byte0->descendent_of_p (x_reg));
6612 ASSERT_TRUE (byte1->descendent_of_p (x_reg));
6613 ASSERT_NE (byte0, byte1);
6616 /* Verify that simple assignments work as expected. */
6618 static void
6619 test_assignment ()
6621 tree int_0 = build_int_cst (integer_type_node, 0);
6622 tree x = build_global_decl ("x", integer_type_node);
6623 tree y = build_global_decl ("y", integer_type_node);
6625 /* "x == 0", then use of y, then "y = 0;". */
6626 region_model_manager mgr;
6627 region_model model (&mgr);
6628 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
6629 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
6630 model.set_value (model.get_lvalue (y, NULL),
6631 model.get_rvalue (int_0, NULL),
6632 NULL);
6633 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
6634 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
6637 /* Verify that compound assignments work as expected. */
6639 static void
6640 test_compound_assignment ()
6642 coord_test ct;
6644 tree c = build_global_decl ("c", ct.m_coord_type);
6645 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6646 c, ct.m_x_field, NULL_TREE);
6647 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6648 c, ct.m_y_field, NULL_TREE);
6649 tree d = build_global_decl ("d", ct.m_coord_type);
6650 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6651 d, ct.m_x_field, NULL_TREE);
6652 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6653 d, ct.m_y_field, NULL_TREE);
6655 tree int_17 = build_int_cst (integer_type_node, 17);
6656 tree int_m3 = build_int_cst (integer_type_node, -3);
6658 region_model_manager mgr;
6659 region_model model (&mgr);
6660 model.set_value (c_x, int_17, NULL);
6661 model.set_value (c_y, int_m3, NULL);
6663 /* Copy c to d. */
6664 const svalue *sval = model.get_rvalue (c, NULL);
6665 model.set_value (model.get_lvalue (d, NULL), sval, NULL);
6667 /* Check that the fields have the same svalues. */
6668 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
6669 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
6672 /* Verify the details of pushing and popping stack frames. */
6674 static void
6675 test_stack_frames ()
6677 tree int_42 = build_int_cst (integer_type_node, 42);
6678 tree int_10 = build_int_cst (integer_type_node, 10);
6679 tree int_5 = build_int_cst (integer_type_node, 5);
6680 tree int_0 = build_int_cst (integer_type_node, 0);
6682 auto_vec <tree> param_types;
6683 tree parent_fndecl = make_fndecl (integer_type_node,
6684 "parent_fn",
6685 param_types);
6686 allocate_struct_function (parent_fndecl, true);
6688 tree child_fndecl = make_fndecl (integer_type_node,
6689 "child_fn",
6690 param_types);
6691 allocate_struct_function (child_fndecl, true);
6693 /* "a" and "b" in the parent frame. */
6694 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6695 get_identifier ("a"),
6696 integer_type_node);
6697 DECL_CONTEXT (a) = parent_fndecl;
6698 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6699 get_identifier ("b"),
6700 integer_type_node);
6701 DECL_CONTEXT (b) = parent_fndecl;
6702 /* "x" and "y" in a child frame. */
6703 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6704 get_identifier ("x"),
6705 integer_type_node);
6706 DECL_CONTEXT (x) = child_fndecl;
6707 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6708 get_identifier ("y"),
6709 integer_type_node);
6710 DECL_CONTEXT (y) = child_fndecl;
6712 /* "p" global. */
6713 tree p = build_global_decl ("p", ptr_type_node);
6715 /* "q" global. */
6716 tree q = build_global_decl ("q", ptr_type_node);
6718 region_model_manager mgr;
6719 test_region_model_context ctxt;
6720 region_model model (&mgr);
6722 /* Push stack frame for "parent_fn". */
6723 const region *parent_frame_reg
6724 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
6725 NULL, &ctxt);
6726 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
6727 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
6728 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
6729 model.set_value (a_in_parent_reg,
6730 model.get_rvalue (int_42, &ctxt),
6731 &ctxt);
6732 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
6734 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
6735 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
6736 tristate (tristate::TS_TRUE));
6738 /* Push stack frame for "child_fn". */
6739 const region *child_frame_reg
6740 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
6741 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
6742 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
6743 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
6744 model.set_value (x_in_child_reg,
6745 model.get_rvalue (int_0, &ctxt),
6746 &ctxt);
6747 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
6749 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
6750 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
6751 tristate (tristate::TS_TRUE));
6753 /* Point a global pointer at a local in the child frame: p = &x. */
6754 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
6755 model.set_value (p_in_globals_reg,
6756 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
6757 &ctxt);
6758 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
6760 /* Point another global pointer at p: q = &p. */
6761 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
6762 model.set_value (q_in_globals_reg,
6763 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
6764 &ctxt);
6766 /* Test region::descendent_of_p. */
6767 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
6768 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
6769 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
6771 /* Pop the "child_fn" frame from the stack. */
6772 model.pop_frame (NULL, NULL, &ctxt);
6773 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
6774 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
6776 /* Verify that p (which was pointing at the local "x" in the popped
6777 frame) has been poisoned. */
6778 const svalue *new_p_sval = model.get_rvalue (p, NULL);
6779 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
6780 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
6781 POISON_KIND_POPPED_STACK);
6783 /* Verify that q still points to p, in spite of the region
6784 renumbering. */
6785 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
6786 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
6787 ASSERT_EQ (new_q_sval->maybe_get_region (),
6788 model.get_lvalue (p, &ctxt));
6790 /* Verify that top of stack has been updated. */
6791 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
6793 /* Verify locals in parent frame. */
6794 /* Verify "a" still has its value. */
6795 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
6796 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
6797 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
6798 int_42);
6799 /* Verify "b" still has its constraint. */
6800 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
6801 tristate (tristate::TS_TRUE));
6804 /* Verify that get_representative_path_var works as expected, that
6805 we can map from regions to parms and back within a recursive call
6806 stack. */
6808 static void
6809 test_get_representative_path_var ()
6811 auto_vec <tree> param_types;
6812 tree fndecl = make_fndecl (integer_type_node,
6813 "factorial",
6814 param_types);
6815 allocate_struct_function (fndecl, true);
6817 /* Parm "n". */
6818 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6819 get_identifier ("n"),
6820 integer_type_node);
6821 DECL_CONTEXT (n) = fndecl;
6823 region_model_manager mgr;
6824 test_region_model_context ctxt;
6825 region_model model (&mgr);
6827 /* Push 5 stack frames for "factorial", each with a param */
6828 auto_vec<const region *> parm_regs;
6829 auto_vec<const svalue *> parm_svals;
6830 for (int depth = 0; depth < 5; depth++)
6832 const region *frame_n_reg
6833 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
6834 const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
6835 parm_regs.safe_push (parm_n_reg);
6837 ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
6838 const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
6839 parm_svals.safe_push (sval_n);
6842 /* Verify that we can recognize that the regions are the parms,
6843 at every depth. */
6844 for (int depth = 0; depth < 5; depth++)
6847 svalue_set visited;
6848 ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
6849 &visited),
6850 path_var (n, depth + 1));
6852 /* ...and that we can lookup lvalues for locals for all frames,
6853 not just the top. */
6854 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
6855 parm_regs[depth]);
6856 /* ...and that we can locate the svalues. */
6858 svalue_set visited;
6859 ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
6860 &visited),
6861 path_var (n, depth + 1));
6866 /* Ensure that region_model::operator== works as expected. */
6868 static void
6869 test_equality_1 ()
6871 tree int_42 = build_int_cst (integer_type_node, 42);
6872 tree int_17 = build_int_cst (integer_type_node, 17);
6874 /* Verify that "empty" region_model instances are equal to each other. */
6875 region_model_manager mgr;
6876 region_model model0 (&mgr);
6877 region_model model1 (&mgr);
6878 ASSERT_EQ (model0, model1);
6880 /* Verify that setting state in model1 makes the models non-equal. */
6881 tree x = build_global_decl ("x", integer_type_node);
6882 model0.set_value (x, int_42, NULL);
6883 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6884 ASSERT_NE (model0, model1);
6886 /* Verify the copy-ctor. */
6887 region_model model2 (model0);
6888 ASSERT_EQ (model0, model2);
6889 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6890 ASSERT_NE (model1, model2);
6892 /* Verify that models obtained from copy-ctor are independently editable
6893 w/o affecting the original model. */
6894 model2.set_value (x, int_17, NULL);
6895 ASSERT_NE (model0, model2);
6896 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
6897 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6900 /* Verify that region models for
6901 x = 42; y = 113;
6903 y = 113; x = 42;
6904 are equal. */
6906 static void
6907 test_canonicalization_2 ()
6909 tree int_42 = build_int_cst (integer_type_node, 42);
6910 tree int_113 = build_int_cst (integer_type_node, 113);
6911 tree x = build_global_decl ("x", integer_type_node);
6912 tree y = build_global_decl ("y", integer_type_node);
6914 region_model_manager mgr;
6915 region_model model0 (&mgr);
6916 model0.set_value (model0.get_lvalue (x, NULL),
6917 model0.get_rvalue (int_42, NULL),
6918 NULL);
6919 model0.set_value (model0.get_lvalue (y, NULL),
6920 model0.get_rvalue (int_113, NULL),
6921 NULL);
6923 region_model model1 (&mgr);
6924 model1.set_value (model1.get_lvalue (y, NULL),
6925 model1.get_rvalue (int_113, NULL),
6926 NULL);
6927 model1.set_value (model1.get_lvalue (x, NULL),
6928 model1.get_rvalue (int_42, NULL),
6929 NULL);
6931 ASSERT_EQ (model0, model1);
6934 /* Verify that constraints for
6935 x > 3 && y > 42
6937 y > 42 && x > 3
6938 are equal after canonicalization. */
6940 static void
6941 test_canonicalization_3 ()
6943 tree int_3 = build_int_cst (integer_type_node, 3);
6944 tree int_42 = build_int_cst (integer_type_node, 42);
6945 tree x = build_global_decl ("x", integer_type_node);
6946 tree y = build_global_decl ("y", integer_type_node);
6948 region_model_manager mgr;
6949 region_model model0 (&mgr);
6950 model0.add_constraint (x, GT_EXPR, int_3, NULL);
6951 model0.add_constraint (y, GT_EXPR, int_42, NULL);
6953 region_model model1 (&mgr);
6954 model1.add_constraint (y, GT_EXPR, int_42, NULL);
6955 model1.add_constraint (x, GT_EXPR, int_3, NULL);
6957 model0.canonicalize ();
6958 model1.canonicalize ();
6959 ASSERT_EQ (model0, model1);
6962 /* Verify that we can canonicalize a model containing NaN and other real
6963 constants. */
6965 static void
6966 test_canonicalization_4 ()
6968 auto_vec<tree> csts;
6969 append_interesting_constants (&csts);
6971 region_model_manager mgr;
6972 region_model model (&mgr);
6974 for (tree cst : csts)
6975 model.get_rvalue (cst, NULL);
6977 model.canonicalize ();
6980 /* Assert that if we have two region_model instances
6981 with values VAL_A and VAL_B for EXPR that they are
6982 mergable. Write the merged model to *OUT_MERGED_MODEL,
6983 and the merged svalue ptr to *OUT_MERGED_SVALUE.
6984 If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
6985 for that region_model. */
6987 static void
6988 assert_region_models_merge (tree expr, tree val_a, tree val_b,
6989 region_model *out_merged_model,
6990 const svalue **out_merged_svalue)
6992 region_model_manager *mgr = out_merged_model->get_manager ();
6993 program_point point (program_point::origin (*mgr));
6994 test_region_model_context ctxt;
6995 region_model model0 (mgr);
6996 region_model model1 (mgr);
6997 if (val_a)
6998 model0.set_value (model0.get_lvalue (expr, &ctxt),
6999 model0.get_rvalue (val_a, &ctxt),
7000 &ctxt);
7001 if (val_b)
7002 model1.set_value (model1.get_lvalue (expr, &ctxt),
7003 model1.get_rvalue (val_b, &ctxt),
7004 &ctxt);
7006 /* They should be mergeable. */
7007 ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
7008 *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
7011 /* Verify that we can merge region_model instances. */
7013 static void
7014 test_state_merging ()
7016 tree int_42 = build_int_cst (integer_type_node, 42);
7017 tree int_113 = build_int_cst (integer_type_node, 113);
7018 tree x = build_global_decl ("x", integer_type_node);
7019 tree y = build_global_decl ("y", integer_type_node);
7020 tree z = build_global_decl ("z", integer_type_node);
7021 tree p = build_global_decl ("p", ptr_type_node);
7023 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
7024 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
7026 auto_vec <tree> param_types;
7027 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
7028 allocate_struct_function (test_fndecl, true);
7030 /* Param "a". */
7031 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7032 get_identifier ("a"),
7033 integer_type_node);
7034 DECL_CONTEXT (a) = test_fndecl;
7035 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
7037 /* Param "q", a pointer. */
7038 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7039 get_identifier ("q"),
7040 ptr_type_node);
7041 DECL_CONTEXT (q) = test_fndecl;
7043 region_model_manager mgr;
7044 program_point point (program_point::origin (mgr));
7047 region_model model0 (&mgr);
7048 region_model model1 (&mgr);
7049 region_model merged (&mgr);
7050 /* Verify empty models can be merged. */
7051 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7052 ASSERT_EQ (model0, merged);
7055 /* Verify that we can merge two contradictory constraints on the
7056 value for a global. */
7057 /* TODO: verify that the merged model doesn't have a value for
7058 the global */
7060 region_model model0 (&mgr);
7061 region_model model1 (&mgr);
7062 region_model merged (&mgr);
7063 test_region_model_context ctxt;
7064 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7065 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
7066 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7067 ASSERT_NE (model0, merged);
7068 ASSERT_NE (model1, merged);
7071 /* Verify handling of a PARM_DECL. */
7073 test_region_model_context ctxt;
7074 region_model model0 (&mgr);
7075 region_model model1 (&mgr);
7076 ASSERT_EQ (model0.get_stack_depth (), 0);
7077 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
7078 ASSERT_EQ (model0.get_stack_depth (), 1);
7079 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
7081 placeholder_svalue test_sval (integer_type_node, "test sval");
7082 model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
7083 model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
7084 ASSERT_EQ (model0, model1);
7086 /* They should be mergeable, and the result should be the same. */
7087 region_model merged (&mgr);
7088 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7089 ASSERT_EQ (model0, merged);
7090 /* In particular, "a" should have the placeholder value. */
7091 ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
7094 /* Verify handling of a global. */
7096 test_region_model_context ctxt;
7097 region_model model0 (&mgr);
7098 region_model model1 (&mgr);
7100 placeholder_svalue test_sval (integer_type_node, "test sval");
7101 model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
7102 model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
7103 ASSERT_EQ (model0, model1);
7105 /* They should be mergeable, and the result should be the same. */
7106 region_model merged (&mgr);
7107 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7108 ASSERT_EQ (model0, merged);
7109 /* In particular, "x" should have the placeholder value. */
7110 ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
7113 /* Use global-handling to verify various combinations of values. */
7115 /* Two equal constant values. */
7117 region_model merged (&mgr);
7118 const svalue *merged_x_sval;
7119 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
7121 /* In particular, there should be a constant value for "x". */
7122 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
7123 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
7124 int_42);
7127 /* Two non-equal constant values. */
7129 region_model merged (&mgr);
7130 const svalue *merged_x_sval;
7131 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
7133 /* In particular, there should be a "widening" value for "x". */
7134 ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
7137 /* Initial and constant. */
7139 region_model merged (&mgr);
7140 const svalue *merged_x_sval;
7141 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
7143 /* In particular, there should be an unknown value for "x". */
7144 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7147 /* Constant and initial. */
7149 region_model merged (&mgr);
7150 const svalue *merged_x_sval;
7151 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
7153 /* In particular, there should be an unknown value for "x". */
7154 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7157 /* Unknown and constant. */
7158 // TODO
7160 /* Pointers: NULL and NULL. */
7161 // TODO
7163 /* Pointers: NULL and non-NULL. */
7164 // TODO
7166 /* Pointers: non-NULL and non-NULL: ptr to a local. */
7168 region_model model0 (&mgr);
7169 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7170 model0.set_value (model0.get_lvalue (p, NULL),
7171 model0.get_rvalue (addr_of_a, NULL), NULL);
7173 region_model model1 (model0);
7174 ASSERT_EQ (model0, model1);
7176 /* They should be mergeable, and the result should be the same. */
7177 region_model merged (&mgr);
7178 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7179 ASSERT_EQ (model0, merged);
7182 /* Pointers: non-NULL and non-NULL: ptr to a global. */
7184 region_model merged (&mgr);
7185 /* p == &y in both input models. */
7186 const svalue *merged_p_sval;
7187 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
7188 &merged_p_sval);
7190 /* We should get p == &y in the merged model. */
7191 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
7192 const region_svalue *merged_p_ptr
7193 = merged_p_sval->dyn_cast_region_svalue ();
7194 const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
7195 ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
7198 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
7200 region_model merged (&mgr);
7201 /* x == &y vs x == &z in the input models; these are actually casts
7202 of the ptrs to "int". */
7203 const svalue *merged_x_sval;
7204 // TODO:
7205 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
7206 &merged_x_sval);
7208 /* We should get x == unknown in the merged model. */
7209 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7212 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
7214 test_region_model_context ctxt;
7215 region_model model0 (&mgr);
7216 tree size = build_int_cst (size_type_node, 1024);
7217 const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
7218 const region *new_reg
7219 = model0.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
7220 const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
7221 model0.set_value (model0.get_lvalue (p, &ctxt),
7222 ptr_sval, &ctxt);
7224 region_model model1 (model0);
7226 ASSERT_EQ (model0, model1);
7228 region_model merged (&mgr);
7229 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7231 /* The merged model ought to be identical. */
7232 ASSERT_EQ (model0, merged);
7235 /* Two regions sharing the same placeholder svalue should continue sharing
7236 it after self-merger. */
7238 test_region_model_context ctxt;
7239 region_model model0 (&mgr);
7240 placeholder_svalue placeholder_sval (integer_type_node, "test");
7241 model0.set_value (model0.get_lvalue (x, &ctxt),
7242 &placeholder_sval, &ctxt);
7243 model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
7244 region_model model1 (model0);
7246 /* They should be mergeable, and the result should be the same. */
7247 region_model merged (&mgr);
7248 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7249 ASSERT_EQ (model0, merged);
7251 /* In particular, we should have x == y. */
7252 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
7253 tristate (tristate::TS_TRUE));
7257 region_model model0 (&mgr);
7258 region_model model1 (&mgr);
7259 test_region_model_context ctxt;
7260 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7261 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
7262 region_model merged (&mgr);
7263 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7267 region_model model0 (&mgr);
7268 region_model model1 (&mgr);
7269 test_region_model_context ctxt;
7270 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7271 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
7272 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
7273 region_model merged (&mgr);
7274 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7277 // TODO: what can't we merge? need at least one such test
7279 /* TODO: various things
7280 - heap regions
7281 - value merging:
7282 - every combination, but in particular
7283 - pairs of regions
7286 /* Views. */
7288 test_region_model_context ctxt;
7289 region_model model0 (&mgr);
7291 const region *x_reg = model0.get_lvalue (x, &ctxt);
7292 const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
7293 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
7295 region_model model1 (model0);
7296 ASSERT_EQ (model1, model0);
7298 /* They should be mergeable, and the result should be the same. */
7299 region_model merged (&mgr);
7300 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7303 /* Verify that we can merge a model in which a local in an older stack
7304 frame points to a local in a more recent stack frame. */
7306 region_model model0 (&mgr);
7307 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7308 const region *q_in_first_frame = model0.get_lvalue (q, NULL);
7310 /* Push a second frame. */
7311 const region *reg_2nd_frame
7312 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7314 /* Have a pointer in the older frame point to a local in the
7315 more recent frame. */
7316 const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
7317 model0.set_value (q_in_first_frame, sval_ptr, NULL);
7319 /* Verify that it's pointing at the newer frame. */
7320 const region *reg_pointee = sval_ptr->maybe_get_region ();
7321 ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
7323 model0.canonicalize ();
7325 region_model model1 (model0);
7326 ASSERT_EQ (model0, model1);
7328 /* They should be mergeable, and the result should be the same
7329 (after canonicalization, at least). */
7330 region_model merged (&mgr);
7331 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7332 merged.canonicalize ();
7333 ASSERT_EQ (model0, merged);
7336 /* Verify that we can merge a model in which a local points to a global. */
7338 region_model model0 (&mgr);
7339 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7340 model0.set_value (model0.get_lvalue (q, NULL),
7341 model0.get_rvalue (addr_of_y, NULL), NULL);
7343 region_model model1 (model0);
7344 ASSERT_EQ (model0, model1);
7346 /* They should be mergeable, and the result should be the same
7347 (after canonicalization, at least). */
7348 region_model merged (&mgr);
7349 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7350 ASSERT_EQ (model0, merged);
7354 /* Verify that constraints are correctly merged when merging region_model
7355 instances. */
7357 static void
7358 test_constraint_merging ()
7360 tree int_0 = build_int_cst (integer_type_node, 0);
7361 tree int_5 = build_int_cst (integer_type_node, 5);
7362 tree x = build_global_decl ("x", integer_type_node);
7363 tree y = build_global_decl ("y", integer_type_node);
7364 tree z = build_global_decl ("z", integer_type_node);
7365 tree n = build_global_decl ("n", integer_type_node);
7367 region_model_manager mgr;
7368 test_region_model_context ctxt;
7370 /* model0: 0 <= (x == y) < n. */
7371 region_model model0 (&mgr);
7372 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
7373 model0.add_constraint (x, GE_EXPR, int_0, NULL);
7374 model0.add_constraint (x, LT_EXPR, n, NULL);
7376 /* model1: z != 5 && (0 <= x < n). */
7377 region_model model1 (&mgr);
7378 model1.add_constraint (z, NE_EXPR, int_5, NULL);
7379 model1.add_constraint (x, GE_EXPR, int_0, NULL);
7380 model1.add_constraint (x, LT_EXPR, n, NULL);
7382 /* They should be mergeable; the merged constraints should
7383 be: (0 <= x < n). */
7384 program_point point (program_point::origin (mgr));
7385 region_model merged (&mgr);
7386 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7388 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
7389 tristate (tristate::TS_TRUE));
7390 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
7391 tristate (tristate::TS_TRUE));
7393 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
7394 tristate (tristate::TS_UNKNOWN));
7395 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
7396 tristate (tristate::TS_UNKNOWN));
7399 /* Verify that widening_svalue::eval_condition_without_cm works as
7400 expected. */
7402 static void
7403 test_widening_constraints ()
7405 region_model_manager mgr;
7406 function_point point (program_point::origin (mgr).get_function_point ());
7407 tree int_0 = build_int_cst (integer_type_node, 0);
7408 tree int_m1 = build_int_cst (integer_type_node, -1);
7409 tree int_1 = build_int_cst (integer_type_node, 1);
7410 tree int_256 = build_int_cst (integer_type_node, 256);
7411 test_region_model_context ctxt;
7412 const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
7413 const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
7414 const svalue *w_zero_then_one_sval
7415 = mgr.get_or_create_widening_svalue (integer_type_node, point,
7416 int_0_sval, int_1_sval);
7417 const widening_svalue *w_zero_then_one
7418 = w_zero_then_one_sval->dyn_cast_widening_svalue ();
7419 ASSERT_EQ (w_zero_then_one->get_direction (),
7420 widening_svalue::DIR_ASCENDING);
7421 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
7422 tristate::TS_FALSE);
7423 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
7424 tristate::TS_FALSE);
7425 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
7426 tristate::TS_UNKNOWN);
7427 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
7428 tristate::TS_UNKNOWN);
7430 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
7431 tristate::TS_FALSE);
7432 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
7433 tristate::TS_UNKNOWN);
7434 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
7435 tristate::TS_UNKNOWN);
7436 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
7437 tristate::TS_UNKNOWN);
7439 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
7440 tristate::TS_TRUE);
7441 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
7442 tristate::TS_UNKNOWN);
7443 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
7444 tristate::TS_UNKNOWN);
7445 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
7446 tristate::TS_UNKNOWN);
7448 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
7449 tristate::TS_TRUE);
7450 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
7451 tristate::TS_TRUE);
7452 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
7453 tristate::TS_UNKNOWN);
7454 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
7455 tristate::TS_UNKNOWN);
7457 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
7458 tristate::TS_FALSE);
7459 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
7460 tristate::TS_UNKNOWN);
7461 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
7462 tristate::TS_UNKNOWN);
7463 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
7464 tristate::TS_UNKNOWN);
7466 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
7467 tristate::TS_TRUE);
7468 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
7469 tristate::TS_UNKNOWN);
7470 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
7471 tristate::TS_UNKNOWN);
7472 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
7473 tristate::TS_UNKNOWN);
/* Verify merging constraints for states simulating successive iterations
   of a loop.
   Simulate:
     for (i = 0; i < 256; i++)
       [...body...]
   i.e. this gimple:.
     i_15 = 0;
     goto   <bb 4>;

    <bb 4> :
    i_11 = PHI <i_15(2), i_23(3)>
    if (i_11 <= 255)
      goto <bb 3>;
    else
      goto [AFTER LOOP]

    <bb 3> :
    [LOOP BODY]
    i_23 = i_11 + 1;

   and thus these ops (and resultant states):
     i_11 = PHI()
       {i_11: 0}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: 0}  [constraint was a no-op]
     i_23 = i_11 + 1;
       {i_22: 1}
     i_11 = PHI()
       {i_11: WIDENED (at phi, 0, 1)}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
     i_23 = i_11 + 1;
       {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
     i_11 = PHI(); merge with state at phi above
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
         [changing meaning of "WIDENED" here]
     if (i_11 <= 255)
        T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
        F: {i_11: 256}
 */

static void
test_iteration_1 ()
{
  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree int_257 = build_int_cst (integer_type_node, 257);
  tree i = build_global_decl ("i", integer_type_node);

  test_region_model_context ctxt;

  /* model0: i: 0.  */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1.  */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Should merge "i" to a widened value.  */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  /* 0 then 1: the widening should be recognized as ascending.  */
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256  */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  /* i >= 0 follows from the ascending widening itself.  */
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Try merging with the initial state.  */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges.  */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case.  */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged.  */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;"  */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  /* i is now symbolic: WIDENED + 1, a binop over the widening svalue.  */
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state.  */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again.  */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);

  ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
}
7592 /* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
7593 all cast pointers to that region are also known to be non-NULL. */
7595 static void
7596 test_malloc_constraints ()
7598 region_model_manager mgr;
7599 region_model model (&mgr);
7600 tree p = build_global_decl ("p", ptr_type_node);
7601 tree char_star = build_pointer_type (char_type_node);
7602 tree q = build_global_decl ("q", char_star);
7603 tree null_ptr = build_int_cst (ptr_type_node, 0);
7605 const svalue *size_in_bytes
7606 = mgr.get_or_create_unknown_svalue (size_type_node);
7607 const region *reg
7608 = model.get_or_create_region_for_heap_alloc (size_in_bytes, NULL);
7609 const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
7610 model.set_value (model.get_lvalue (p, NULL), sval, NULL);
7611 model.set_value (q, p, NULL);
7613 ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
7614 ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
7615 ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
7616 ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);
7618 model.add_constraint (p, NE_EXPR, null_ptr, NULL);
7620 ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
7621 ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
7622 ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
7623 ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
7626 /* Smoketest of getting and setting the value of a variable. */
7628 static void
7629 test_var ()
7631 /* "int i;" */
7632 tree i = build_global_decl ("i", integer_type_node);
7634 tree int_17 = build_int_cst (integer_type_node, 17);
7635 tree int_m3 = build_int_cst (integer_type_node, -3);
7637 region_model_manager mgr;
7638 region_model model (&mgr);
7640 const region *i_reg = model.get_lvalue (i, NULL);
7641 ASSERT_EQ (i_reg->get_kind (), RK_DECL);
7643 /* Reading "i" should give a symbolic "initial value". */
7644 const svalue *sval_init = model.get_rvalue (i, NULL);
7645 ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
7646 ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
7647 /* ..and doing it again should give the same "initial value". */
7648 ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);
7650 /* "i = 17;". */
7651 model.set_value (i, int_17, NULL);
7652 ASSERT_EQ (model.get_rvalue (i, NULL),
7653 model.get_rvalue (int_17, NULL));
7655 /* "i = -3;". */
7656 model.set_value (i, int_m3, NULL);
7657 ASSERT_EQ (model.get_rvalue (i, NULL),
7658 model.get_rvalue (int_m3, NULL));
7660 /* Verify get_offset for "i". */
7662 region_offset offset = i_reg->get_offset (&mgr);
7663 ASSERT_EQ (offset.get_base_region (), i_reg);
7664 ASSERT_EQ (offset.get_bit_offset (), 0);
/* Smoketest of getting/setting array elements, with both constant and
   symbolic indices, including the clobbering of possibly-aliasing
   bindings.  */

static void
test_array_2 ()
{
  /* "int arr[10];" */
  tree tlen = size_int (10);
  tree arr_type
    = build_array_type (integer_type_node, build_index_type (tlen));
  tree arr = build_global_decl ("arr", arr_type);

  /* "int i;" */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);

  /* arr[0], arr[1] and arr[i] as ARRAY_REF trees.  */
  tree arr_0 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_0, NULL_TREE, NULL_TREE);
  tree arr_1 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_1, NULL_TREE, NULL_TREE);
  tree arr_i = build4 (ARRAY_REF, integer_type_node,
		       arr, i, NULL_TREE, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* "arr[0] = 17;".  */
  model.set_value (arr_0, int_17, NULL);
  /* "arr[1] = -3;".  */
  model.set_value (arr_1, int_m3, NULL);

  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));

  /* Overwrite a pre-existing binding: "arr[1] = 42;".  */
  model.set_value (arr_1, int_42, NULL);
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));

  /* Verify get_offset for "arr[0]".  */
  {
    const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
    region_offset offset = arr_0_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "arr[1]": one int's worth of bits into
     the array.  */
  {
    const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
    region_offset offset = arr_1_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }

  /* Verify get_offset for "arr[i]": the symbolic index leads to a
     symbolic (binop) byte offset.  */
  {
    const region *arr_i_reg = model.get_lvalue (arr_i, NULL);
    region_offset offset = arr_i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_symbolic_byte_offset ()->get_kind (), SK_BINOP);
  }

  /* "arr[i] = i;" - this should remove the earlier bindings.  */
  model.set_value (arr_i, i, NULL);
  ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
  ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);

  /* "arr[0] = 17;" - this should remove the arr[i] binding.  */
  model.set_value (arr_0, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
}
7743 /* Smoketest of dereferencing a pointer via MEM_REF. */
7745 static void
7746 test_mem_ref ()
7749 x = 17;
7750 p = &x;
7753 tree x = build_global_decl ("x", integer_type_node);
7754 tree int_star = build_pointer_type (integer_type_node);
7755 tree p = build_global_decl ("p", int_star);
7757 tree int_17 = build_int_cst (integer_type_node, 17);
7758 tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
7759 tree offset_0 = build_int_cst (integer_type_node, 0);
7760 tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);
7762 region_model_manager mgr;
7763 region_model model (&mgr);
7765 /* "x = 17;". */
7766 model.set_value (x, int_17, NULL);
7768 /* "p = &x;". */
7769 model.set_value (p, addr_of_x, NULL);
7771 const svalue *sval = model.get_rvalue (star_p, NULL);
7772 ASSERT_EQ (sval->maybe_get_constant (), int_17);
7775 /* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
7776 Analogous to this code:
7777 void test_6 (int a[10])
7779 __analyzer_eval (a[3] == 42); [should be UNKNOWN]
7780 a[3] = 42;
7781 __analyzer_eval (a[3] == 42); [should be TRUE]
7783 from data-model-1.c, which looks like this at the gimple level:
7784 # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
7785 int *_1 = a_10(D) + 12; # POINTER_PLUS_EXPR
7786 int _2 = *_1; # MEM_REF
7787 _Bool _3 = _2 == 42;
7788 int _4 = (int) _3;
7789 __analyzer_eval (_4);
7791 # a[3] = 42;
7792 int *_5 = a_10(D) + 12; # POINTER_PLUS_EXPR
7793 *_5 = 42; # MEM_REF
7795 # __analyzer_eval (a[3] == 42); [should be TRUE]
7796 int *_6 = a_10(D) + 12; # POINTER_PLUS_EXPR
7797 int _7 = *_6; # MEM_REF
7798 _Bool _8 = _7 == 42;
7799 int _9 = (int) _8;
7800 __analyzer_eval (_9); */
7802 static void
7803 test_POINTER_PLUS_EXPR_then_MEM_REF ()
7805 tree int_star = build_pointer_type (integer_type_node);
7806 tree a = build_global_decl ("a", int_star);
7807 tree offset_12 = build_int_cst (size_type_node, 12);
7808 tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
7809 tree offset_0 = build_int_cst (integer_type_node, 0);
7810 tree mem_ref = build2 (MEM_REF, integer_type_node,
7811 pointer_plus_expr, offset_0);
7812 region_model_manager mgr;
7813 region_model m (&mgr);
7815 tree int_42 = build_int_cst (integer_type_node, 42);
7816 m.set_value (mem_ref, int_42, NULL);
7817 ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
7820 /* Verify that malloc works. */
7822 static void
7823 test_malloc ()
7825 tree int_star = build_pointer_type (integer_type_node);
7826 tree p = build_global_decl ("p", int_star);
7827 tree n = build_global_decl ("n", integer_type_node);
7828 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
7829 n, build_int_cst (size_type_node, 4));
7831 region_model_manager mgr;
7832 test_region_model_context ctxt;
7833 region_model model (&mgr);
7835 /* "p = malloc (n * 4);". */
7836 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
7837 const region *reg
7838 = model.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
7839 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
7840 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
7841 ASSERT_EQ (model.get_capacity (reg), size_sval);
/* Verify that alloca works: the allocated region lives within the
   frame, its capacity is tracked, and pointers to it become poisoned
   when the frame is popped.  */

static void
test_alloca ()
{
  /* Build a bare "int test_fn (void)" so that we have a function
     (and thus a frame) to push.  */
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "test_fn",
			     param_types);
  allocate_struct_function (fndecl, true);

  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame.  */
  const region *frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
			NULL, &ctxt);
  /* "p = alloca (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
  /* The alloca-ed region should be a child of the frame...  */
  ASSERT_EQ (reg->get_parent_region (), frame_reg);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  /* ...and its capacity should be the requested size.  */
  ASSERT_EQ (model.get_capacity (reg), size_sval);

  /* Verify that the pointers to the alloca region are replaced by
     poisoned values when the frame is popped.  */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
}
7884 /* Verify that svalue::involves_p works. */
7886 static void
7887 test_involves_p ()
7889 region_model_manager mgr;
7890 tree int_star = build_pointer_type (integer_type_node);
7891 tree p = build_global_decl ("p", int_star);
7892 tree q = build_global_decl ("q", int_star);
7894 test_region_model_context ctxt;
7895 region_model model (&mgr);
7896 const svalue *p_init = model.get_rvalue (p, &ctxt);
7897 const svalue *q_init = model.get_rvalue (q, &ctxt);
7899 ASSERT_TRUE (p_init->involves_p (p_init));
7900 ASSERT_FALSE (p_init->involves_p (q_init));
7902 const region *star_p_reg = mgr.get_symbolic_region (p_init);
7903 const region *star_q_reg = mgr.get_symbolic_region (q_init);
7905 const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
7906 const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);
7908 ASSERT_TRUE (init_star_p->involves_p (p_init));
7909 ASSERT_FALSE (p_init->involves_p (init_star_p));
7910 ASSERT_FALSE (init_star_p->involves_p (q_init));
7911 ASSERT_TRUE (init_star_q->involves_p (q_init));
7912 ASSERT_FALSE (init_star_q->involves_p (p_init));
/* Run all of the selftests within this file.  */

void
analyzer_region_model_cc_tests ()
{
  /* Basic utilities and dumping.  */
  test_tree_cmp_on_constants ();
  test_dump ();

  /* Reading/writing values of various kinds.  */
  test_struct ();
  test_array_1 ();
  test_get_representative_tree ();

  /* svalue creation and folding.  */
  test_unique_constants ();
  test_unique_unknowns ();
  test_initial_svalue_folding ();
  test_unaryop_svalue_folding ();
  test_binop_svalue_folding ();
  test_sub_svalue_folding ();
  test_bits_within_svalue_folding ();

  /* Regions.  */
  test_descendent_of_p ();
  test_bit_range_regions ();

  /* Assignments and stack frames.  */
  test_assignment ();
  test_compound_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();

  /* Constraints, canonicalization, and state merging.  */
  test_equality_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  test_state_merging ();
  test_constraint_merging ();
  test_widening_constraints ();
  test_iteration_1 ();
  test_malloc_constraints ();

  /* Smoketests of the model as a whole.  */
  test_var ();
  test_array_2 ();
  test_mem_ref ();
  test_POINTER_PLUS_EXPR_then_MEM_REF ();
  test_malloc ();
  test_alloca ();
  test_involves_p ();
}
7956 } // namespace selftest
7958 #endif /* CHECKING_P */
7960 } // namespace ana
7962 #endif /* #if ENABLE_ANALYZER */