analyzer: handle comparisons against negated symbolic values [PR107948]
[official-gcc.git] / gcc / analyzer / region-model-manager.cc
blob 471a9272e410aec35788267b43ed90ea070c58fe
1 /* Consolidation of svalues and regions.
2 Copyright (C) 2020-2022 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #define INCLUDE_MEMORY
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tree.h"
26 #include "diagnostic-core.h"
27 #include "gimple-pretty-print.h"
28 #include "function.h"
29 #include "basic-block.h"
30 #include "gimple.h"
31 #include "gimple-iterator.h"
32 #include "diagnostic-core.h"
33 #include "graphviz.h"
34 #include "options.h"
35 #include "cgraph.h"
36 #include "tree-dfa.h"
37 #include "stringpool.h"
38 #include "convert.h"
39 #include "target.h"
40 #include "fold-const.h"
41 #include "tree-pretty-print.h"
42 #include "bitmap.h"
43 #include "analyzer/analyzer.h"
44 #include "analyzer/analyzer-logging.h"
45 #include "ordered-hash-map.h"
46 #include "options.h"
47 #include "analyzer/supergraph.h"
48 #include "sbitmap.h"
49 #include "analyzer/call-string.h"
50 #include "analyzer/program-point.h"
51 #include "analyzer/store.h"
52 #include "analyzer/region-model.h"
53 #include "analyzer/constraint-manager.h"
55 #if ENABLE_ANALYZER
57 namespace ana {
59 /* class region_model_manager. */
61 /* region_model_manager's ctor. */
63 region_model_manager::region_model_manager (logger *logger)
64 : m_logger (logger),
65 m_empty_call_string (),
66 m_next_region_id (0),
67 m_root_region (alloc_region_id ()),
68 m_stack_region (alloc_region_id (), &m_root_region),
69 m_heap_region (alloc_region_id (), &m_root_region),
70 m_unknown_NULL (NULL),
71 m_checking_feasibility (false),
72 m_max_complexity (0, 0),
73 m_code_region (alloc_region_id (), &m_root_region),
74 m_fndecls_map (), m_labels_map (),
75 m_globals_region (alloc_region_id (), &m_root_region),
76 m_globals_map (),
77 m_thread_local_region (alloc_region_id (), &m_root_region),
78 m_errno_region (alloc_region_id (), &m_thread_local_region),
79 m_store_mgr (this),
80 m_range_mgr (new bounded_ranges_manager ()),
81 m_known_fn_mgr (logger)
85 /* region_model_manager's dtor. Delete all of the managed svalues
86 and regions. */
88 region_model_manager::~region_model_manager ()
90 /* Delete consolidated svalues. */
91 for (constants_map_t::iterator iter = m_constants_map.begin ();
92 iter != m_constants_map.end (); ++iter)
93 delete (*iter).second;
94 for (unknowns_map_t::iterator iter = m_unknowns_map.begin ();
95 iter != m_unknowns_map.end (); ++iter)
96 delete (*iter).second;
97 delete m_unknown_NULL;
98 for (poisoned_values_map_t::iterator iter = m_poisoned_values_map.begin ();
99 iter != m_poisoned_values_map.end (); ++iter)
100 delete (*iter).second;
101 for (setjmp_values_map_t::iterator iter = m_setjmp_values_map.begin ();
102 iter != m_setjmp_values_map.end (); ++iter)
103 delete (*iter).second;
104 for (initial_values_map_t::iterator iter = m_initial_values_map.begin ();
105 iter != m_initial_values_map.end (); ++iter)
106 delete (*iter).second;
107 for (pointer_values_map_t::iterator iter = m_pointer_values_map.begin ();
108 iter != m_pointer_values_map.end (); ++iter)
109 delete (*iter).second;
110 for (unaryop_values_map_t::iterator iter = m_unaryop_values_map.begin ();
111 iter != m_unaryop_values_map.end (); ++iter)
112 delete (*iter).second;
113 for (binop_values_map_t::iterator iter = m_binop_values_map.begin ();
114 iter != m_binop_values_map.end (); ++iter)
115 delete (*iter).second;
116 for (sub_values_map_t::iterator iter = m_sub_values_map.begin ();
117 iter != m_sub_values_map.end (); ++iter)
118 delete (*iter).second;
119 for (auto iter : m_repeated_values_map)
120 delete iter.second;
121 for (auto iter : m_bits_within_values_map)
122 delete iter.second;
123 for (unmergeable_values_map_t::iterator iter
124 = m_unmergeable_values_map.begin ();
125 iter != m_unmergeable_values_map.end (); ++iter)
126 delete (*iter).second;
127 for (widening_values_map_t::iterator iter = m_widening_values_map.begin ();
128 iter != m_widening_values_map.end (); ++iter)
129 delete (*iter).second;
130 for (compound_values_map_t::iterator iter = m_compound_values_map.begin ();
131 iter != m_compound_values_map.end (); ++iter)
132 delete (*iter).second;
133 for (conjured_values_map_t::iterator iter = m_conjured_values_map.begin ();
134 iter != m_conjured_values_map.end (); ++iter)
135 delete (*iter).second;
136 for (auto iter : m_asm_output_values_map)
137 delete iter.second;
138 for (auto iter : m_const_fn_result_values_map)
139 delete iter.second;
141 /* Delete consolidated regions. */
142 for (fndecls_map_t::iterator iter = m_fndecls_map.begin ();
143 iter != m_fndecls_map.end (); ++iter)
144 delete (*iter).second;
145 for (labels_map_t::iterator iter = m_labels_map.begin ();
146 iter != m_labels_map.end (); ++iter)
147 delete (*iter).second;
148 for (globals_map_t::iterator iter = m_globals_map.begin ();
149 iter != m_globals_map.end (); ++iter)
150 delete (*iter).second;
151 for (string_map_t::iterator iter = m_string_map.begin ();
152 iter != m_string_map.end (); ++iter)
153 delete (*iter).second;
155 delete m_range_mgr;
158 /* Return true if C exceeds the complexity limit for svalues. */
160 bool
161 region_model_manager::too_complex_p (const complexity &c) const
163 if (c.m_max_depth > (unsigned)param_analyzer_max_svalue_depth)
164 return true;
165 return false;
168 /* If SVAL exceeds the complexity limit for svalues, delete it
169 and return true.
170 Otherwise update m_max_complexity and return false. */
172 bool
173 region_model_manager::reject_if_too_complex (svalue *sval)
175 if (m_checking_feasibility)
176 return false;
178 const complexity &c = sval->get_complexity ();
179 if (!too_complex_p (c))
181 if (m_max_complexity.m_num_nodes < c.m_num_nodes)
182 m_max_complexity.m_num_nodes = c.m_num_nodes;
183 if (m_max_complexity.m_max_depth < c.m_max_depth)
184 m_max_complexity.m_max_depth = c.m_max_depth;
185 return false;
188 delete sval;
189 return true;
192 /* Macro for imposing a complexity limit on svalues, for use within
193 region_model_manager member functions.
195 If SVAL exceeds the complexity limit, delete it and return an UNKNOWN
196 value of the same type.
197 Otherwise update m_max_complexity and carry on. */
199 #define RETURN_UNKNOWN_IF_TOO_COMPLEX(SVAL) \
200 do { \
201 svalue *sval_ = (SVAL); \
202 tree type_ = sval_->get_type (); \
203 if (reject_if_too_complex (sval_)) \
204 return get_or_create_unknown_svalue (type_); \
205 } while (0)
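/* A rough sketch (not code from this file) of how the macro above expands
   at a typical use site; "foo_svalue" is a stand-in for any concrete
   svalue class, not a real type:

     foo_svalue *sval = new foo_svalue (type, ...);
     {
       svalue *sval_ = sval;
       tree type_ = sval_->get_type ();
       if (reject_if_too_complex (sval_))
         return get_or_create_unknown_svalue (type_);
     }
     ...consolidate and return sval...

   so overly-complex svalues degrade to "unknown" of the same type rather
   than being tracked precisely.  */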
207 /* svalue consolidation. */
209 /* Return the svalue * for a constant_svalue for CST_EXPR,
210 creating it if necessary.
211 The constant_svalue instances are reused, based on pointer equality
212 of trees */
214 const svalue *
215 region_model_manager::get_or_create_constant_svalue (tree cst_expr)
217 gcc_assert (cst_expr);
218 gcc_assert (CONSTANT_CLASS_P (cst_expr));
220 constant_svalue **slot = m_constants_map.get (cst_expr);
221 if (slot)
222 return *slot;
223 constant_svalue *cst_sval = new constant_svalue (cst_expr);
224 RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval);
225 m_constants_map.put (cst_expr, cst_sval);
226 return cst_sval;
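/* A minimal usage sketch of constant consolidation (assuming "mgr" is a
   region_model_manager *; not code from this file):

     tree cst = build_int_cst (integer_type_node, 42);
     const svalue *a = mgr->get_or_create_constant_svalue (cst);
     const svalue *b = mgr->get_or_create_constant_svalue (cst);
     gcc_assert (a == b);

   Since instances are reused per tree node, svalues for the same constant
   can be compared by pointer equality.  */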
229 /* Return the svalue * for a constant_svalue for the INTEGER_CST
230 for VAL of type TYPE, creating it if necessary. */
232 const svalue *
233 region_model_manager::get_or_create_int_cst (tree type, poly_int64 val)
235 gcc_assert (type);
236 tree tree_cst = build_int_cst (type, val);
237 return get_or_create_constant_svalue (tree_cst);
240 /* Return the svalue * for an unknown_svalue for TYPE (which can be NULL),
241 creating it if necessary.
242 The unknown_svalue instances are reused, based on pointer equality
243 of the types */
245 const svalue *
246 region_model_manager::get_or_create_unknown_svalue (tree type)
248 /* Don't create unknown values when doing feasibility testing;
249 instead, create a unique svalue. */
250 if (m_checking_feasibility)
251 return create_unique_svalue (type);
253 /* Special-case NULL, so that the hash_map can use NULL as the
254 "empty" value. */
255 if (type == NULL_TREE)
257 if (!m_unknown_NULL)
258 m_unknown_NULL = new unknown_svalue (type);
259 return m_unknown_NULL;
262 unknown_svalue **slot = m_unknowns_map.get (type);
263 if (slot)
264 return *slot;
265 unknown_svalue *sval = new unknown_svalue (type);
266 m_unknowns_map.put (type, sval);
267 return sval;
270 /* Return a freshly-allocated svalue of TYPE, owned by this manager. */
272 const svalue *
273 region_model_manager::create_unique_svalue (tree type)
275 svalue *sval = new placeholder_svalue (type, "unique");
276 m_managed_dynamic_svalues.safe_push (sval);
277 return sval;
280 /* Return the svalue * for the initial value of REG, creating it if
281 necessary. */
283 const svalue *
284 region_model_manager::get_or_create_initial_value (const region *reg)
286 if (!reg->can_have_initial_svalue_p ())
287 return get_or_create_poisoned_svalue (POISON_KIND_UNINIT,
288 reg->get_type ());
290 /* The initial value of a cast is a cast of the initial value. */
291 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
293 const region *original_reg = cast_reg->get_original_region ();
294 return get_or_create_cast (cast_reg->get_type (),
295 get_or_create_initial_value (original_reg));
298 /* INIT_VAL (*UNKNOWN_PTR) -> UNKNOWN_VAL. */
299 if (reg->symbolic_for_unknown_ptr_p ())
300 return get_or_create_unknown_svalue (reg->get_type ());
302 if (initial_svalue **slot = m_initial_values_map.get (reg))
303 return *slot;
304 initial_svalue *initial_sval = new initial_svalue (reg->get_type (), reg);
305 RETURN_UNKNOWN_IF_TOO_COMPLEX (initial_sval);
306 m_initial_values_map.put (reg, initial_sval);
307 return initial_sval;
310 /* Return the svalue * for R using type TYPE, creating it if
311 necessary. */
313 const svalue *
314 region_model_manager::get_or_create_setjmp_svalue (const setjmp_record &r,
315 tree type)
317 setjmp_svalue::key_t key (r, type);
318 if (setjmp_svalue **slot = m_setjmp_values_map.get (key))
319 return *slot;
320 setjmp_svalue *setjmp_sval = new setjmp_svalue (r, type);
321 RETURN_UNKNOWN_IF_TOO_COMPLEX (setjmp_sval);
322 m_setjmp_values_map.put (key, setjmp_sval);
323 return setjmp_sval;
326 /* Return the svalue * for a poisoned value of KIND and TYPE, creating it if
327 necessary. */
329 const svalue *
330 region_model_manager::get_or_create_poisoned_svalue (enum poison_kind kind,
331 tree type)
333 poisoned_svalue::key_t key (kind, type);
334 if (poisoned_svalue **slot = m_poisoned_values_map.get (key))
335 return *slot;
336 poisoned_svalue *poisoned_sval = new poisoned_svalue (kind, type);
337 RETURN_UNKNOWN_IF_TOO_COMPLEX (poisoned_sval);
338 m_poisoned_values_map.put (key, poisoned_sval);
339 return poisoned_sval;
342 /* Return the svalue * for a pointer to POINTEE of type PTR_TYPE,
343 creating it if necessary. */
345 const svalue *
346 region_model_manager::get_ptr_svalue (tree ptr_type, const region *pointee)
348 /* If this is a symbolic region from dereferencing a pointer, and the types
349 match, then return the original pointer. */
350 if (const symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region ())
351 if (ptr_type == sym_reg->get_pointer ()->get_type ())
352 return sym_reg->get_pointer ();
354 region_svalue::key_t key (ptr_type, pointee);
355 if (region_svalue **slot = m_pointer_values_map.get (key))
356 return *slot;
357 region_svalue *sval = new region_svalue (ptr_type, pointee);
358 RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
359 m_pointer_values_map.put (key, sval);
360 return sval;
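/* Usage sketch for the fold above (assuming "mgr" is a
   region_model_manager * and "ptr_sval" an existing pointer-typed svalue;
   not code from this file): taking the address of the symbolic region
   obtained by dereferencing a pointer gives back the original pointer
   when the types match:

     const region *star_p = mgr->get_symbolic_region (ptr_sval);
     const svalue *addr
       = mgr->get_ptr_svalue (ptr_sval->get_type (), star_p);
     gcc_assert (addr == ptr_sval);

   i.e. "&*p" folds back to "p" rather than growing a new svalue.  */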
363 /* Subroutine of region_model_manager::get_or_create_unaryop.
364 Attempt to fold the inputs and return a simpler svalue *.
365 Otherwise, return NULL. */
367 const svalue *
368 region_model_manager::maybe_fold_unaryop (tree type, enum tree_code op,
369 const svalue *arg)
371 /* Ops on "unknown" are also unknown. */
372 if (arg->get_kind () == SK_UNKNOWN)
373 return get_or_create_unknown_svalue (type);
374 /* Likewise for "poisoned". */
375 else if (const poisoned_svalue *poisoned_sval
376 = arg->dyn_cast_poisoned_svalue ())
377 return get_or_create_poisoned_svalue (poisoned_sval->get_poison_kind (),
378 type);
380 gcc_assert (arg->can_have_associated_state_p ());
382 switch (op)
384 default: break;
385 case VIEW_CONVERT_EXPR:
386 case NOP_EXPR:
388 /* Handle redundant casts. */
389 if (arg->get_type ()
390 && useless_type_conversion_p (arg->get_type (), type))
391 return arg;
393 /* Fold "cast<TYPE> (cast <INNER_TYPE> (innermost_arg))
394 => "cast<TYPE> (innermost_arg)",
395 unless INNER_TYPE is narrower than TYPE. */
396 if (const svalue *innermost_arg = arg->maybe_undo_cast ())
398 tree inner_type = arg->get_type ();
399 if (TYPE_SIZE (type)
400 && TYPE_SIZE (inner_type)
401 && (fold_binary (LE_EXPR, boolean_type_node,
402 TYPE_SIZE (type), TYPE_SIZE (inner_type))
403 == boolean_true_node))
404 return maybe_fold_unaryop (type, op, innermost_arg);
406 /* Avoid creating symbolic regions for pointer casts by
407 simplifying (T*)(&REGION) to ((T*)&REGION). */
408 if (const region_svalue *region_sval = arg->dyn_cast_region_svalue ())
409 if (POINTER_TYPE_P (type)
410 && region_sval->get_type ()
411 && POINTER_TYPE_P (region_sval->get_type ()))
412 return get_ptr_svalue (type, region_sval->get_pointee ());
414 break;
415 case TRUTH_NOT_EXPR:
417 /* Invert comparisons e.g. "!(x == y)" => "x != y". */
418 if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
419 if (TREE_CODE_CLASS (binop->get_op ()) == tcc_comparison)
421 enum tree_code inv_op
422 = invert_tree_comparison (binop->get_op (),
423 HONOR_NANS (binop->get_type ()));
424 if (inv_op != ERROR_MARK)
425 return get_or_create_binop (binop->get_type (), inv_op,
426 binop->get_arg0 (),
427 binop->get_arg1 ());
430 break;
431 case NEGATE_EXPR:
433 /* -(-(VAL)) is VAL, for integer types. */
434 if (const unaryop_svalue *unaryop = arg->dyn_cast_unaryop_svalue ())
435 if (unaryop->get_op () == NEGATE_EXPR
436 && type == unaryop->get_type ()
437 && type
438 && INTEGRAL_TYPE_P (type))
439 return unaryop->get_arg ();
441 break;
444 /* Constants. */
445 if (tree cst = arg->maybe_get_constant ())
446 if (tree result = fold_unary (op, type, cst))
448 if (CONSTANT_CLASS_P (result))
449 return get_or_create_constant_svalue (result);
451 /* fold_unary can return casts of constants; try to handle them. */
452 if (op != NOP_EXPR
453 && type
454 && TREE_CODE (result) == NOP_EXPR
455 && CONSTANT_CLASS_P (TREE_OPERAND (result, 0)))
457 const svalue *inner_cst
458 = get_or_create_constant_svalue (TREE_OPERAND (result, 0));
459 return get_or_create_cast (type,
460 get_or_create_cast (TREE_TYPE (result),
461 inner_cst));
465 return NULL;
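/* Worked example of the TRUTH_NOT_EXPR case above (assuming "mgr" is a
   region_model_manager * and "x", "y" are existing int-typed svalues;
   not code from this file):

     const svalue *eq
       = mgr->get_or_create_binop (boolean_type_node, EQ_EXPR, x, y);
     const svalue *neg
       = mgr->get_or_create_unaryop (boolean_type_node, TRUTH_NOT_EXPR, eq);

   "neg" is the binop_svalue for "x != y" (via invert_tree_comparison),
   not a NOT node wrapped around the comparison.  Likewise, NEGATE_EXPR
   applied twice to an integral value folds back to the original
   operand.  */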
468 /* Return the svalue * for a unary operation OP on ARG with a result of
469 type TYPE, creating it if necessary. */
471 const svalue *
472 region_model_manager::get_or_create_unaryop (tree type, enum tree_code op,
473 const svalue *arg)
475 if (const svalue *folded = maybe_fold_unaryop (type, op, arg))
476 return folded;
477 unaryop_svalue::key_t key (type, op, arg);
478 if (unaryop_svalue **slot = m_unaryop_values_map.get (key))
479 return *slot;
480 unaryop_svalue *unaryop_sval = new unaryop_svalue (type, op, arg);
481 RETURN_UNKNOWN_IF_TOO_COMPLEX (unaryop_sval);
482 m_unaryop_values_map.put (key, unaryop_sval);
483 return unaryop_sval;
486 /* Get a tree code for a cast to DST_TYPE from SRC_TYPE.
487 Use NOP_EXPR if possible (e.g. to help fold_unary convert casts
488 of 0 to (T*) to simple pointer constants), but use FIX_TRUNC_EXPR
489 and VIEW_CONVERT_EXPR for cases that fold_unary would otherwise crash
490 on. */
492 static enum tree_code
493 get_code_for_cast (tree dst_type, tree src_type)
495 gcc_assert (dst_type);
496 if (!src_type)
497 return NOP_EXPR;
499 if (TREE_CODE (src_type) == REAL_TYPE)
501 if (TREE_CODE (dst_type) == INTEGER_TYPE)
502 return FIX_TRUNC_EXPR;
503 else
504 return VIEW_CONVERT_EXPR;
507 return NOP_EXPR;
510 /* Return the svalue * for a cast of ARG to type TYPE, creating it
511 if necessary. */
513 const svalue *
514 region_model_manager::get_or_create_cast (tree type, const svalue *arg)
516 gcc_assert (type);
518 /* No-op if the types are the same. */
519 if (type == arg->get_type ())
520 return arg;
522 /* Don't attempt to handle casts involving vector types for now. */
523 if (TREE_CODE (type) == VECTOR_TYPE
524 || (arg->get_type ()
525 && TREE_CODE (arg->get_type ()) == VECTOR_TYPE))
526 return get_or_create_unknown_svalue (type);
528 enum tree_code op = get_code_for_cast (type, arg->get_type ());
529 return get_or_create_unaryop (type, op, arg);
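/* Example of how get_code_for_cast is used here (assuming "mgr" is a
   region_model_manager * and "dval" an svalue of type double; not code
   from this file):

     const svalue *i = mgr->get_or_create_cast (integer_type_node, dval);

   becomes get_or_create_unaryop (integer_type_node, FIX_TRUNC_EXPR, dval),
   since a real-to-integer conversion is one of the cases fold_unary would
   otherwise crash on with NOP_EXPR; integer and pointer casts keep using
   NOP_EXPR.  */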
532 /* Subroutine of region_model_manager::maybe_fold_binop for handling
533 (TYPE)(COMPOUND_SVAL BIT_AND_EXPR CST) that may have been generated by
534 optimize_bit_field_compare, where CST is from ARG1.
536 Support masking out bits from a compound_svalue for comparing a bitfield
537 against a value, as generated by optimize_bit_field_compare for
538 BITFIELD == VALUE.
540 If COMPOUND_SVAL has a value for the appropriate bits, return it,
541 shifted accordingly.
542 Otherwise return NULL. */
544 const svalue *
545 region_model_manager::
546 maybe_undo_optimize_bit_field_compare (tree type,
547 const compound_svalue *compound_sval,
548 tree cst,
549 const svalue *arg1)
551 if (type != unsigned_char_type_node)
552 return NULL;
554 const binding_map &map = compound_sval->get_map ();
555 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst);
556 /* If "mask" is a contiguous range of set bits, see if the
557 compound_sval has a value for those bits. */
558 bit_range bits (0, 0);
559 if (!bit_range::from_mask (mask, &bits))
560 return NULL;
562 bit_range bound_bits (bits);
563 if (BYTES_BIG_ENDIAN)
564 bound_bits = bit_range (BITS_PER_UNIT - bits.get_next_bit_offset (),
565 bits.m_size_in_bits);
566 const concrete_binding *conc
567 = get_store_manager ()->get_concrete_binding (bound_bits);
568 const svalue *sval = map.get (conc);
569 if (!sval)
570 return NULL;
572 /* We have a value;
573 shift it by the correct number of bits. */
574 const svalue *lhs = get_or_create_cast (type, sval);
575 HOST_WIDE_INT bit_offset = bits.get_start_bit_offset ().to_shwi ();
576 const svalue *shift_sval = get_or_create_int_cst (type, bit_offset);
577 const svalue *shifted_sval = get_or_create_binop (type, LSHIFT_EXPR,
578 lhs, shift_sval);
579 /* Reapply the mask (needed for negative
580 signed bitfields). */
581 return get_or_create_binop (type, BIT_AND_EXPR,
582 shifted_sval, arg1);
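/* Worked example for the helper above: for a bitfield comparison that
   optimize_bit_field_compare rewrote as
     (unsigned char) (COMPOUND & 0x38) == CST
   the mask 0x38 covers bits 3-5, so bit_range::from_mask yields
   {start=3, size=3}.  If the compound value has a binding for exactly
   those bits (on a little-endian target; big-endian flips the bound
   range), that value is cast to unsigned char, shifted left by 3 and
   re-masked, reconstructing the left-hand side of the comparison.  */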
585 /* Subroutine of region_model_manager::get_or_create_binop.
586 Attempt to fold the inputs and return a simpler svalue *.
587 Otherwise, return NULL. */
589 const svalue *
590 region_model_manager::maybe_fold_binop (tree type, enum tree_code op,
591 const svalue *arg0,
592 const svalue *arg1)
594 tree cst0 = arg0->maybe_get_constant ();
595 tree cst1 = arg1->maybe_get_constant ();
596 /* (CST OP CST). */
597 if (cst0 && cst1)
599 if (tree result = fold_binary (op, type, cst0, cst1))
600 if (CONSTANT_CLASS_P (result))
601 return get_or_create_constant_svalue (result);
604 if (FLOAT_TYPE_P (type)
605 || (arg0->get_type () && FLOAT_TYPE_P (arg0->get_type ()))
606 || (arg1->get_type () && FLOAT_TYPE_P (arg1->get_type ())))
607 return NULL;
609 switch (op)
611 default:
612 break;
613 case POINTER_PLUS_EXPR:
614 case PLUS_EXPR:
615 /* (VAL + 0) -> VAL. */
616 if (cst1 && zerop (cst1))
617 return get_or_create_cast (type, arg0);
618 break;
619 case MINUS_EXPR:
620 /* (VAL - 0) -> VAL. */
621 if (cst1 && zerop (cst1))
622 return get_or_create_cast (type, arg0);
623 /* (0 - VAL) -> -VAL. */
624 if (cst0 && zerop (cst0))
625 return get_or_create_unaryop (type, NEGATE_EXPR, arg1);
626 break;
627 case MULT_EXPR:
628 /* (VAL * 0). */
629 if (cst1 && zerop (cst1) && INTEGRAL_TYPE_P (type))
630 return get_or_create_constant_svalue (build_int_cst (type, 0));
631 /* (VAL * 1) -> VAL. */
632 if (cst1 && integer_onep (cst1))
633 return arg0;
634 break;
635 case BIT_AND_EXPR:
636 if (cst1)
638 if (zerop (cst1) && INTEGRAL_TYPE_P (type))
639 /* "(ARG0 & 0)" -> "0". */
640 return get_or_create_constant_svalue (build_int_cst (type, 0));
642 if (const compound_svalue *compound_sval
643 = arg0->dyn_cast_compound_svalue ())
644 if (const svalue *sval
645 = maybe_undo_optimize_bit_field_compare (type,
646 compound_sval,
647 cst1, arg1))
648 return sval;
650 if (arg0->get_type () == boolean_type_node
651 && arg1->get_type () == boolean_type_node)
653 /* If both operands are _Bool, then... */
654 /* ..."(1 & x) -> x". */
655 if (cst0 && !zerop (cst0))
656 return get_or_create_cast (type, arg1);
657 /* ..."(x & 1) -> x". */
658 if (cst1 && !zerop (cst1))
659 return get_or_create_cast (type, arg0);
660 /* ..."(0 & x) -> 0". */
661 if (cst0 && zerop (cst0))
662 return get_or_create_int_cst (type, 0);
663 /* ..."(x & 0) -> 0". */
664 if (cst1 && zerop (cst1))
665 return get_or_create_int_cst (type, 0);
667 break;
668 case BIT_IOR_EXPR:
669 if (arg0->get_type () == boolean_type_node
670 && arg1->get_type () == boolean_type_node)
672 /* If both operands are _Bool, then... */
673 /* ..."(1 | x) -> 1". */
674 if (cst0 && !zerop (cst0))
675 return get_or_create_int_cst (type, 1);
676 /* ..."(x | 1) -> 1". */
677 if (cst1 && !zerop (cst1))
678 return get_or_create_int_cst (type, 1);
679 /* ..."(0 | x) -> x". */
680 if (cst0 && zerop (cst0))
681 return get_or_create_cast (type, arg1);
682 /* ..."(x | 0) -> x". */
683 if (cst1 && zerop (cst1))
684 return get_or_create_cast (type, arg0);
686 break;
687 case TRUTH_ANDIF_EXPR:
688 case TRUTH_AND_EXPR:
689 if (cst1)
691 if (zerop (cst1) && INTEGRAL_TYPE_P (type))
692 /* "(ARG0 && 0)" -> "0". */
693 return get_or_create_constant_svalue (build_int_cst (type, 0));
694 else
695 /* "(ARG0 && nonzero-cst)" -> "ARG0". */
696 return get_or_create_cast (type, arg0);
698 break;
699 case TRUTH_ORIF_EXPR:
700 case TRUTH_OR_EXPR:
701 if (cst1)
703 if (zerop (cst1))
704 /* "(ARG0 || 0)" -> "ARG0". */
705 return get_or_create_cast (type, arg0);
706 else
707 /* "(ARG0 && nonzero-cst)" -> "nonzero-cst". */
708 return get_or_create_cast (type, arg1);
710 break;
713 /* For associative ops, fold "(X op CST_A) op CST_B)" to
714 "X op (CST_A op CST_B)". */
715 if (cst1 && associative_tree_code (op))
716 if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
717 if (binop->get_op () == op
718 && binop->get_arg1 ()->maybe_get_constant ()
719 && type == binop->get_type ()
720 && type == binop->get_arg0 ()->get_type ()
721 && type == binop->get_arg1 ()->get_type ())
722 return get_or_create_binop
723 (type, op, binop->get_arg0 (),
724 get_or_create_binop (type, op,
725 binop->get_arg1 (), arg1));
727 /* associative_tree_code is false for POINTER_PLUS_EXPR, but we
728 can fold:
729 "(PTR ptr+ CST_A) ptr+ CST_B)" to "PTR ptr+ (CST_A ptr+ CST_B)"
730 e.g. in data-model-1.c: test_4c. */
731 if (cst1 && op == POINTER_PLUS_EXPR)
732 if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
733 if (binop->get_op () == POINTER_PLUS_EXPR)
734 if (binop->get_arg1 ()->maybe_get_constant ())
735 return get_or_create_binop
736 (type, op, binop->get_arg0 (),
737 get_or_create_binop (size_type_node, op,
738 binop->get_arg1 (), arg1));
740 /* etc. */
742 return NULL;
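/* Worked example of the associativity rule above (assuming "mgr" is a
   region_model_manager * and "x" an svalue of type int; not code from
   this file):

     const svalue *three = mgr->get_or_create_int_cst (integer_type_node, 3);
     const svalue *four = mgr->get_or_create_int_cst (integer_type_node, 4);
     const svalue *tmp
       = mgr->get_or_create_binop (integer_type_node, PLUS_EXPR, x, three);
     const svalue *sum
       = mgr->get_or_create_binop (integer_type_node, PLUS_EXPR, tmp, four);

   "sum" consolidates to the same binop_svalue as "x + 7", since the two
   constants are folded together first.  */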
745 /* Return the svalue * for a binary operation OP on ARG0 and ARG1
746 with a result of type TYPE, creating it if necessary. */
748 const svalue *
749 region_model_manager::get_or_create_binop (tree type, enum tree_code op,
750 const svalue *arg0,
751 const svalue *arg1)
753 /* For commutative ops, put any constant on the RHS. */
754 if (arg0->maybe_get_constant () && commutative_tree_code (op))
755 std::swap (arg0, arg1);
757 if (const svalue *folded = maybe_fold_binop (type, op, arg0, arg1))
758 return folded;
760 /* Ops on "unknown"/"poisoned" are unknown (unless we were able to fold
761 it via an identity in maybe_fold_binop). */
762 if (!arg0->can_have_associated_state_p ()
763 || !arg1->can_have_associated_state_p ())
764 return get_or_create_unknown_svalue (type);
766 binop_svalue::key_t key (type, op, arg0, arg1);
767 if (binop_svalue **slot = m_binop_values_map.get (key))
768 return *slot;
769 binop_svalue *binop_sval = new binop_svalue (type, op, arg0, arg1);
770 RETURN_UNKNOWN_IF_TOO_COMPLEX (binop_sval);
771 m_binop_values_map.put (key, binop_sval);
772 return binop_sval;
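/* Note that, because constants are canonicalized to the right-hand side
   for commutative codes, requests for "3 + x" and "x + 3" consolidate to
   the same binop_svalue instance, so they compare equal by pointer.  */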
775 /* Subroutine of region_model_manager::get_or_create_sub_svalue.
776 Return a folded svalue, or NULL. */
778 const svalue *
779 region_model_manager::maybe_fold_sub_svalue (tree type,
780 const svalue *parent_svalue,
781 const region *subregion)
783 /* Subvalues of "unknown"/"poisoned" are unknown. */
784 if (!parent_svalue->can_have_associated_state_p ())
785 return get_or_create_unknown_svalue (type);
787 /* If we have a subregion of a zero-fill, it's zero. */
788 if (const unaryop_svalue *unary
789 = parent_svalue->dyn_cast_unaryop_svalue ())
791 if (unary->get_op () == NOP_EXPR
792 || unary->get_op () == VIEW_CONVERT_EXPR)
793 if (tree cst = unary->get_arg ()->maybe_get_constant ())
794 if (zerop (cst) && type)
796 const svalue *cst_sval
797 = get_or_create_constant_svalue (cst);
798 return get_or_create_cast (type, cst_sval);
802 /* Handle getting individual chars from a STRING_CST. */
803 if (tree cst = parent_svalue->maybe_get_constant ())
804 if (TREE_CODE (cst) == STRING_CST)
806 /* If we have a concrete 1-byte access within the parent region... */
807 byte_range subregion_bytes (0, 0);
808 if (subregion->get_relative_concrete_byte_range (&subregion_bytes)
809 && subregion_bytes.m_size_in_bytes == 1
810 && type)
812 /* ...then attempt to get that char from the STRING_CST. */
813 HOST_WIDE_INT hwi_start_byte
814 = subregion_bytes.m_start_byte_offset.to_shwi ();
815 tree cst_idx
816 = build_int_cst_type (size_type_node, hwi_start_byte);
817 if (const svalue *char_sval
818 = maybe_get_char_from_string_cst (cst, cst_idx))
819 return get_or_create_cast (type, char_sval);
823 if (const initial_svalue *init_sval
824 = parent_svalue->dyn_cast_initial_svalue ())
826 /* SUB(INIT(r)).FIELD -> INIT(r.FIELD)
827 i.e.
828 Subvalue(InitialValue(R1), FieldRegion(R2, F))
829 -> InitialValue(FieldRegion(R1, F)). */
830 if (const field_region *field_reg = subregion->dyn_cast_field_region ())
832 const region *field_reg_new
833 = get_field_region (init_sval->get_region (),
834 field_reg->get_field ());
835 return get_or_create_initial_value (field_reg_new);
837 /* SUB(INIT(r))[ELEMENT] -> INIT(r[ELEMENT])
838 i.e.
839 Subvalue(InitialValue(R1), ElementRegion(R2, IDX))
840 -> InitialValue(ElementRegion(R1, IDX)). */
841 if (const element_region *element_reg = subregion->dyn_cast_element_region ())
843 const region *element_reg_new
844 = get_element_region (init_sval->get_region (),
845 element_reg->get_type (),
846 element_reg->get_index ());
847 return get_or_create_initial_value (element_reg_new);
851 if (const repeated_svalue *repeated_sval
852 = parent_svalue->dyn_cast_repeated_svalue ())
853 if (type)
854 return get_or_create_cast (type, repeated_sval->get_inner_svalue ());
856 return NULL;
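/* Example of the STRING_CST case above: a concrete one-byte read of
   element [6] of the initializer "hello world", with a known type,
   resolves via maybe_get_char_from_string_cst to the constant_svalue
   for 'w' (cast to the requested type), rather than to a symbolic
   sub_svalue.  */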
859 /* Return the svalue * for extracting a subvalue of type TYPE from
860 PARENT_SVALUE based on SUBREGION, creating it if necessary. */
862 const svalue *
863 region_model_manager::get_or_create_sub_svalue (tree type,
864 const svalue *parent_svalue,
865 const region *subregion)
867 if (const svalue *folded
868 = maybe_fold_sub_svalue (type, parent_svalue, subregion))
869 return folded;
871 sub_svalue::key_t key (type, parent_svalue, subregion);
872 if (sub_svalue **slot = m_sub_values_map.get (key))
873 return *slot;
874 sub_svalue *sub_sval
875 = new sub_svalue (type, parent_svalue, subregion);
876 RETURN_UNKNOWN_IF_TOO_COMPLEX (sub_sval);
877 m_sub_values_map.put (key, sub_sval);
878 return sub_sval;
881 /* Subroutine of region_model_manager::get_or_create_repeated_svalue.
882 Return a folded svalue, or NULL. */
884 const svalue *
885 region_model_manager::maybe_fold_repeated_svalue (tree type,
886 const svalue *outer_size,
887 const svalue *inner_svalue)
889 /* Repeated "unknown"/"poisoned" is unknown. */
890 if (!outer_size->can_have_associated_state_p ()
891 || !inner_svalue->can_have_associated_state_p ())
892 return get_or_create_unknown_svalue (type);
894 /* If INNER_SVALUE is the same size as OUTER_SIZE,
895 turn into simply a cast. */
896 if (tree cst_outer_num_bytes = outer_size->maybe_get_constant ())
898 HOST_WIDE_INT num_bytes_inner_svalue
899 = int_size_in_bytes (inner_svalue->get_type ());
900 if (num_bytes_inner_svalue != -1)
901 if (num_bytes_inner_svalue
902 == (HOST_WIDE_INT)tree_to_uhwi (cst_outer_num_bytes))
904 if (type)
905 return get_or_create_cast (type, inner_svalue);
906 else
907 return inner_svalue;
911 /* Handle zero-fill of a specific type. */
912 if (tree cst = inner_svalue->maybe_get_constant ())
913 if (zerop (cst) && type)
914 return get_or_create_cast (type, inner_svalue);
916 return NULL;
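/* Example of the zero-fill case above: a repeated zero byte, such as the
   value written when modeling memset (REG, 0, sizeof (struct foo)),
   folds to a cast of the zero constant to TYPE when TYPE is known, so
   later reads of subregions can in turn fold to zero via
   maybe_fold_sub_svalue above.  */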
919 /* Return the svalue * of type TYPE in which INNER_SVALUE is repeated
920 enough times to be of size OUTER_SIZE, creating it if necessary.
921 e.g. for filling buffers with a constant value. */
923 const svalue *
924 region_model_manager::get_or_create_repeated_svalue (tree type,
925 const svalue *outer_size,
926 const svalue *inner_svalue)
928 if (const svalue *folded
929 = maybe_fold_repeated_svalue (type, outer_size, inner_svalue))
930 return folded;
932 repeated_svalue::key_t key (type, outer_size, inner_svalue);
933 if (repeated_svalue **slot = m_repeated_values_map.get (key))
934 return *slot;
935 repeated_svalue *repeated_sval
936 = new repeated_svalue (type, outer_size, inner_svalue);
937 RETURN_UNKNOWN_IF_TOO_COMPLEX (repeated_sval);
938 m_repeated_values_map.put (key, repeated_sval);
939 return repeated_sval;
942 /* Attempt to get the bit_range for FIELD within a RECORD_TYPE.
943 Return true and write the result to OUT if successful.
944 Return false otherwise. */
946 static bool
947 get_bit_range_for_field (tree field, bit_range *out)
949 bit_size_t bit_size;
950 if (!int_size_in_bits (TREE_TYPE (field), &bit_size))
951 return false;
952 int field_bit_offset = int_bit_position (field);
953 *out = bit_range (field_bit_offset, bit_size);
954 return true;
957 /* Attempt to get the byte_range for FIELD within a RECORD_TYPE.
958 Return true and write the result to OUT if successful.
959 Return false otherwise. */
961 static bool
962 get_byte_range_for_field (tree field, byte_range *out)
964 bit_range field_bits (0, 0);
965 if (!get_bit_range_for_field (field, &field_bits))
966 return false;
967 return field_bits.as_byte_range (out);
970 /* Attempt to determine if there is a specific field within RECORD_TYPE
971 at BYTES. If so, return it, and write the location of BYTES relative
972 to the field to *OUT_RANGE_WITHIN_FIELD.
973 Otherwise, return NULL_TREE.
974 For example, given:
975 struct foo { uint32 a; uint32 b; };
977 bytes = {bytes 6-7} (of foo)
978 we have bytes 2-3 of field b. */
980 static tree
981 get_field_at_byte_range (tree record_type, const byte_range &bytes,
982 byte_range *out_range_within_field)
984 bit_offset_t bit_offset = bytes.m_start_byte_offset * BITS_PER_UNIT;
986 tree field = get_field_at_bit_offset (record_type, bit_offset);
987 if (!field)
988 return NULL_TREE;
990 byte_range field_bytes (0,0);
991 if (!get_byte_range_for_field (field, &field_bytes))
992 return NULL_TREE;
994 /* Is BYTES fully within field_bytes? */
995 byte_range bytes_within_field (0,0);
996 if (!field_bytes.contains_p (bytes, &bytes_within_field))
997 return NULL_TREE;
999 *out_range_within_field = bytes_within_field;
1000 return field;
1003 /* Subroutine of region_model_manager::get_or_create_bits_within.
1004 Return a folded svalue, or NULL. */
1006 const svalue *
1007 region_model_manager::maybe_fold_bits_within_svalue (tree type,
1008 const bit_range &bits,
1009 const svalue *inner_svalue)
1011 tree inner_type = inner_svalue->get_type ();
1012 /* Fold:
1013 BITS_WITHIN ((0, sizeof (VAL), VAL))
1014 to:
1015 CAST(TYPE, VAL). */
1016 if (bits.m_start_bit_offset == 0 && inner_type)
1018 bit_size_t inner_type_size;
1019 if (int_size_in_bits (inner_type, &inner_type_size))
1020 if (inner_type_size == bits.m_size_in_bits)
1022 if (type)
1023 return get_or_create_cast (type, inner_svalue);
1024 else
1025 return inner_svalue;
1029 /* Kind-specific folding. */
1030 if (const svalue *sval
1031 = inner_svalue->maybe_fold_bits_within (type, bits, this))
1032 return sval;
1034 byte_range bytes (0,0);
1035 if (bits.as_byte_range (&bytes) && inner_type)
1036 switch (TREE_CODE (inner_type))
1038 default:
1039 break;
1040 case ARRAY_TYPE:
1042 /* Fold:
1043 BITS_WITHIN (range, KIND(REG))
1044 to:
1045 BITS_WITHIN (range - offsetof(ELEMENT), KIND(REG.ELEMENT))
1046 if the range is a byte-range fully within one ELEMENT. */
1047 tree element_type = TREE_TYPE (inner_type);
1048 HOST_WIDE_INT element_byte_size
1049 = int_size_in_bytes (element_type);
1050 if (element_byte_size > 0)
1052 HOST_WIDE_INT start_idx
1053 = (bytes.get_start_byte_offset ().to_shwi ()
1054 / element_byte_size);
1055 HOST_WIDE_INT last_idx
1056 = (bytes.get_last_byte_offset ().to_shwi ()
1057 / element_byte_size);
1058 if (start_idx == last_idx)
1060 if (const initial_svalue *initial_sval
1061 = inner_svalue->dyn_cast_initial_svalue ())
1063 bit_offset_t start_of_element
1064 = start_idx * element_byte_size * BITS_PER_UNIT;
1065 bit_range bits_within_element
1066 (bits.m_start_bit_offset - start_of_element,
1067 bits.m_size_in_bits);
1068 const svalue *idx_sval
1069 = get_or_create_int_cst (integer_type_node, start_idx);
1070 const region *element_reg =
1071 get_element_region (initial_sval->get_region (),
1072 element_type, idx_sval);
1073 const svalue *element_reg_sval
1074 = get_or_create_initial_value (element_reg);
1075 return get_or_create_bits_within (type,
1076 bits_within_element,
1077 element_reg_sval);
1082 break;
1083 case RECORD_TYPE:
1085 /* Fold:
1086 BYTES_WITHIN (range, KIND(REG))
1087 to:
1088 BYTES_WITHIN (range - offsetof(FIELD), KIND(REG.FIELD))
1089 if the range is fully within FIELD. */
1090 byte_range bytes_within_field (0, 0);
1091 if (tree field = get_field_at_byte_range (inner_type, bytes,
1092 &bytes_within_field))
1094 if (const initial_svalue *initial_sval
1095 = inner_svalue->dyn_cast_initial_svalue ())
1097 const region *field_reg =
1098 get_field_region (initial_sval->get_region (), field);
1099 const svalue *initial_reg_sval
1100 = get_or_create_initial_value (field_reg);
1101 return get_or_create_bits_within
1102 (type,
1103 bytes_within_field.as_bit_range (),
1104 initial_reg_sval);
1108 break;
1110 return NULL;
1113 /* Return the svalue * of type TYPE for extracting BITS from INNER_SVALUE,
1114 creating it if necessary. */
1116 const svalue *
1117 region_model_manager::get_or_create_bits_within (tree type,
1118 const bit_range &bits,
1119 const svalue *inner_svalue)
1121 if (const svalue *folded
1122 = maybe_fold_bits_within_svalue (type, bits, inner_svalue))
1123 return folded;
1125 bits_within_svalue::key_t key (type, bits, inner_svalue);
1126 if (bits_within_svalue **slot = m_bits_within_values_map.get (key))
1127 return *slot;
1128 bits_within_svalue *bits_within_sval
1129 = new bits_within_svalue (type, bits, inner_svalue);
1130 RETURN_UNKNOWN_IF_TOO_COMPLEX (bits_within_sval);
1131 m_bits_within_values_map.put (key, bits_within_sval);
1132 return bits_within_sval;
1135 /* Return the svalue * that decorates ARG as being unmergeable,
1136 creating it if necessary. */
1138 const svalue *
1139 region_model_manager::get_or_create_unmergeable (const svalue *arg)
1141 if (arg->get_kind () == SK_UNMERGEABLE)
1142 return arg;
1144 if (unmergeable_svalue **slot = m_unmergeable_values_map.get (arg))
1145 return *slot;
1146 unmergeable_svalue *unmergeable_sval = new unmergeable_svalue (arg);
1147 RETURN_UNKNOWN_IF_TOO_COMPLEX (unmergeable_sval);
1148 m_unmergeable_values_map.put (arg, unmergeable_sval);
1149 return unmergeable_sval;
1152 /* Return the svalue * of type TYPE for the merger of value BASE_SVAL
1153 and ITER_SVAL at POINT, creating it if necessary. */
1155 const svalue *
1156 region_model_manager::
1157 get_or_create_widening_svalue (tree type,
1158 const function_point &point,
1159 const svalue *base_sval,
1160 const svalue *iter_sval)
1162 gcc_assert (base_sval->get_kind () != SK_WIDENING);
1163 gcc_assert (iter_sval->get_kind () != SK_WIDENING);
1164 widening_svalue::key_t key (type, point, base_sval, iter_sval);
1165 if (widening_svalue **slot = m_widening_values_map.get (key))
1166 return *slot;
1167 widening_svalue *widening_sval
1168 = new widening_svalue (type, point, base_sval, iter_sval);
1169 RETURN_UNKNOWN_IF_TOO_COMPLEX (widening_sval);
1170 m_widening_values_map.put (key, widening_sval);
1171 return widening_sval;
1174 /* Return the svalue * of type TYPE for the compound values in MAP,
1175 creating it if necessary. */
1177 const svalue *
1178 region_model_manager::get_or_create_compound_svalue (tree type,
1179 const binding_map &map)
1181 compound_svalue::key_t tmp_key (type, &map);
1182 if (compound_svalue **slot = m_compound_values_map.get (tmp_key))
1183 return *slot;
1184 compound_svalue *compound_sval
1185 = new compound_svalue (type, map);
1186 RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval);
1187 /* Use make_key rather than reusing the key, so that we use a
1188 ptr to compound_sval's binding_map, rather than the MAP param. */
1189 m_compound_values_map.put (compound_sval->make_key (), compound_sval);
1190 return compound_sval;
1193 /* class conjured_purge. */
1195 /* Purge state relating to SVAL. */
1197 void
1198 conjured_purge::purge (const conjured_svalue *sval) const
1200 m_model->purge_state_involving (sval, m_ctxt);
1203 /* Return the svalue * of type TYPE for the value conjured for ID_REG
1204 at STMT, creating it if necessary.
1205 Use P to purge existing state from the svalue, for the case where a
1206 conjured_svalue would be reused along an execution path. */
1208 const svalue *
1209 region_model_manager::get_or_create_conjured_svalue (tree type,
1210 const gimple *stmt,
1211 const region *id_reg,
1212 const conjured_purge &p)
1214 conjured_svalue::key_t key (type, stmt, id_reg);
1215 if (conjured_svalue **slot = m_conjured_values_map.get (key))
1217 const conjured_svalue *sval = *slot;
1218 /* We're reusing an existing conjured_svalue, perhaps from a different
1219 state within this analysis, or perhaps from an earlier state on this
1220 execution path. For the latter, purge any state involving the "new"
1221 svalue from the current program_state. */
1222 p.purge (sval);
1223 return sval;
1225 conjured_svalue *conjured_sval
1226 = new conjured_svalue (type, stmt, id_reg);
1227 RETURN_UNKNOWN_IF_TOO_COMPLEX (conjured_sval);
1228 m_conjured_values_map.put (key, conjured_sval);
1229 return conjured_sval;
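/* For example, a call whose outputs the analyzer cannot model precisely
   (say, inside a loop) gets a conjured_svalue keyed by (type, stmt,
   region); on a second traversal of the loop the same key is found
   again, and P purges any state recorded against the previous
   iteration's value from the current program_state before the svalue is
   reused.  */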
1232 /* Subroutine of region_model_manager::get_or_create_asm_output_svalue.
1233 Return a folded svalue, or NULL. */
1235 const svalue *
1236 region_model_manager::
1237 maybe_fold_asm_output_svalue (tree type,
1238 const vec<const svalue *> &inputs)
1240 /* Unknown inputs should lead to unknown results. */
1241 for (const auto &iter : inputs)
1242 if (iter->get_kind () == SK_UNKNOWN)
1243 return get_or_create_unknown_svalue (type);
1245 return NULL;
1248 /* Return the svalue * of type TYPE for OUTPUT_IDX of the deterministic
1249 asm stmt ASM_STMT, given INPUTS as inputs. */
1251 const svalue *
1252 region_model_manager::
1253 get_or_create_asm_output_svalue (tree type,
1254 const gasm *asm_stmt,
1255 unsigned output_idx,
1256 const vec<const svalue *> &inputs)
1258 gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);
1260 if (const svalue *folded
1261 = maybe_fold_asm_output_svalue (type, inputs))
1262 return folded;
1264 const char *asm_string = gimple_asm_string (asm_stmt);
1265 const unsigned noutputs = gimple_asm_noutputs (asm_stmt);
1267 asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
1268 if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
1269 return *slot;
1270 asm_output_svalue *asm_output_sval
1271 = new asm_output_svalue (type, asm_string, output_idx, noutputs, inputs);
1272 RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
1273 m_asm_output_values_map.put (key, asm_output_sval);
1274 return asm_output_sval;
1277 /* Return the svalue * of type TYPE for OUTPUT_IDX of a deterministic
1278 asm stmt with string ASM_STRING with NUM_OUTPUTS outputs, given
1279 INPUTS as inputs. */
1281 const svalue *
1282 region_model_manager::
1283 get_or_create_asm_output_svalue (tree type,
1284 const char *asm_string,
1285 unsigned output_idx,
1286 unsigned num_outputs,
1287 const vec<const svalue *> &inputs)
1289 gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);
1291 if (const svalue *folded
1292 = maybe_fold_asm_output_svalue (type, inputs))
1293 return folded;
1295 asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
1296 if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
1297 return *slot;
1298 asm_output_svalue *asm_output_sval
1299 = new asm_output_svalue (type, asm_string, output_idx, num_outputs, inputs);
1300 RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
1301 m_asm_output_values_map.put (key, asm_output_sval);
1302 return asm_output_sval;
1305 /* Return the svalue * of type TYPE for the result of a call to FNDECL
1306 with __attribute__((const)), given INPUTS as inputs. */
1308 const svalue *
1309 region_model_manager::
1310 get_or_create_const_fn_result_svalue (tree type,
1311 tree fndecl,
1312 const vec<const svalue *> &inputs)
1314 gcc_assert (type);
1315 gcc_assert (fndecl);
1316 gcc_assert (DECL_P (fndecl));
1317 gcc_assert (TREE_READONLY (fndecl));
1318 gcc_assert (inputs.length () <= const_fn_result_svalue::MAX_INPUTS);
1320 const_fn_result_svalue::key_t key (type, fndecl, inputs);
1321 if (const_fn_result_svalue **slot = m_const_fn_result_values_map.get (key))
1322 return *slot;
1323 const_fn_result_svalue *const_fn_result_sval
1324 = new const_fn_result_svalue (type, fndecl, inputs);
1325 RETURN_UNKNOWN_IF_TOO_COMPLEX (const_fn_result_sval);
1326 m_const_fn_result_values_map.put (key, const_fn_result_sval);
1327 return const_fn_result_sval;
1330 /* Given STRING_CST, a STRING_CST, and BYTE_OFFSET_CST, a constant byte offset,
1331 attempt to get the character at that offset, returning either
1332 the svalue for the character constant, or NULL if unsuccessful. */
1334 const svalue *
1335 region_model_manager::maybe_get_char_from_string_cst (tree string_cst,
1336 tree byte_offset_cst)
1338 gcc_assert (TREE_CODE (string_cst) == STRING_CST);
1340 /* Adapted from fold_read_from_constant_string. */
1341 scalar_int_mode char_mode;
1342 if (TREE_CODE (byte_offset_cst) == INTEGER_CST
1343 && compare_tree_int (byte_offset_cst,
1344 TREE_STRING_LENGTH (string_cst)) < 0
1345 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string_cst))),
1346 &char_mode)
1347 && GET_MODE_SIZE (char_mode) == 1)
1349 tree char_cst
1350 = build_int_cst_type (TREE_TYPE (TREE_TYPE (string_cst)),
1351 (TREE_STRING_POINTER (string_cst)
1352 [TREE_INT_CST_LOW (byte_offset_cst)]));
1353 return get_or_create_constant_svalue (char_cst);
1355 return NULL;
1358 /* region consolidation. */
1360 /* Return the region for FNDECL, creating it if necessary. */
1362 const function_region *
1363 region_model_manager::get_region_for_fndecl (tree fndecl)
1365 gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);
1367 function_region **slot = m_fndecls_map.get (fndecl);
1368 if (slot)
1369 return *slot;
1370 function_region *reg
1371 = new function_region (alloc_region_id (), &m_code_region, fndecl);
1372 m_fndecls_map.put (fndecl, reg);
1373 return reg;
1376 /* Return the region for LABEL, creating it if necessary. */
1378 const label_region *
1379 region_model_manager::get_region_for_label (tree label)
1381 gcc_assert (TREE_CODE (label) == LABEL_DECL);
1383 label_region **slot = m_labels_map.get (label);
1384 if (slot)
1385 return *slot;
1387 tree fndecl = DECL_CONTEXT (label);
1388 gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);
1390 const function_region *func_reg = get_region_for_fndecl (fndecl);
1391 label_region *reg
1392 = new label_region (alloc_region_id (), func_reg, label);
1393 m_labels_map.put (label, reg);
1394 return reg;
1397 /* Return the region for EXPR, creating it if necessary. */
1399 const decl_region *
1400 region_model_manager::get_region_for_global (tree expr)
1402 gcc_assert (TREE_CODE (expr) == VAR_DECL);
1404 decl_region **slot = m_globals_map.get (expr);
1405 if (slot)
1406 return *slot;
1407 decl_region *reg
1408 = new decl_region (alloc_region_id (), &m_globals_region, expr);
1409 m_globals_map.put (expr, reg);
1410 return reg;
1413 /* Return the region for an unknown access of type REGION_TYPE,
1414 creating it if necessary.
1415 This is a symbolic_region, where the pointer is an unknown_svalue
1416 of type &REGION_TYPE. */
1418 const region *
1419 region_model_manager::get_unknown_symbolic_region (tree region_type)
1421 tree ptr_type = region_type ? build_pointer_type (region_type) : NULL_TREE;
1422 const svalue *unknown_ptr = get_or_create_unknown_svalue (ptr_type);
1423 return get_symbolic_region (unknown_ptr);
1426 /* Return the region that describes accessing field FIELD of PARENT,
1427 creating it if necessary. */
1429 const region *
1430 region_model_manager::get_field_region (const region *parent, tree field)
1432 gcc_assert (TREE_CODE (field) == FIELD_DECL);
1434 /* (*UNKNOWN_PTR).field is (*UNKNOWN_PTR_OF_&FIELD_TYPE). */
1435 if (parent->symbolic_for_unknown_ptr_p ())
1436 return get_unknown_symbolic_region (TREE_TYPE (field));
1438 field_region::key_t key (parent, field);
1439 if (field_region *reg = m_field_regions.get (key))
1440 return reg;
1442 field_region *field_reg
1443 = new field_region (alloc_region_id (), parent, field);
1444 m_field_regions.put (key, field_reg);
1445 return field_reg;
1448 /* Return the region that describes accessing the element of type
1449 ELEMENT_TYPE at index INDEX of PARENT, creating it if necessary. */
1451 const region *
1452 region_model_manager::get_element_region (const region *parent,
1453 tree element_type,
1454 const svalue *index)
1456 /* (UNKNOWN_PTR[IDX]) is (UNKNOWN_PTR). */
1457 if (parent->symbolic_for_unknown_ptr_p ())
1458 return get_unknown_symbolic_region (element_type);
1460 element_region::key_t key (parent, element_type, index);
1461 if (element_region *reg = m_element_regions.get (key))
1462 return reg;
1464 element_region *element_reg
1465 = new element_region (alloc_region_id (), parent, element_type, index);
1466 m_element_regions.put (key, element_reg);
1467 return element_reg;
1470 /* Return the region that describes accessing the subregion of type
1471 ELEMENT_TYPE at offset BYTE_OFFSET within PARENT, creating it if
1472 necessary. */
1474 const region *
1475 region_model_manager::get_offset_region (const region *parent,
1476 tree type,
1477 const svalue *byte_offset)
1479 /* (UNKNOWN_PTR + OFFSET) is (UNKNOWN_PTR). */
1480 if (parent->symbolic_for_unknown_ptr_p ())
1481 return get_unknown_symbolic_region (type);
1483 /* If BYTE_OFFSET is zero, return PARENT (cast to TYPE if needed). */
1484 if (tree cst_offset = byte_offset->maybe_get_constant ())
1485 if (zerop (cst_offset))
1486 return get_cast_region (parent, type);
1488 /* Fold OFFSET_REGION(OFFSET_REGION(REG, X), Y)
1489 to OFFSET_REGION(REG, (X + Y)). */
1490 if (const offset_region *parent_offset_reg
1491 = parent->dyn_cast_offset_region ())
1493 const svalue *sval_x = parent_offset_reg->get_byte_offset ();
1494 const svalue *sval_sum
1495 = get_or_create_binop (byte_offset->get_type (),
1496 PLUS_EXPR, sval_x, byte_offset);
1497 return get_offset_region (parent->get_parent_region (), type, sval_sum);
1500 offset_region::key_t key (parent, type, byte_offset);
1501 if (offset_region *reg = m_offset_regions.get (key))
1502 return reg;
1504 offset_region *offset_reg
1505 = new offset_region (alloc_region_id (), parent, type, byte_offset);
1506 m_offset_regions.put (key, offset_reg);
1507 return offset_reg;
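/* Example of the fold above: requesting a byte offset Y within an
   existing OFFSET_REGION (REG, X) does not nest regions; the two offsets
   are combined with PLUS_EXPR and the result is OFFSET_REGION (REG, X + Y),
   keeping chains of offset regions shallow.  */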
1510 /* Return the region that describes accessing the subregion of type
1511 TYPE of size BYTE_SIZE_SVAL within PARENT, creating it if necessary. */
1513 const region *
1514 region_model_manager::get_sized_region (const region *parent,
1515 tree type,
1516 const svalue *byte_size_sval)
1518 if (parent->symbolic_for_unknown_ptr_p ())
1519 return get_unknown_symbolic_region (type);
1521 if (byte_size_sval->get_type () != size_type_node)
1522 byte_size_sval = get_or_create_cast (size_type_node, byte_size_sval);
1524 /* If PARENT is already that size, return it. */
1525 const svalue *parent_byte_size_sval = parent->get_byte_size_sval (this);
1526 if (tree parent_size_cst = parent_byte_size_sval->maybe_get_constant ())
1527 if (tree size_cst = byte_size_sval->maybe_get_constant ())
1529 tree comparison
1530 = fold_binary (EQ_EXPR, boolean_type_node, parent_size_cst, size_cst);
1531 if (comparison == boolean_true_node)
1532 return parent;
1535 sized_region::key_t key (parent, type, byte_size_sval);
1536 if (sized_region *reg = m_sized_regions.get (key))
1537 return reg;
1539 sized_region *sized_reg
1540 = new sized_region (alloc_region_id (), parent, type, byte_size_sval);
1541 m_sized_regions.put (key, sized_reg);
1542 return sized_reg;
1545 /* Return the region that describes accessing ORIGINAL_REGION as if
1546 it were of type TYPE, creating it if necessary. */
1548 const region *
1549 region_model_manager::get_cast_region (const region *original_region,
1550 tree type)
1552 /* If types match, return ORIGINAL_REGION. */
1553 if (type == original_region->get_type ())
1554 return original_region;
1556 if (original_region->symbolic_for_unknown_ptr_p ())
1557 return get_unknown_symbolic_region (type);
1559 cast_region::key_t key (original_region, type);
1560 if (cast_region *reg = m_cast_regions.get (key))
1561 return reg;
1563 cast_region *cast_reg
1564 = new cast_region (alloc_region_id (), original_region, type);
1565 m_cast_regions.put (key, cast_reg);
1566 return cast_reg;
1569 /* Return the frame_region for call to FUN from CALLING_FRAME, creating it
1570 if necessary. CALLING_FRAME may be NULL. */
1572 const frame_region *
1573 region_model_manager::get_frame_region (const frame_region *calling_frame,
1574 function *fun)
1576 int index = calling_frame ? calling_frame->get_index () + 1 : 0;
1578 frame_region::key_t key (calling_frame, fun);
1579 if (frame_region *reg = m_frame_regions.get (key))
1580 return reg;
1582 frame_region *frame_reg
1583 = new frame_region (alloc_region_id (), &m_stack_region, calling_frame,
1584 fun, index);
1585 m_frame_regions.put (key, frame_reg);
1586 return frame_reg;
1589 /* Return the region that describes dereferencing SVAL, creating it
1590 if necessary. */
1592 const region *
1593 region_model_manager::get_symbolic_region (const svalue *sval)
1595 symbolic_region::key_t key (&m_root_region, sval);
1596 if (symbolic_region *reg = m_symbolic_regions.get (key))
1597 return reg;
1599 symbolic_region *symbolic_reg
1600 = new symbolic_region (alloc_region_id (), &m_root_region, sval);
1601 m_symbolic_regions.put (key, symbolic_reg);
1602 return symbolic_reg;
1605 /* Return the region that describes accessing STRING_CST, creating it
1606 if necessary. */
1608 const string_region *
1609 region_model_manager::get_region_for_string (tree string_cst)
1611 gcc_assert (TREE_CODE (string_cst) == STRING_CST);
1613 string_region **slot = m_string_map.get (string_cst);
1614 if (slot)
1615 return *slot;
1616 string_region *reg
1617 = new string_region (alloc_region_id (), &m_root_region, string_cst);
1618 m_string_map.put (string_cst, reg);
1619 return reg;
1622 /* Return the region that describes accessing BITS within PARENT as TYPE,
1623 creating it if necessary. */
1625 const region *
1626 region_model_manager::get_bit_range (const region *parent, tree type,
1627 const bit_range &bits)
1629 gcc_assert (parent);
1631 if (parent->symbolic_for_unknown_ptr_p ())
1632 return get_unknown_symbolic_region (type);
1634 bit_range_region::key_t key (parent, type, bits);
1635 if (bit_range_region *reg = m_bit_range_regions.get (key))
1636 return reg;
1638 bit_range_region *bit_range_reg
1639 = new bit_range_region (alloc_region_id (), parent, type, bits);
1640 m_bit_range_regions.put (key, bit_range_reg);
1641 return bit_range_reg;
1644 /* Return the region that describes accessing the IDX-th variadic argument
1645 within PARENT_FRAME, creating it if necessary. */
1647 const var_arg_region *
1648 region_model_manager::get_var_arg_region (const frame_region *parent_frame,
1649 unsigned idx)
1651 gcc_assert (parent_frame);
1653 var_arg_region::key_t key (parent_frame, idx);
1654 if (var_arg_region *reg = m_var_arg_regions.get (key))
1655 return reg;
1657 var_arg_region *var_arg_reg
1658 = new var_arg_region (alloc_region_id (), parent_frame, idx);
1659 m_var_arg_regions.put (key, var_arg_reg);
1660 return var_arg_reg;
1663 /* If we see a tree code we don't know how to handle, rather than
1664 ICE or generate bogus results, create a dummy region, and notify
1665 CTXT so that it can mark the new state as being not properly
1666 modelled. The exploded graph can then stop exploring that path,
1667 since any diagnostics we might issue will have questionable
1668 validity. */
1670 const region *
1671 region_model_manager::
1672 get_region_for_unexpected_tree_code (region_model_context *ctxt,
1673 tree t,
1674 const dump_location_t &loc)
1676 tree type = TYPE_P (t) ? t : TREE_TYPE (t);
1677 region *new_reg
1678 = new unknown_region (alloc_region_id (), &m_root_region, type);
1679 if (ctxt)
1680 ctxt->on_unexpected_tree_code (t, loc);
1681 return new_reg;
1684 /* Return a region describing a heap-allocated block of memory.
1685 Reuse an existing heap_allocated_region if its id is not within
1686 BASE_REGS_IN_USE. */
1688 const region *
1689 region_model_manager::
1690 get_or_create_region_for_heap_alloc (const sbitmap &base_regs_in_use)
1692 /* Try to reuse an existing region, if it's unreferenced in the
1693 client state. */
1694 for (auto existing_reg : m_managed_dynamic_regions)
1695 if (!bitmap_bit_p (base_regs_in_use, existing_reg->get_id ()))
1696 if (existing_reg->get_kind () == RK_HEAP_ALLOCATED)
1697 return existing_reg;
1699 /* All existing ones (if any) are in use; create a new one. */
1700 region *reg
1701 = new heap_allocated_region (alloc_region_id (), &m_heap_region);
1702 m_managed_dynamic_regions.safe_push (reg);
1703 return reg;
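/* Reusing ids in this way keeps the number of base regions bounded along
   paths that repeatedly allocate and free heap blocks, rather than
   minting a fresh region for every visit to an allocation site.  */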
1706 /* Return a new region describing a block of memory allocated within FRAME. */
1708 const region *
1709 region_model_manager::create_region_for_alloca (const frame_region *frame)
1711 gcc_assert (frame);
1712 region *reg = new alloca_region (alloc_region_id (), frame);
1713 m_managed_dynamic_regions.safe_push (reg);
1714 return reg;
1717 /* Log OBJ to LOGGER. */
1719 template <typename T>
1720 static void
1721 log_managed_object (logger *logger, const T *obj)
1723 logger->start_log_line ();
1724 pretty_printer *pp = logger->get_printer ();
1725 pp_string (pp, " ");
1726 obj->dump_to_pp (pp, true);
1727 logger->end_log_line ();
1730 /* Specialization for frame_region, which also logs the count of locals
1731 managed by the frame_region. */
1733 template <>
1734 void
1735 log_managed_object (logger *logger, const frame_region *obj)
1737 logger->start_log_line ();
1738 pretty_printer *pp = logger->get_printer ();
1739 pp_string (pp, " ");
1740 obj->dump_to_pp (pp, true);
1741 pp_printf (pp, " [with %i region(s) for locals]", obj->get_num_locals ());
1742 logger->end_log_line ();
1745 /* Dump the number of objects that were managed by UNIQ_MAP to LOGGER.
1746 If SHOW_OBJS is true, also dump the objects themselves. */
1748 template <typename K, typename T>
1749 static void
1750 log_uniq_map (logger *logger, bool show_objs, const char *title,
1751 const hash_map<K, T*> &uniq_map)
1753 logger->log (" # %s: %li", title, (long)uniq_map.elements ());
1754 if (!show_objs)
1755 return;
1756 auto_vec<const T *> vec_objs (uniq_map.elements ());
1757 for (typename hash_map<K, T*>::iterator iter = uniq_map.begin ();
1758 iter != uniq_map.end (); ++iter)
1759 vec_objs.quick_push ((*iter).second);
1761 vec_objs.qsort (T::cmp_ptr_ptr);
1763 unsigned i;
1764 const T *obj;
1765 FOR_EACH_VEC_ELT (vec_objs, i, obj)
1766 log_managed_object<T> (logger, obj);
1769 /* Dump the number of objects that were managed by MAP to LOGGER.
1770 If SHOW_OBJS is true, also dump the objects themselves. */
1772 template <typename T>
1773 static void
1774 log_uniq_map (logger *logger, bool show_objs, const char *title,
1775 const consolidation_map<T> &map)
1777 logger->log (" # %s: %li", title, (long)map.elements ());
1778 if (!show_objs)
1779 return;
1781 auto_vec<const T *> vec_objs (map.elements ());
1782 for (typename consolidation_map<T>::iterator iter = map.begin ();
1783 iter != map.end (); ++iter)
1784 vec_objs.quick_push ((*iter).second);
1786 vec_objs.qsort (T::cmp_ptr_ptr);
1788 unsigned i;
1789 const T *obj;
1790 FOR_EACH_VEC_ELT (vec_objs, i, obj)
1791 log_managed_object<T> (logger, obj);
1794 /* Dump the number of objects of each class that were managed by this
1795 manager to LOGGER.
1796 If SHOW_OBJS is true, also dump the objects themselves. */
1798 void
1799 region_model_manager::log_stats (logger *logger, bool show_objs) const
1801 LOG_SCOPE (logger);
1802 logger->log ("call string consolidation");
1803 m_empty_call_string.recursive_log (logger);
1804 logger->log ("svalue consolidation");
1805 log_uniq_map (logger, show_objs, "constant_svalue", m_constants_map);
1806 log_uniq_map (logger, show_objs, "unknown_svalue", m_unknowns_map);
1807 if (m_unknown_NULL)
1808 log_managed_object (logger, m_unknown_NULL);
1809 log_uniq_map (logger, show_objs, "poisoned_svalue", m_poisoned_values_map);
1810 log_uniq_map (logger, show_objs, "setjmp_svalue", m_setjmp_values_map);
1811 log_uniq_map (logger, show_objs, "initial_svalue", m_initial_values_map);
1812 log_uniq_map (logger, show_objs, "region_svalue", m_pointer_values_map);
1813 log_uniq_map (logger, show_objs, "unaryop_svalue", m_unaryop_values_map);
1814 log_uniq_map (logger, show_objs, "binop_svalue", m_binop_values_map);
1815 log_uniq_map (logger, show_objs, "sub_svalue", m_sub_values_map);
1816 log_uniq_map (logger, show_objs, "repeated_svalue", m_repeated_values_map);
1817 log_uniq_map (logger, show_objs, "bits_within_svalue",
1818 m_bits_within_values_map);
1819 log_uniq_map (logger, show_objs, "unmergeable_svalue",
1820 m_unmergeable_values_map);
1821 log_uniq_map (logger, show_objs, "widening_svalue", m_widening_values_map);
1822 log_uniq_map (logger, show_objs, "compound_svalue", m_compound_values_map);
1823 log_uniq_map (logger, show_objs, "conjured_svalue", m_conjured_values_map);
1824 log_uniq_map (logger, show_objs, "asm_output_svalue",
1825 m_asm_output_values_map);
1826 log_uniq_map (logger, show_objs, "const_fn_result_svalue",
1827 m_const_fn_result_values_map);
1829 logger->log ("max accepted svalue num_nodes: %i",
1830 m_max_complexity.m_num_nodes);
1831 logger->log ("max accepted svalue max_depth: %i",
1832 m_max_complexity.m_max_depth);
1834 logger->log ("region consolidation");
1835 logger->log (" next region id: %i", m_next_region_id);
1836 log_uniq_map (logger, show_objs, "function_region", m_fndecls_map);
1837 log_uniq_map (logger, show_objs, "label_region", m_labels_map);
1838 log_uniq_map (logger, show_objs, "decl_region for globals", m_globals_map);
1839 log_uniq_map (logger, show_objs, "field_region", m_field_regions);
1840 log_uniq_map (logger, show_objs, "element_region", m_element_regions);
1841 log_uniq_map (logger, show_objs, "offset_region", m_offset_regions);
1842 log_uniq_map (logger, show_objs, "sized_region", m_sized_regions);
1843 log_uniq_map (logger, show_objs, "cast_region", m_cast_regions);
1844 log_uniq_map (logger, show_objs, "frame_region", m_frame_regions);
1845 log_uniq_map (logger, show_objs, "symbolic_region", m_symbolic_regions);
1846 log_uniq_map (logger, show_objs, "string_region", m_string_map);
1847 log_uniq_map (logger, show_objs, "bit_range_region", m_bit_range_regions);
1848 log_uniq_map (logger, show_objs, "var_arg_region", m_var_arg_regions);
1849 logger->log (" # managed dynamic regions: %i",
1850 m_managed_dynamic_regions.length ());
1851 m_store_mgr.log_stats (logger, show_objs);
1852 m_range_mgr->log_stats (logger, show_objs);
1855 /* Dump the number of objects of each class that were managed by this
1856 manager to LOGGER.
1857 If SHOW_OBJS is true, also dump the objects themselves.
1858 This is here so it can use log_uniq_map. */
1860 void
1861 store_manager::log_stats (logger *logger, bool show_objs) const
1863 LOG_SCOPE (logger);
1864 log_uniq_map (logger, show_objs, "concrete_binding",
1865 m_concrete_binding_key_mgr);
1866 log_uniq_map (logger, show_objs, "symbolic_binding",
1867 m_symbolic_binding_key_mgr);
1870 /* Emit a warning showing DECL_REG->tracked_p () for use in DejaGnu tests
1871 (using -fdump-analyzer-untracked). */
1873 static void
1874 dump_untracked_region (const decl_region *decl_reg)
1876 tree decl = decl_reg->get_decl ();
1877 if (TREE_CODE (decl) != VAR_DECL)
1878 return;
1879 /* For now, don't emit the status of decls in the constant pool, to avoid
1880 differences in DejaGnu test results between targets that use these vs
1881 those that don't.
1882 (Eventually these decls should probably be untracked and we should test
1883 for that, but that's not stage 4 material). */
1884 if (DECL_IN_CONSTANT_POOL (decl))
1885 return;
1886 warning_at (DECL_SOURCE_LOCATION (decl), 0,
1887 "track %qD: %s",
1888 decl, (decl_reg->tracked_p () ? "yes" : "no"));
1891 /* Implementation of -fdump-analyzer-untracked. */
1893 void
1894 region_model_manager::dump_untracked_regions () const
1896 for (auto iter : m_globals_map)
1898 const decl_region *decl_reg = iter.second;
1899 dump_untracked_region (decl_reg);
1901 for (auto frame_iter : m_frame_regions)
1903 const frame_region *frame_reg = frame_iter.second;
1904 frame_reg->dump_untracked_regions ();
1908 void
1909 frame_region::dump_untracked_regions () const
1911 for (auto iter : m_locals)
1913 const decl_region *decl_reg = iter.second;
1914 dump_untracked_region (decl_reg);
1918 } // namespace ana
1920 #endif /* #if ENABLE_ANALYZER */