gcc/analyzer/region-model-manager.cc
1 /* Consolidation of svalues and regions.
2 Copyright (C) 2020-2023 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #define INCLUDE_MEMORY
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tree.h"
26 #include "diagnostic-core.h"
27 #include "gimple-pretty-print.h"
28 #include "function.h"
29 #include "basic-block.h"
30 #include "gimple.h"
31 #include "gimple-iterator.h"
32 #include "diagnostic-core.h"
33 #include "graphviz.h"
34 #include "options.h"
35 #include "cgraph.h"
36 #include "tree-dfa.h"
37 #include "stringpool.h"
38 #include "convert.h"
39 #include "target.h"
40 #include "fold-const.h"
41 #include "tree-pretty-print.h"
42 #include "bitmap.h"
43 #include "analyzer/analyzer.h"
44 #include "analyzer/analyzer-logging.h"
45 #include "ordered-hash-map.h"
46 #include "options.h"
47 #include "analyzer/supergraph.h"
48 #include "sbitmap.h"
49 #include "analyzer/call-string.h"
50 #include "analyzer/program-point.h"
51 #include "analyzer/store.h"
52 #include "analyzer/region-model.h"
53 #include "analyzer/constraint-manager.h"
55 #if ENABLE_ANALYZER
57 namespace ana {
59 /* class region_model_manager. */
61 /* region_model_manager's ctor. */
63 region_model_manager::region_model_manager (logger *logger)
64 : m_logger (logger),
65 m_empty_call_string (),
66 m_next_region_id (0),
67 m_root_region (alloc_region_id ()),
68 m_stack_region (alloc_region_id (), &m_root_region),
69 m_heap_region (alloc_region_id (), &m_root_region),
70 m_unknown_NULL (NULL),
71 m_checking_feasibility (false),
72 m_max_complexity (0, 0),
73 m_code_region (alloc_region_id (), &m_root_region),
74 m_fndecls_map (), m_labels_map (),
75 m_globals_region (alloc_region_id (), &m_root_region),
76 m_globals_map (),
77 m_thread_local_region (alloc_region_id (), &m_root_region),
78 m_errno_region (alloc_region_id (), &m_thread_local_region),
79 m_store_mgr (this),
80 m_range_mgr (new bounded_ranges_manager ()),
81 m_known_fn_mgr (logger)
85 /* region_model_manager's dtor. Delete all of the managed svalues
86 and regions. */
88 region_model_manager::~region_model_manager ()
90 /* Delete consolidated svalues. */
91 for (constants_map_t::iterator iter = m_constants_map.begin ();
92 iter != m_constants_map.end (); ++iter)
93 delete (*iter).second;
94 for (unknowns_map_t::iterator iter = m_unknowns_map.begin ();
95 iter != m_unknowns_map.end (); ++iter)
96 delete (*iter).second;
97 delete m_unknown_NULL;
98 for (poisoned_values_map_t::iterator iter = m_poisoned_values_map.begin ();
99 iter != m_poisoned_values_map.end (); ++iter)
100 delete (*iter).second;
101 for (setjmp_values_map_t::iterator iter = m_setjmp_values_map.begin ();
102 iter != m_setjmp_values_map.end (); ++iter)
103 delete (*iter).second;
104 for (initial_values_map_t::iterator iter = m_initial_values_map.begin ();
105 iter != m_initial_values_map.end (); ++iter)
106 delete (*iter).second;
107 for (pointer_values_map_t::iterator iter = m_pointer_values_map.begin ();
108 iter != m_pointer_values_map.end (); ++iter)
109 delete (*iter).second;
110 for (unaryop_values_map_t::iterator iter = m_unaryop_values_map.begin ();
111 iter != m_unaryop_values_map.end (); ++iter)
112 delete (*iter).second;
113 for (binop_values_map_t::iterator iter = m_binop_values_map.begin ();
114 iter != m_binop_values_map.end (); ++iter)
115 delete (*iter).second;
116 for (sub_values_map_t::iterator iter = m_sub_values_map.begin ();
117 iter != m_sub_values_map.end (); ++iter)
118 delete (*iter).second;
119 for (auto iter : m_repeated_values_map)
120 delete iter.second;
121 for (auto iter : m_bits_within_values_map)
122 delete iter.second;
123 for (unmergeable_values_map_t::iterator iter
124 = m_unmergeable_values_map.begin ();
125 iter != m_unmergeable_values_map.end (); ++iter)
126 delete (*iter).second;
127 for (widening_values_map_t::iterator iter = m_widening_values_map.begin ();
128 iter != m_widening_values_map.end (); ++iter)
129 delete (*iter).second;
130 for (compound_values_map_t::iterator iter = m_compound_values_map.begin ();
131 iter != m_compound_values_map.end (); ++iter)
132 delete (*iter).second;
133 for (conjured_values_map_t::iterator iter = m_conjured_values_map.begin ();
134 iter != m_conjured_values_map.end (); ++iter)
135 delete (*iter).second;
136 for (auto iter : m_asm_output_values_map)
137 delete iter.second;
138 for (auto iter : m_const_fn_result_values_map)
139 delete iter.second;
141 /* Delete consolidated regions. */
142 for (fndecls_map_t::iterator iter = m_fndecls_map.begin ();
143 iter != m_fndecls_map.end (); ++iter)
144 delete (*iter).second;
145 for (labels_map_t::iterator iter = m_labels_map.begin ();
146 iter != m_labels_map.end (); ++iter)
147 delete (*iter).second;
148 for (globals_map_t::iterator iter = m_globals_map.begin ();
149 iter != m_globals_map.end (); ++iter)
150 delete (*iter).second;
151 for (string_map_t::iterator iter = m_string_map.begin ();
152 iter != m_string_map.end (); ++iter)
153 delete (*iter).second;
155 delete m_range_mgr;
158 /* Return true if C exceeds the complexity limit for svalues. */
160 bool
161 region_model_manager::too_complex_p (const complexity &c) const
163 if (c.m_max_depth > (unsigned)param_analyzer_max_svalue_depth)
164 return true;
165 return false;
168 /* If SVAL exceeds the complexity limit for svalues, delete it
169 and return true.
170 Otherwise update m_max_complexity and return false. */
172 bool
173 region_model_manager::reject_if_too_complex (svalue *sval)
175 if (m_checking_feasibility)
176 return false;
178 const complexity &c = sval->get_complexity ();
179 if (!too_complex_p (c))
181 if (m_max_complexity.m_num_nodes < c.m_num_nodes)
182 m_max_complexity.m_num_nodes = c.m_num_nodes;
183 if (m_max_complexity.m_max_depth < c.m_max_depth)
184 m_max_complexity.m_max_depth = c.m_max_depth;
185 return false;
188 delete sval;
189 return true;
192 /* Macro for imposing a complexity limit on svalues, for use within
193 region_model_manager member functions.
195 If SVAL exceeds the complexity limit, delete it and return an UNKNOWN
196 value of the same type.
197 Otherwise update m_max_complexity and carry on. */
199 #define RETURN_UNKNOWN_IF_TOO_COMPLEX(SVAL) \
200 do { \
201 svalue *sval_ = (SVAL); \
202 tree type_ = sval_->get_type (); \
203 if (reject_if_too_complex (sval_)) \
204 return get_or_create_unknown_svalue (type_); \
205 } while (0)
207 /* svalue consolidation. */
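/* All of the get_or_create_* functions in this section follow the same
   consolidation pattern.  A minimal sketch, using a hypothetical
   "foo_svalue" class and "m_foo_values_map" member purely for illustration
   (neither is part of this file):

     const svalue *
     region_model_manager::get_or_create_foo_svalue (foo_svalue::key_t key)
     {
       if (foo_svalue **slot = m_foo_values_map.get (key))
         return *slot;                           // reuse the consolidated instance
       foo_svalue *foo_sval = new foo_svalue (key);
       RETURN_UNKNOWN_IF_TOO_COMPLEX (foo_sval); // fall back to "unknown" if too big
       m_foo_values_map.put (key, foo_sval);     // memoize for later reuse
       return foo_sval;
     }  */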
209 /* Return the svalue * for a constant_svalue for CST_EXPR,
210 creating it if necessary.
211 The constant_svalue instances are reused, based on pointer equality
212 of trees */
214 const svalue *
215 region_model_manager::get_or_create_constant_svalue (tree cst_expr)
217 gcc_assert (cst_expr);
218 gcc_assert (CONSTANT_CLASS_P (cst_expr));
220 constant_svalue **slot = m_constants_map.get (cst_expr);
221 if (slot)
222 return *slot;
223 constant_svalue *cst_sval = new constant_svalue (cst_expr);
224 RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval);
225 m_constants_map.put (cst_expr, cst_sval);
226 return cst_sval;
229 /* Return the svalue * for a constant_svalue for the INTEGER_CST
230 for VAL of type TYPE, creating it if necessary. */
232 const svalue *
233 region_model_manager::get_or_create_int_cst (tree type, poly_int64 val)
235 gcc_assert (type);
236 tree tree_cst = build_int_cst (type, val);
237 return get_or_create_constant_svalue (tree_cst);
240 /* Return the svalue * for the constant_svalue for the NULL pointer
241 of POINTER_TYPE, creating it if necessary. */
243 const svalue *
244 region_model_manager::get_or_create_null_ptr (tree pointer_type)
246 gcc_assert (pointer_type);
247 gcc_assert (POINTER_TYPE_P (pointer_type));
248 return get_or_create_int_cst (pointer_type, 0);
251 /* Return the svalue * for an unknown_svalue for TYPE (which can be NULL),
252 creating it if necessary.
253 The unknown_svalue instances are reused, based on pointer equality
254 of the types */
256 const svalue *
257 region_model_manager::get_or_create_unknown_svalue (tree type)
259 /* Don't create unknown values when doing feasibility testing;
260 instead, create a unique svalue. */
261 if (m_checking_feasibility)
262 return create_unique_svalue (type);
264 /* Special-case NULL, so that the hash_map can use NULL as the
265 "empty" value. */
266 if (type == NULL_TREE)
268 if (!m_unknown_NULL)
269 m_unknown_NULL = new unknown_svalue (type);
270 return m_unknown_NULL;
273 unknown_svalue **slot = m_unknowns_map.get (type);
274 if (slot)
275 return *slot;
276 unknown_svalue *sval = new unknown_svalue (type);
277 m_unknowns_map.put (type, sval);
278 return sval;
281 /* Return a freshly-allocated svalue of TYPE, owned by this manager. */
283 const svalue *
284 region_model_manager::create_unique_svalue (tree type)
286 svalue *sval = new placeholder_svalue (type, "unique");
287 m_managed_dynamic_svalues.safe_push (sval);
288 return sval;
291 /* Return the svalue * for the initial value of REG, creating it if
292 necessary. */
294 const svalue *
295 region_model_manager::get_or_create_initial_value (const region *reg)
297 if (!reg->can_have_initial_svalue_p ())
298 return get_or_create_poisoned_svalue (POISON_KIND_UNINIT,
299 reg->get_type ());
301 /* The initial value of a cast is a cast of the initial value. */
302 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
304 const region *original_reg = cast_reg->get_original_region ();
305 return get_or_create_cast (cast_reg->get_type (),
306 get_or_create_initial_value (original_reg));
309 /* INIT_VAL (*UNKNOWN_PTR) -> UNKNOWN_VAL. */
310 if (reg->symbolic_for_unknown_ptr_p ())
311 return get_or_create_unknown_svalue (reg->get_type ());
313 if (initial_svalue **slot = m_initial_values_map.get (reg))
314 return *slot;
315 initial_svalue *initial_sval = new initial_svalue (reg->get_type (), reg);
316 RETURN_UNKNOWN_IF_TOO_COMPLEX (initial_sval);
317 m_initial_values_map.put (reg, initial_sval);
318 return initial_sval;
321 /* Return the svalue * for R using type TYPE, creating it if
322 necessary. */
324 const svalue *
325 region_model_manager::get_or_create_setjmp_svalue (const setjmp_record &r,
326 tree type)
328 setjmp_svalue::key_t key (r, type);
329 if (setjmp_svalue **slot = m_setjmp_values_map.get (key))
330 return *slot;
331 setjmp_svalue *setjmp_sval = new setjmp_svalue (r, type);
332 RETURN_UNKNOWN_IF_TOO_COMPLEX (setjmp_sval);
333 m_setjmp_values_map.put (key, setjmp_sval);
334 return setjmp_sval;
337 /* Return the svalue * for a poisoned value of KIND and TYPE, creating it if
338 necessary. */
340 const svalue *
341 region_model_manager::get_or_create_poisoned_svalue (enum poison_kind kind,
342 tree type)
344 poisoned_svalue::key_t key (kind, type);
345 if (poisoned_svalue **slot = m_poisoned_values_map.get (key))
346 return *slot;
347 poisoned_svalue *poisoned_sval = new poisoned_svalue (kind, type);
348 RETURN_UNKNOWN_IF_TOO_COMPLEX (poisoned_sval);
349 m_poisoned_values_map.put (key, poisoned_sval);
350 return poisoned_sval;
353 /* Return the svalue * for a pointer to POINTEE of type PTR_TYPE,
354 creating it if necessary. */
356 const svalue *
357 region_model_manager::get_ptr_svalue (tree ptr_type, const region *pointee)
359 /* If this is a symbolic region from dereferencing a pointer, and the types
360 match, then return the original pointer. */
361 if (const symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region ())
362 if (ptr_type == sym_reg->get_pointer ()->get_type ())
363 return sym_reg->get_pointer ();
365 region_svalue::key_t key (ptr_type, pointee);
366 if (region_svalue **slot = m_pointer_values_map.get (key))
367 return *slot;
368 region_svalue *sval = new region_svalue (ptr_type, pointee);
369 RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
370 m_pointer_values_map.put (key, sval);
371 return sval;
374 /* Subroutine of region_model_manager::get_or_create_unaryop.
375 Attempt to fold the inputs and return a simpler svalue *.
376 Otherwise, return NULL. */
378 const svalue *
379 region_model_manager::maybe_fold_unaryop (tree type, enum tree_code op,
380 const svalue *arg)
382 /* Ops on "unknown" are also unknown. */
383 if (arg->get_kind () == SK_UNKNOWN)
384 return get_or_create_unknown_svalue (type);
385 /* Likewise for "poisoned". */
386 else if (const poisoned_svalue *poisoned_sval
387 = arg->dyn_cast_poisoned_svalue ())
388 return get_or_create_poisoned_svalue (poisoned_sval->get_poison_kind (),
389 type);
391 gcc_assert (arg->can_have_associated_state_p ());
393 switch (op)
395 default: break;
396 case VIEW_CONVERT_EXPR:
397 case NOP_EXPR:
399 /* Handle redundant casts. */
400 if (arg->get_type ()
401 && useless_type_conversion_p (arg->get_type (), type))
402 return arg;
404 /* Fold "cast<TYPE> (cast <INNER_TYPE> (innermost_arg))
405 => "cast<TYPE> (innermost_arg)",
406 unless INNER_TYPE is narrower than TYPE. */
407 if (const svalue *innermost_arg = arg->maybe_undo_cast ())
409 tree inner_type = arg->get_type ();
410 if (TYPE_SIZE (type)
411 && TYPE_SIZE (inner_type)
412 && (fold_binary (LE_EXPR, boolean_type_node,
413 TYPE_SIZE (type), TYPE_SIZE (inner_type))
414 == boolean_true_node))
415 return maybe_fold_unaryop (type, op, innermost_arg);
417 /* Avoid creating symbolic regions for pointer casts by
418 simplifying (T*)(&REGION) to ((T*)&REGION). */
419 if (const region_svalue *region_sval = arg->dyn_cast_region_svalue ())
420 if (POINTER_TYPE_P (type)
421 && region_sval->get_type ()
422 && POINTER_TYPE_P (region_sval->get_type ()))
423 return get_ptr_svalue (type, region_sval->get_pointee ());
425 break;
426 case TRUTH_NOT_EXPR:
428 /* Invert comparisons e.g. "!(x == y)" => "x != y". */
429 if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
430 if (TREE_CODE_CLASS (binop->get_op ()) == tcc_comparison)
432 enum tree_code inv_op
433 = invert_tree_comparison (binop->get_op (),
434 HONOR_NANS (binop->get_type ()));
435 if (inv_op != ERROR_MARK)
436 return get_or_create_binop (binop->get_type (), inv_op,
437 binop->get_arg0 (),
438 binop->get_arg1 ());
441 break;
442 case NEGATE_EXPR:
444 /* -(-(VAL)) is VAL, for integer types. */
445 if (const unaryop_svalue *unaryop = arg->dyn_cast_unaryop_svalue ())
446 if (unaryop->get_op () == NEGATE_EXPR
447 && type == unaryop->get_type ()
448 && type
449 && INTEGRAL_TYPE_P (type))
450 return unaryop->get_arg ();
452 break;
455 /* Constants. */
456 if (tree cst = arg->maybe_get_constant ())
457 if (tree result = fold_unary (op, type, cst))
459 if (CONSTANT_CLASS_P (result))
460 return get_or_create_constant_svalue (result);
462 /* fold_unary can return casts of constants; try to handle them. */
463 if (op != NOP_EXPR
464 && type
465 && TREE_CODE (result) == NOP_EXPR
466 && CONSTANT_CLASS_P (TREE_OPERAND (result, 0)))
468 const svalue *inner_cst
469 = get_or_create_constant_svalue (TREE_OPERAND (result, 0));
470 return get_or_create_cast (type,
471 get_or_create_cast (TREE_TYPE (result),
472 inner_cst));
476 return NULL;
479 /* Return the svalue * for a unary operation OP on ARG with a result of
480 type TYPE, creating it if necessary. */
482 const svalue *
483 region_model_manager::get_or_create_unaryop (tree type, enum tree_code op,
484 const svalue *arg)
486 if (const svalue *folded = maybe_fold_unaryop (type, op, arg))
487 return folded;
488 unaryop_svalue::key_t key (type, op, arg);
489 if (unaryop_svalue **slot = m_unaryop_values_map.get (key))
490 return *slot;
491 unaryop_svalue *unaryop_sval = new unaryop_svalue (type, op, arg);
492 RETURN_UNKNOWN_IF_TOO_COMPLEX (unaryop_sval);
493 m_unaryop_values_map.put (key, unaryop_sval);
494 return unaryop_sval;
497 /* Get a tree code for a cast to DST_TYPE from SRC_TYPE.
498 Use NOP_EXPR if possible (e.g. to help fold_unary convert casts
499 of 0 to (T*) to simple pointer constants), but use FIX_TRUNC_EXPR
500 and VIEW_CONVERT_EXPR for cases that fold_unary would otherwise crash
501 on. */
503 static enum tree_code
504 get_code_for_cast (tree dst_type, tree src_type)
506 gcc_assert (dst_type);
507 if (!src_type)
508 return NOP_EXPR;
510 if (TREE_CODE (src_type) == REAL_TYPE)
512 if (TREE_CODE (dst_type) == INTEGER_TYPE)
513 return FIX_TRUNC_EXPR;
514 else
515 return VIEW_CONVERT_EXPR;
518 return NOP_EXPR;
521 /* Return the svalue * for a cast of ARG to type TYPE, creating it
522 if necessary. */
524 const svalue *
525 region_model_manager::get_or_create_cast (tree type, const svalue *arg)
527 gcc_assert (type);
529 /* No-op if the types are the same. */
530 if (type == arg->get_type ())
531 return arg;
533 /* Don't attempt to handle casts involving vector types for now. */
534 if (TREE_CODE (type) == VECTOR_TYPE
535 || (arg->get_type ()
536 && TREE_CODE (arg->get_type ()) == VECTOR_TYPE))
537 return get_or_create_unknown_svalue (type);
539 enum tree_code op = get_code_for_cast (type, arg->get_type ());
540 return get_or_create_unaryop (type, op, arg);
543 /* Subroutine of region_model_manager::maybe_fold_binop for handling
544 (TYPE)(COMPOUND_SVAL BIT_AND_EXPR CST) that may have been generated by
545 optimize_bit_field_compare, where CST is from ARG1.
547 Support masking out bits from a compound_svalue for comparing a bitfield
548 against a value, as generated by optimize_bit_field_compare for
549 BITFIELD == VALUE.
551 If COMPOUND_SVAL has a value for the appropriate bits, return it,
552 shifted accordingly.
553 Otherwise return NULL. */
555 const svalue *
556 region_model_manager::
557 maybe_undo_optimize_bit_field_compare (tree type,
558 const compound_svalue *compound_sval,
559 tree cst,
560 const svalue *arg1)
562 if (type != unsigned_char_type_node)
563 return NULL;
565 const binding_map &map = compound_sval->get_map ();
566 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst);
567 /* If "mask" is a contiguous range of set bits, see if the
568 compound_sval has a value for those bits. */
569 bit_range bits (0, 0);
570 if (!bit_range::from_mask (mask, &bits))
571 return NULL;
573 bit_range bound_bits (bits);
574 if (BYTES_BIG_ENDIAN)
575 bound_bits = bit_range (BITS_PER_UNIT - bits.get_next_bit_offset (),
576 bits.m_size_in_bits);
577 const concrete_binding *conc
578 = get_store_manager ()->get_concrete_binding (bound_bits);
579 const svalue *sval = map.get (conc);
580 if (!sval)
581 return NULL;
583 /* We have a value;
584 shift it by the correct number of bits. */
585 const svalue *lhs = get_or_create_cast (type, sval);
586 HOST_WIDE_INT bit_offset = bits.get_start_bit_offset ().to_shwi ();
587 const svalue *shift_sval = get_or_create_int_cst (type, bit_offset);
588 const svalue *shifted_sval = get_or_create_binop (type, LSHIFT_EXPR,
589 lhs, shift_sval);
590 /* Reapply the mask (needed for negative
591 signed bitfields). */
592 return get_or_create_binop (type, BIT_AND_EXPR,
593 shifted_sval, arg1);
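/* An illustrative sketch (not from a real testcase; the exact lowering
   depends on the target and on fold-const.cc): given
     struct s { unsigned char a : 2; unsigned char b : 3; };
   optimize_bit_field_compare can turn "x.b == 5" into something like
     ((unsigned char)x & 0x1c) == 0x14
   With mask 0x1c, bit_range::from_mask gives bits 2..4, so the code above
   looks up the compound_svalue's binding for those bits, casts it to
   unsigned char, shifts it left by 2 (the start bit offset) and re-applies
   the mask, producing a value that can be compared against 0x14.  */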
596 /* Subroutine of region_model_manager::get_or_create_binop.
597 Attempt to fold the inputs and return a simpler svalue *.
598 Otherwise, return NULL. */
600 const svalue *
601 region_model_manager::maybe_fold_binop (tree type, enum tree_code op,
602 const svalue *arg0,
603 const svalue *arg1)
605 tree cst0 = arg0->maybe_get_constant ();
606 tree cst1 = arg1->maybe_get_constant ();
607 /* (CST OP CST). */
608 if (cst0 && cst1)
610 if (tree result = fold_binary (op, type, cst0, cst1))
611 if (CONSTANT_CLASS_P (result))
612 return get_or_create_constant_svalue (result);
615 if (FLOAT_TYPE_P (type)
616 || (arg0->get_type () && FLOAT_TYPE_P (arg0->get_type ()))
617 || (arg1->get_type () && FLOAT_TYPE_P (arg1->get_type ())))
618 return NULL;
620 switch (op)
622 default:
623 break;
624 case POINTER_PLUS_EXPR:
625 case PLUS_EXPR:
626 /* (VAL + 0) -> VAL. */
627 if (cst1 && zerop (cst1))
628 return get_or_create_cast (type, arg0);
629 break;
630 case MINUS_EXPR:
631 /* (VAL - 0) -> VAL. */
632 if (cst1 && zerop (cst1))
633 return get_or_create_cast (type, arg0);
634 /* (0 - VAL) -> -VAL. */
635 if (cst0 && zerop (cst0))
636 return get_or_create_unaryop (type, NEGATE_EXPR, arg1);
637 break;
638 case MULT_EXPR:
639 /* (VAL * 0) -> 0 (for integral types). */
640 if (cst1 && zerop (cst1) && INTEGRAL_TYPE_P (type))
641 return get_or_create_constant_svalue (build_int_cst (type, 0));
642 /* (VAL * 1) -> VAL. */
643 if (cst1 && integer_onep (cst1))
644 return arg0;
645 break;
646 case BIT_AND_EXPR:
647 if (cst1)
649 if (zerop (cst1) && INTEGRAL_TYPE_P (type))
650 /* "(ARG0 & 0)" -> "0". */
651 return get_or_create_constant_svalue (build_int_cst (type, 0));
653 if (const compound_svalue *compound_sval
654 = arg0->dyn_cast_compound_svalue ())
655 if (const svalue *sval
656 = maybe_undo_optimize_bit_field_compare (type,
657 compound_sval,
658 cst1, arg1))
659 return sval;
661 if (arg0->get_type () == boolean_type_node
662 && arg1->get_type () == boolean_type_node)
664 /* If both operands are _Bool, then... */
665 /* ..."(1 & x) -> x". */
666 if (cst0 && !zerop (cst0))
667 return get_or_create_cast (type, arg1);
668 /* ..."(x & 1) -> x". */
669 if (cst1 && !zerop (cst1))
670 return get_or_create_cast (type, arg0);
671 /* ..."(0 & x) -> 0". */
672 if (cst0 && zerop (cst0))
673 return get_or_create_int_cst (type, 0);
674 /* ..."(x & 0) -> 0". */
675 if (cst1 && zerop (cst1))
676 return get_or_create_int_cst (type, 0);
678 break;
679 case BIT_IOR_EXPR:
680 if (arg0->get_type () == boolean_type_node
681 && arg1->get_type () == boolean_type_node)
683 /* If both operands are _Bool, then... */
684 /* ..."(1 | x) -> 1". */
685 if (cst0 && !zerop (cst0))
686 return get_or_create_int_cst (type, 1);
687 /* ..."(x | 1) -> 1". */
688 if (cst1 && !zerop (cst1))
689 return get_or_create_int_cst (type, 1);
690 /* ..."(0 | x) -> x". */
691 if (cst0 && zerop (cst0))
692 return get_or_create_cast (type, arg1);
693 /* ..."(x | 0) -> x". */
694 if (cst1 && zerop (cst1))
695 return get_or_create_cast (type, arg0);
697 break;
698 case TRUTH_ANDIF_EXPR:
699 case TRUTH_AND_EXPR:
700 if (cst1)
702 if (zerop (cst1) && INTEGRAL_TYPE_P (type))
703 /* "(ARG0 && 0)" -> "0". */
704 return get_or_create_constant_svalue (build_int_cst (type, 0));
705 else
706 /* "(ARG0 && nonzero-cst)" -> "ARG0". */
707 return get_or_create_cast (type, arg0);
709 break;
710 case TRUTH_ORIF_EXPR:
711 case TRUTH_OR_EXPR:
712 if (cst1)
714 if (zerop (cst1))
715 /* "(ARG0 || 0)" -> "ARG0". */
716 return get_or_create_cast (type, arg0);
717 else
718 /* "(ARG0 && nonzero-cst)" -> "nonzero-cst". */
719 return get_or_create_cast (type, arg1);
721 break;
724 /* For associative ops, fold "((X op CST_A) op CST_B)" to
725 "X op (CST_A op CST_B)". */
726 if (cst1 && associative_tree_code (op))
727 if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
728 if (binop->get_op () == op
729 && binop->get_arg1 ()->maybe_get_constant ()
730 && type == binop->get_type ()
731 && type == binop->get_arg0 ()->get_type ()
732 && type == binop->get_arg1 ()->get_type ())
733 return get_or_create_binop
734 (type, op, binop->get_arg0 (),
735 get_or_create_binop (type, op,
736 binop->get_arg1 (), arg1));
738 /* associative_tree_code is false for POINTER_PLUS_EXPR, but we
739 can fold:
740 "(PTR ptr+ CST_A) ptr+ CST_B)" to "PTR ptr+ (CST_A ptr+ CST_B)"
741 e.g. in data-model-1.c: test_4c. */
742 if (cst1 && op == POINTER_PLUS_EXPR)
743 if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
744 if (binop->get_op () == POINTER_PLUS_EXPR)
745 if (binop->get_arg1 ()->maybe_get_constant ())
746 return get_or_create_binop
747 (type, op, binop->get_arg0 (),
748 get_or_create_binop (size_type_node, op,
749 binop->get_arg1 (), arg1));
751 /* etc. */
753 return NULL;
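/* For instance (illustrative, not from the original comments): the
   associative-op rule above rewrites "(x + 4) + 5" as "x + (4 + 5)"; the
   inner get_or_create_binop sees two constants, which the cst0/cst1 case at
   the top of this function folds to 9, so the result is consolidated as
   "x + 9".  The POINTER_PLUS_EXPR rule combines chained pointer offsets in
   the same way.  */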
756 /* Return the svalue * for a binary operation OP on ARG0 and ARG1
757 with a result of type TYPE, creating it if necessary. */
759 const svalue *
760 region_model_manager::get_or_create_binop (tree type, enum tree_code op,
761 const svalue *arg0,
762 const svalue *arg1)
764 /* For commutative ops, put any constant on the RHS. */
765 if (arg0->maybe_get_constant () && commutative_tree_code (op))
766 std::swap (arg0, arg1);
768 if (const svalue *folded = maybe_fold_binop (type, op, arg0, arg1))
769 return folded;
771 /* Ops on "unknown"/"poisoned" are unknown (unless we were able to fold
772 it via an identity in maybe_fold_binop). */
773 if (!arg0->can_have_associated_state_p ()
774 || !arg1->can_have_associated_state_p ())
775 return get_or_create_unknown_svalue (type);
777 binop_svalue::key_t key (type, op, arg0, arg1);
778 if (binop_svalue **slot = m_binop_values_map.get (key))
779 return *slot;
780 binop_svalue *binop_sval = new binop_svalue (type, op, arg0, arg1);
781 RETURN_UNKNOWN_IF_TOO_COMPLEX (binop_sval);
782 m_binop_values_map.put (key, binop_sval);
783 return binop_sval;
786 /* Subroutine of region_model_manager::get_or_create_sub_svalue.
787 Return a folded svalue, or NULL. */
789 const svalue *
790 region_model_manager::maybe_fold_sub_svalue (tree type,
791 const svalue *parent_svalue,
792 const region *subregion)
794 /* Subvalues of "unknown"/"poisoned" are unknown. */
795 if (!parent_svalue->can_have_associated_state_p ())
796 return get_or_create_unknown_svalue (type);
798 /* If we have a subregion of a zero-fill, it's zero. */
799 if (const unaryop_svalue *unary
800 = parent_svalue->dyn_cast_unaryop_svalue ())
802 if (unary->get_op () == NOP_EXPR
803 || unary->get_op () == VIEW_CONVERT_EXPR)
804 if (tree cst = unary->get_arg ()->maybe_get_constant ())
805 if (zerop (cst) && type)
807 const svalue *cst_sval
808 = get_or_create_constant_svalue (cst);
809 return get_or_create_cast (type, cst_sval);
813 /* Handle getting individual chars from a STRING_CST. */
814 if (tree cst = parent_svalue->maybe_get_constant ())
815 if (TREE_CODE (cst) == STRING_CST)
817 /* If we have a concrete 1-byte access within the parent region... */
818 byte_range subregion_bytes (0, 0);
819 if (subregion->get_relative_concrete_byte_range (&subregion_bytes)
820 && subregion_bytes.m_size_in_bytes == 1
821 && type)
823 /* ...then attempt to get that char from the STRING_CST. */
824 HOST_WIDE_INT hwi_start_byte
825 = subregion_bytes.m_start_byte_offset.to_shwi ();
826 tree cst_idx
827 = build_int_cst_type (size_type_node, hwi_start_byte);
828 if (const svalue *char_sval
829 = maybe_get_char_from_string_cst (cst, cst_idx))
830 return get_or_create_cast (type, char_sval);
834 if (const initial_svalue *init_sval
835 = parent_svalue->dyn_cast_initial_svalue ())
837 /* SUB(INIT(r)).FIELD -> INIT(r.FIELD)
838 i.e.
839 Subvalue(InitialValue(R1), FieldRegion(R2, F))
840 -> InitialValue(FieldRegion(R1, F)). */
841 if (const field_region *field_reg = subregion->dyn_cast_field_region ())
843 const region *field_reg_new
844 = get_field_region (init_sval->get_region (),
845 field_reg->get_field ());
846 return get_or_create_initial_value (field_reg_new);
848 /* SUB(INIT(r))[ELEMENT] -> INIT(r[ELEMENT])
849 i.e.
850 Subvalue(InitialValue(R1), ElementRegion(R2, IDX))
851 -> InitialValue(ElementRegion(R1, IDX)). */
852 if (const element_region *element_reg = subregion->dyn_cast_element_region ())
854 const region *element_reg_new
855 = get_element_region (init_sval->get_region (),
856 element_reg->get_type (),
857 element_reg->get_index ());
858 return get_or_create_initial_value (element_reg_new);
862 if (const repeated_svalue *repeated_sval
863 = parent_svalue->dyn_cast_repeated_svalue ())
864 if (type)
865 return get_or_create_cast (type, repeated_sval->get_inner_svalue ());
867 return NULL;
870 /* Return the svalue * for extracting a subvalue of type TYPE from
871 PARENT_SVALUE based on SUBREGION, creating it if necessary. */
873 const svalue *
874 region_model_manager::get_or_create_sub_svalue (tree type,
875 const svalue *parent_svalue,
876 const region *subregion)
878 if (const svalue *folded
879 = maybe_fold_sub_svalue (type, parent_svalue, subregion))
880 return folded;
882 sub_svalue::key_t key (type, parent_svalue, subregion);
883 if (sub_svalue **slot = m_sub_values_map.get (key))
884 return *slot;
885 sub_svalue *sub_sval
886 = new sub_svalue (type, parent_svalue, subregion);
887 RETURN_UNKNOWN_IF_TOO_COMPLEX (sub_sval);
888 m_sub_values_map.put (key, sub_sval);
889 return sub_sval;
892 /* Subroutine of region_model_manager::get_or_create_repeated_svalue.
893 Return a folded svalue, or NULL. */
895 const svalue *
896 region_model_manager::maybe_fold_repeated_svalue (tree type,
897 const svalue *outer_size,
898 const svalue *inner_svalue)
900 /* Repeated "unknown"/"poisoned" is unknown. */
901 if (!outer_size->can_have_associated_state_p ()
902 || !inner_svalue->can_have_associated_state_p ())
903 return get_or_create_unknown_svalue (type);
905 /* If INNER_SVALUE is the same size as OUTER_SIZE,
906 simply turn it into a cast. */
907 if (tree cst_outer_num_bytes = outer_size->maybe_get_constant ())
909 HOST_WIDE_INT num_bytes_inner_svalue
910 = int_size_in_bytes (inner_svalue->get_type ());
911 if (num_bytes_inner_svalue != -1)
912 if (num_bytes_inner_svalue
913 == (HOST_WIDE_INT)tree_to_uhwi (cst_outer_num_bytes))
915 if (type)
916 return get_or_create_cast (type, inner_svalue);
917 else
918 return inner_svalue;
922 /* Handle zero-fill of a specific type. */
923 if (tree cst = inner_svalue->maybe_get_constant ())
924 if (zerop (cst) && type)
925 return get_or_create_cast (type, inner_svalue);
927 return NULL;
930 /* Return the svalue * of type TYPE in which INNER_SVALUE is repeated
931 enough times to be of size OUTER_SIZE, creating it if necessary.
932 e.g. for filling buffers with a constant value. */
934 const svalue *
935 region_model_manager::get_or_create_repeated_svalue (tree type,
936 const svalue *outer_size,
937 const svalue *inner_svalue)
939 if (const svalue *folded
940 = maybe_fold_repeated_svalue (type, outer_size, inner_svalue))
941 return folded;
943 repeated_svalue::key_t key (type, outer_size, inner_svalue);
944 if (repeated_svalue **slot = m_repeated_values_map.get (key))
945 return *slot;
946 repeated_svalue *repeated_sval
947 = new repeated_svalue (type, outer_size, inner_svalue);
948 RETURN_UNKNOWN_IF_TOO_COMPLEX (repeated_sval);
949 m_repeated_values_map.put (key, repeated_sval);
950 return repeated_sval;
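/* A note on intent (a summary, not the exact call path): repeated svalues
   let e.g. "memset (buf, 0, 256)" be modeled as a single binding of BUF to
   a repeated_svalue with outer size 256 and inner svalue (char)0, rather
   than 256 separate per-byte bindings.  */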
953 /* Attempt to get the bit_range for FIELD within a RECORD_TYPE.
954 Return true and write the result to OUT if successful.
955 Return false otherwise. */
957 static bool
958 get_bit_range_for_field (tree field, bit_range *out)
960 bit_size_t bit_size;
961 if (!int_size_in_bits (TREE_TYPE (field), &bit_size))
962 return false;
963 int field_bit_offset = int_bit_position (field);
964 *out = bit_range (field_bit_offset, bit_size);
965 return true;
968 /* Attempt to get the byte_range for FIELD within a RECORD_TYPE.
969 Return true and write the result to OUT if successful.
970 Return false otherwise. */
972 static bool
973 get_byte_range_for_field (tree field, byte_range *out)
975 bit_range field_bits (0, 0);
976 if (!get_bit_range_for_field (field, &field_bits))
977 return false;
978 return field_bits.as_byte_range (out);
981 /* Attempt to determine if there is a specific field within RECORD_TYPE
982 at BYTES. If so, return it, and write the location of BYTES relative
983 to the field to *OUT_RANGE_WITHIN_FIELD.
984 Otherwise, return NULL_TREE.
985 For example, given:
986 struct foo { uint32 a; uint32 b; };
988 bytes = {bytes 6-7} (of foo)
989 we have bytes 2-3 of field b. */
991 static tree
992 get_field_at_byte_range (tree record_type, const byte_range &bytes,
993 byte_range *out_range_within_field)
995 bit_offset_t bit_offset = bytes.m_start_byte_offset * BITS_PER_UNIT;
997 tree field = get_field_at_bit_offset (record_type, bit_offset);
998 if (!field)
999 return NULL_TREE;
1001 byte_range field_bytes (0,0);
1002 if (!get_byte_range_for_field (field, &field_bytes))
1003 return NULL_TREE;
1005 /* Is BYTES fully within field_bytes? */
1006 byte_range bytes_within_field (0,0);
1007 if (!field_bytes.contains_p (bytes, &bytes_within_field))
1008 return NULL_TREE;
1010 *out_range_within_field = bytes_within_field;
1011 return field;
1014 /* Subroutine of region_model_manager::get_or_create_bits_within.
1015 Return a folded svalue, or NULL. */
1017 const svalue *
1018 region_model_manager::maybe_fold_bits_within_svalue (tree type,
1019 const bit_range &bits,
1020 const svalue *inner_svalue)
1022 tree inner_type = inner_svalue->get_type ();
1023 /* Fold:
1024 BITS_WITHIN ((0, sizeof (VAL)), VAL)
1026 to: CAST (TYPE, VAL). */
1027 if (bits.m_start_bit_offset == 0 && inner_type)
1029 bit_size_t inner_type_size;
1030 if (int_size_in_bits (inner_type, &inner_type_size))
1031 if (inner_type_size == bits.m_size_in_bits)
1033 if (type)
1034 return get_or_create_cast (type, inner_svalue);
1035 else
1036 return inner_svalue;
1040 /* Kind-specific folding. */
1041 if (const svalue *sval
1042 = inner_svalue->maybe_fold_bits_within (type, bits, this))
1043 return sval;
1045 byte_range bytes (0,0);
1046 if (bits.as_byte_range (&bytes) && inner_type)
1047 switch (TREE_CODE (inner_type))
1049 default:
1050 break;
1051 case ARRAY_TYPE:
1053 /* Fold:
1054 BITS_WITHIN (range, KIND(REG))
1056 to: BITS_WITHIN (range - offsetof(ELEMENT), KIND(REG.ELEMENT))
1057 if the range is a byte-range fully within one ELEMENT. */
1058 tree element_type = TREE_TYPE (inner_type);
1059 HOST_WIDE_INT element_byte_size
1060 = int_size_in_bytes (element_type);
1061 if (element_byte_size > 0)
1063 HOST_WIDE_INT start_idx
1064 = (bytes.get_start_byte_offset ().to_shwi ()
1065 / element_byte_size);
1066 HOST_WIDE_INT last_idx
1067 = (bytes.get_last_byte_offset ().to_shwi ()
1068 / element_byte_size);
1069 if (start_idx == last_idx)
1071 if (const initial_svalue *initial_sval
1072 = inner_svalue->dyn_cast_initial_svalue ())
1074 bit_offset_t start_of_element
1075 = start_idx * element_byte_size * BITS_PER_UNIT;
1076 bit_range bits_within_element
1077 (bits.m_start_bit_offset - start_of_element,
1078 bits.m_size_in_bits);
1079 const svalue *idx_sval
1080 = get_or_create_int_cst (integer_type_node, start_idx);
1081 const region *element_reg =
1082 get_element_region (initial_sval->get_region (),
1083 element_type, idx_sval);
1084 const svalue *element_reg_sval
1085 = get_or_create_initial_value (element_reg);
1086 return get_or_create_bits_within (type,
1087 bits_within_element,
1088 element_reg_sval);
1093 break;
1094 case RECORD_TYPE:
1096 /* Fold:
1097 BYTES_WITHIN (range, KIND(REG))
1099 to: BYTES_WITHIN (range - offsetof(FIELD), KIND(REG.FIELD))
1100 if the range is fully within FIELD. */
1101 byte_range bytes_within_field (0, 0);
1102 if (tree field = get_field_at_byte_range (inner_type, bytes,
1103 &bytes_within_field))
1105 if (const initial_svalue *initial_sval
1106 = inner_svalue->dyn_cast_initial_svalue ())
1108 const region *field_reg =
1109 get_field_region (initial_sval->get_region (), field);
1110 const svalue *initial_reg_sval
1111 = get_or_create_initial_value (field_reg);
1112 return get_or_create_bits_within
1113 (type,
1114 bytes_within_field.as_bit_range (),
1115 initial_reg_sval);
1119 break;
1121 return NULL;
1124 /* Return the svalue * of type TYPE for extracting BITS from INNER_SVALUE,
1125 creating it if necessary. */
1127 const svalue *
1128 region_model_manager::get_or_create_bits_within (tree type,
1129 const bit_range &bits,
1130 const svalue *inner_svalue)
1132 if (const svalue *folded
1133 = maybe_fold_bits_within_svalue (type, bits, inner_svalue))
1134 return folded;
1136 bits_within_svalue::key_t key (type, bits, inner_svalue);
1137 if (bits_within_svalue **slot = m_bits_within_values_map.get (key))
1138 return *slot;
1139 bits_within_svalue *bits_within_sval
1140 = new bits_within_svalue (type, bits, inner_svalue);
1141 RETURN_UNKNOWN_IF_TOO_COMPLEX (bits_within_sval);
1142 m_bits_within_values_map.put (key, bits_within_sval);
1143 return bits_within_sval;
1146 /* Return the svalue * that decorates ARG as being unmergeable,
1147 creating it if necessary. */
1149 const svalue *
1150 region_model_manager::get_or_create_unmergeable (const svalue *arg)
1152 if (arg->get_kind () == SK_UNMERGEABLE)
1153 return arg;
1155 if (unmergeable_svalue **slot = m_unmergeable_values_map.get (arg))
1156 return *slot;
1157 unmergeable_svalue *unmergeable_sval = new unmergeable_svalue (arg);
1158 RETURN_UNKNOWN_IF_TOO_COMPLEX (unmergeable_sval);
1159 m_unmergeable_values_map.put (arg, unmergeable_sval);
1160 return unmergeable_sval;
1163 /* Return the svalue * of type TYPE for the merger of value BASE_SVAL
1164 and ITER_SVAL at POINT, creating it if necessary. */
1166 const svalue *
1167 region_model_manager::
1168 get_or_create_widening_svalue (tree type,
1169 const function_point &point,
1170 const svalue *base_sval,
1171 const svalue *iter_sval)
1173 gcc_assert (base_sval->get_kind () != SK_WIDENING);
1174 gcc_assert (iter_sval->get_kind () != SK_WIDENING);
1175 widening_svalue::key_t key (type, point, base_sval, iter_sval);
1176 if (widening_svalue **slot = m_widening_values_map.get (key))
1177 return *slot;
1178 widening_svalue *widening_sval
1179 = new widening_svalue (type, point, base_sval, iter_sval);
1180 RETURN_UNKNOWN_IF_TOO_COMPLEX (widening_sval);
1181 m_widening_values_map.put (key, widening_sval);
1182 return widening_sval;
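/* Widening svalues are the analyzer's analogue of widening in abstract
   interpretation: rather than enumerating every iteration of a loop, an
   iterated value is summarized by combining its initial value with the
   value from one iteration.  Illustratively, for "for (i = 0; i < n; i++)"
   the analyzer can represent "i" at the loop head as a widening of the
   base value 0 and the iterated value "i + 1", instead of 0, 1, 2, ...  */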
1185 /* Return the svalue * of type TYPE for the compound values in MAP,
1186 creating it if necessary. */
1188 const svalue *
1189 region_model_manager::get_or_create_compound_svalue (tree type,
1190 const binding_map &map)
1192 compound_svalue::key_t tmp_key (type, &map);
1193 if (compound_svalue **slot = m_compound_values_map.get (tmp_key))
1194 return *slot;
1195 compound_svalue *compound_sval
1196 = new compound_svalue (type, map);
1197 RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval);
1198 /* Use make_key rather than reusing the key, so that we use a
1199 ptr to compound_sval's binding_map, rather than the MAP param. */
1200 m_compound_values_map.put (compound_sval->make_key (), compound_sval);
1201 return compound_sval;
1204 /* class conjured_purge. */
1206 /* Purge state relating to SVAL. */
1208 void
1209 conjured_purge::purge (const conjured_svalue *sval) const
1211 m_model->purge_state_involving (sval, m_ctxt);
1214 /* Return the svalue * of type TYPE for the value conjured for ID_REG
1215 at STMT, creating it if necessary.
1216 Use P to purge existing state from the svalue, for the case where a
1217 conjured_svalue would be reused along an execution path. */
1219 const svalue *
1220 region_model_manager::get_or_create_conjured_svalue (tree type,
1221 const gimple *stmt,
1222 const region *id_reg,
1223 const conjured_purge &p)
1225 conjured_svalue::key_t key (type, stmt, id_reg);
1226 if (conjured_svalue **slot = m_conjured_values_map.get (key))
1228 const conjured_svalue *sval = *slot;
1229 /* We're reusing an existing conjured_svalue, perhaps from a different
1230 state within this analysis, or perhaps from an earlier state on this
1231 execution path. For the latter, purge any state involving the "new"
1232 svalue from the current program_state. */
1233 p.purge (sval);
1234 return sval;
1236 conjured_svalue *conjured_sval
1237 = new conjured_svalue (type, stmt, id_reg);
1238 RETURN_UNKNOWN_IF_TOO_COMPLEX (conjured_sval);
1239 m_conjured_values_map.put (key, conjured_sval);
1240 return conjured_sval;
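/* Conjured svalues stand for values produced at a particular statement that
   are otherwise unknown; illustratively, for "n = fread (buf, 1, sz, fp);"
   both the returned "n" and the bytes written into *buf can be modeled as
   conjured_svalues keyed on the call statement and the affected region.
   Re-reaching the statement along an execution path reuses the same
   conjured_svalue, after purging state involving it via the conjured_purge
   argument above.  */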
1243 /* Subroutine of region_model_manager::get_or_create_asm_output_svalue.
1244 Return a folded svalue, or NULL. */
1246 const svalue *
1247 region_model_manager::
1248 maybe_fold_asm_output_svalue (tree type,
1249 const vec<const svalue *> &inputs)
1251 /* Unknown inputs should lead to unknown results. */
1252 for (const auto &iter : inputs)
1253 if (iter->get_kind () == SK_UNKNOWN)
1254 return get_or_create_unknown_svalue (type);
1256 return NULL;
1259 /* Return the svalue * of type TYPE for OUTPUT_IDX of the deterministic
1260 asm stmt ASM_STMT, given INPUTS as inputs. */
1262 const svalue *
1263 region_model_manager::
1264 get_or_create_asm_output_svalue (tree type,
1265 const gasm *asm_stmt,
1266 unsigned output_idx,
1267 const vec<const svalue *> &inputs)
1269 gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);
1271 if (const svalue *folded
1272 = maybe_fold_asm_output_svalue (type, inputs))
1273 return folded;
1275 const char *asm_string = gimple_asm_string (asm_stmt);
1276 const unsigned noutputs = gimple_asm_noutputs (asm_stmt);
1278 asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
1279 if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
1280 return *slot;
1281 asm_output_svalue *asm_output_sval
1282 = new asm_output_svalue (type, asm_string, output_idx, noutputs, inputs);
1283 RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
1284 m_asm_output_values_map.put (key, asm_output_sval);
1285 return asm_output_sval;
1288 /* Return the svalue * of type TYPE for OUTPUT_IDX of a deterministic
1289 asm stmt with string ASM_STRING with NUM_OUTPUTS outputs, given
1290 INPUTS as inputs. */
1292 const svalue *
1293 region_model_manager::
1294 get_or_create_asm_output_svalue (tree type,
1295 const char *asm_string,
1296 unsigned output_idx,
1297 unsigned num_outputs,
1298 const vec<const svalue *> &inputs)
1300 gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);
1302 if (const svalue *folded
1303 = maybe_fold_asm_output_svalue (type, inputs))
1304 return folded;
1306 asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
1307 if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
1308 return *slot;
1309 asm_output_svalue *asm_output_sval
1310 = new asm_output_svalue (type, asm_string, output_idx, num_outputs, inputs);
1311 RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
1312 m_asm_output_values_map.put (key, asm_output_sval);
1313 return asm_output_sval;
1316 /* Return the svalue * of type TYPE for the result of a call to FNDECL
1317 with __attribute__((const)), given INPUTS as inputs. */
1319 const svalue *
1320 region_model_manager::
1321 get_or_create_const_fn_result_svalue (tree type,
1322 tree fndecl,
1323 const vec<const svalue *> &inputs)
1325 gcc_assert (type);
1326 gcc_assert (fndecl);
1327 gcc_assert (DECL_P (fndecl));
1328 gcc_assert (TREE_READONLY (fndecl));
1329 gcc_assert (inputs.length () <= const_fn_result_svalue::MAX_INPUTS);
1331 const_fn_result_svalue::key_t key (type, fndecl, inputs);
1332 if (const_fn_result_svalue **slot = m_const_fn_result_values_map.get (key))
1333 return *slot;
1334 const_fn_result_svalue *const_fn_result_sval
1335 = new const_fn_result_svalue (type, fndecl, inputs);
1336 RETURN_UNKNOWN_IF_TOO_COMPLEX (const_fn_result_sval);
1337 m_const_fn_result_values_map.put (key, const_fn_result_sval);
1338 return const_fn_result_sval;
1341 /* Given STRING_CST (a STRING_CST) and BYTE_OFFSET_CST (a constant),
1342 attempt to get the character at that offset, returning either
1343 the svalue for the character constant, or NULL if unsuccessful. */
1345 const svalue *
1346 region_model_manager::maybe_get_char_from_string_cst (tree string_cst,
1347 tree byte_offset_cst)
1349 gcc_assert (TREE_CODE (string_cst) == STRING_CST);
1351 /* Adapted from fold_read_from_constant_string. */
1352 scalar_int_mode char_mode;
1353 if (TREE_CODE (byte_offset_cst) == INTEGER_CST
1354 && compare_tree_int (byte_offset_cst,
1355 TREE_STRING_LENGTH (string_cst)) < 0
1356 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string_cst))),
1357 &char_mode)
1358 && GET_MODE_SIZE (char_mode) == 1)
1360 tree char_cst
1361 = build_int_cst_type (TREE_TYPE (TREE_TYPE (string_cst)),
1362 (TREE_STRING_POINTER (string_cst)
1363 [TREE_INT_CST_LOW (byte_offset_cst)]));
1364 return get_or_create_constant_svalue (char_cst);
1366 return NULL;
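/* For example: with STRING_CST "foo" and BYTE_OFFSET_CST 1 this returns the
   constant_svalue for the char 'o'; a non-INTEGER_CST offset, or an offset
   at or beyond TREE_STRING_LENGTH, yields NULL.  */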
1369 /* region consolidation. */
1371 /* Return the region for FNDECL, creating it if necessary. */
1373 const function_region *
1374 region_model_manager::get_region_for_fndecl (tree fndecl)
1376 gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);
1378 function_region **slot = m_fndecls_map.get (fndecl);
1379 if (slot)
1380 return *slot;
1381 function_region *reg
1382 = new function_region (alloc_region_id (), &m_code_region, fndecl);
1383 m_fndecls_map.put (fndecl, reg);
1384 return reg;
1387 /* Return the region for LABEL, creating it if necessary. */
1389 const label_region *
1390 region_model_manager::get_region_for_label (tree label)
1392 gcc_assert (TREE_CODE (label) == LABEL_DECL);
1394 label_region **slot = m_labels_map.get (label);
1395 if (slot)
1396 return *slot;
1398 tree fndecl = DECL_CONTEXT (label);
1399 gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);
1401 const function_region *func_reg = get_region_for_fndecl (fndecl);
1402 label_region *reg
1403 = new label_region (alloc_region_id (), func_reg, label);
1404 m_labels_map.put (label, reg);
1405 return reg;
1408 /* Return the region for EXPR, creating it if necessary. */
1410 const decl_region *
1411 region_model_manager::get_region_for_global (tree expr)
1413 gcc_assert (TREE_CODE (expr) == VAR_DECL);
1415 decl_region **slot = m_globals_map.get (expr);
1416 if (slot)
1417 return *slot;
1418 decl_region *reg
1419 = new decl_region (alloc_region_id (), &m_globals_region, expr);
1420 m_globals_map.put (expr, reg);
1421 return reg;
1424 /* Return the region for an unknown access of type REGION_TYPE,
1425 creating it if necessary.
1426 This is a symbolic_region, where the pointer is an unknown_svalue
1427 of type &REGION_TYPE. */
1429 const region *
1430 region_model_manager::get_unknown_symbolic_region (tree region_type)
1432 tree ptr_type = region_type ? build_pointer_type (region_type) : NULL_TREE;
1433 const svalue *unknown_ptr = get_or_create_unknown_svalue (ptr_type);
1434 return get_symbolic_region (unknown_ptr);
1437 /* Return the region that describes accessing field FIELD of PARENT,
1438 creating it if necessary. */
1440 const region *
1441 region_model_manager::get_field_region (const region *parent, tree field)
1443 gcc_assert (TREE_CODE (field) == FIELD_DECL);
1445 /* (*UNKNOWN_PTR).field is (*UNKNOWN_PTR_OF_&FIELD_TYPE). */
1446 if (parent->symbolic_for_unknown_ptr_p ())
1447 return get_unknown_symbolic_region (TREE_TYPE (field));
1449 field_region::key_t key (parent, field);
1450 if (field_region *reg = m_field_regions.get (key))
1451 return reg;
1453 field_region *field_reg
1454 = new field_region (alloc_region_id (), parent, field);
1455 m_field_regions.put (key, field_reg);
1456 return field_reg;
1459 /* Return the region that describes accessing the element of type
1460 ELEMENT_TYPE at index INDEX of PARENT, creating it if necessary. */
1462 const region *
1463 region_model_manager::get_element_region (const region *parent,
1464 tree element_type,
1465 const svalue *index)
1467 /* (UNKNOWN_PTR[IDX]) is (UNKNOWN_PTR). */
1468 if (parent->symbolic_for_unknown_ptr_p ())
1469 return get_unknown_symbolic_region (element_type);
1471 element_region::key_t key (parent, element_type, index);
1472 if (element_region *reg = m_element_regions.get (key))
1473 return reg;
1475 element_region *element_reg
1476 = new element_region (alloc_region_id (), parent, element_type, index);
1477 m_element_regions.put (key, element_reg);
1478 return element_reg;
1481 /* Return the region that describes accessing the subregion of type
1482 ELEMENT_TYPE at offset BYTE_OFFSET within PARENT, creating it if
1483 necessary. */
1485 const region *
1486 region_model_manager::get_offset_region (const region *parent,
1487 tree type,
1488 const svalue *byte_offset)
1490 /* (UNKNOWN_PTR + OFFSET) is (UNKNOWN_PTR). */
1491 if (parent->symbolic_for_unknown_ptr_p ())
1492 return get_unknown_symbolic_region (type);
1494 /* If BYTE_OFFSET is zero, return PARENT (cast to TYPE if needed). */
1495 if (tree cst_offset = byte_offset->maybe_get_constant ())
1496 if (zerop (cst_offset))
1497 return get_cast_region (parent, type);
1499 /* Fold OFFSET_REGION(OFFSET_REGION(REG, X), Y)
1500 to OFFSET_REGION(REG, (X + Y)). */
1501 if (const offset_region *parent_offset_reg
1502 = parent->dyn_cast_offset_region ())
1504 const svalue *sval_x = parent_offset_reg->get_byte_offset ();
1505 const svalue *sval_sum
1506 = get_or_create_binop (byte_offset->get_type (),
1507 PLUS_EXPR, sval_x, byte_offset);
1508 return get_offset_region (parent->get_parent_region (), type, sval_sum);
1511 offset_region::key_t key (parent, type, byte_offset);
1512 if (offset_region *reg = m_offset_regions.get (key))
1513 return reg;
1515 offset_region *offset_reg
1516 = new offset_region (alloc_region_id (), parent, type, byte_offset);
1517 m_offset_regions.put (key, offset_reg);
1518 return offset_reg;
1521 /* Return the region that describes accessing the subregion of type
1522 TYPE of size BYTE_SIZE_SVAL within PARENT, creating it if necessary. */
1524 const region *
1525 region_model_manager::get_sized_region (const region *parent,
1526 tree type,
1527 const svalue *byte_size_sval)
1529 if (parent->symbolic_for_unknown_ptr_p ())
1530 return get_unknown_symbolic_region (type);
1532 if (byte_size_sval->get_type () != size_type_node)
1533 byte_size_sval = get_or_create_cast (size_type_node, byte_size_sval);
1535 /* If PARENT is already that size, return it. */
1536 const svalue *parent_byte_size_sval = parent->get_byte_size_sval (this);
1537 if (tree parent_size_cst = parent_byte_size_sval->maybe_get_constant ())
1538 if (tree size_cst = byte_size_sval->maybe_get_constant ())
1540 tree comparison
1541 = fold_binary (EQ_EXPR, boolean_type_node, parent_size_cst, size_cst);
1542 if (comparison == boolean_true_node)
1543 return parent;
1546 sized_region::key_t key (parent, type, byte_size_sval);
1547 if (sized_region *reg = m_sized_regions.get (key))
1548 return reg;
1550 sized_region *sized_reg
1551 = new sized_region (alloc_region_id (), parent, type, byte_size_sval);
1552 m_sized_regions.put (key, sized_reg);
1553 return sized_reg;
1556 /* Return the region that describes accessing ORIGINAL_REGION as if
1557 it were of type TYPE, creating it if necessary. */
1559 const region *
1560 region_model_manager::get_cast_region (const region *original_region,
1561 tree type)
1563 /* If types match, return ORIGINAL_REGION. */
1564 if (type == original_region->get_type ())
1565 return original_region;
1567 if (original_region->symbolic_for_unknown_ptr_p ())
1568 return get_unknown_symbolic_region (type);
1570 cast_region::key_t key (original_region, type);
1571 if (cast_region *reg = m_cast_regions.get (key))
1572 return reg;
1574 cast_region *cast_reg
1575 = new cast_region (alloc_region_id (), original_region, type);
1576 m_cast_regions.put (key, cast_reg);
1577 return cast_reg;
1580 /* Return the frame_region for call to FUN from CALLING_FRAME, creating it
1581 if necessary. CALLING_FRAME may be NULL. */
1583 const frame_region *
1584 region_model_manager::get_frame_region (const frame_region *calling_frame,
1585 function *fun)
1587 int index = calling_frame ? calling_frame->get_index () + 1 : 0;
1589 frame_region::key_t key (calling_frame, fun);
1590 if (frame_region *reg = m_frame_regions.get (key))
1591 return reg;
1593 frame_region *frame_reg
1594 = new frame_region (alloc_region_id (), &m_stack_region, calling_frame,
1595 fun, index);
1596 m_frame_regions.put (key, frame_reg);
1597 return frame_reg;
1600 /* Return the region that describes dereferencing SVAL, creating it
1601 if necessary. */
1603 const region *
1604 region_model_manager::get_symbolic_region (const svalue *sval)
1606 symbolic_region::key_t key (&m_root_region, sval);
1607 if (symbolic_region *reg = m_symbolic_regions.get (key))
1608 return reg;
1610 symbolic_region *symbolic_reg
1611 = new symbolic_region (alloc_region_id (), &m_root_region, sval);
1612 m_symbolic_regions.put (key, symbolic_reg);
1613 return symbolic_reg;
1616 /* Return the region that describes accessing STRING_CST, creating it
1617 if necessary. */
1619 const string_region *
1620 region_model_manager::get_region_for_string (tree string_cst)
1622 gcc_assert (TREE_CODE (string_cst) == STRING_CST);
1624 string_region **slot = m_string_map.get (string_cst);
1625 if (slot)
1626 return *slot;
1627 string_region *reg
1628 = new string_region (alloc_region_id (), &m_root_region, string_cst);
1629 m_string_map.put (string_cst, reg);
1630 return reg;
1633 /* Return the region that describes accessing BITS within PARENT as TYPE,
1634 creating it if necessary. */
1636 const region *
1637 region_model_manager::get_bit_range (const region *parent, tree type,
1638 const bit_range &bits)
1640 gcc_assert (parent);
1642 if (parent->symbolic_for_unknown_ptr_p ())
1643 return get_unknown_symbolic_region (type);
1645 bit_range_region::key_t key (parent, type, bits);
1646 if (bit_range_region *reg = m_bit_range_regions.get (key))
1647 return reg;
1649 bit_range_region *bit_range_reg
1650 = new bit_range_region (alloc_region_id (), parent, type, bits);
1651 m_bit_range_regions.put (key, bit_range_reg);
1652 return bit_range_reg;
1655 /* Return the region that describes accessing the IDX-th variadic argument
1656 within PARENT_FRAME, creating it if necessary. */
1658 const var_arg_region *
1659 region_model_manager::get_var_arg_region (const frame_region *parent_frame,
1660 unsigned idx)
1662 gcc_assert (parent_frame);
1664 var_arg_region::key_t key (parent_frame, idx);
1665 if (var_arg_region *reg = m_var_arg_regions.get (key))
1666 return reg;
1668 var_arg_region *var_arg_reg
1669 = new var_arg_region (alloc_region_id (), parent_frame, idx);
1670 m_var_arg_regions.put (key, var_arg_reg);
1671 return var_arg_reg;
1674 /* If we see a tree code we don't know how to handle, rather than
1675 ICE or generate bogus results, create a dummy region, and notify
1676 CTXT so that it can mark the new state as being not properly
1677 modelled. The exploded graph can then stop exploring that path,
1678 since any diagnostics we might issue will have questionable
1679 validity. */
1681 const region *
1682 region_model_manager::
1683 get_region_for_unexpected_tree_code (region_model_context *ctxt,
1684 tree t,
1685 const dump_location_t &loc)
1687 tree type = TYPE_P (t) ? t : TREE_TYPE (t);
1688 region *new_reg
1689 = new unknown_region (alloc_region_id (), &m_root_region, type);
1690 if (ctxt)
1691 ctxt->on_unexpected_tree_code (t, loc);
1692 return new_reg;
1695 /* Return a region describing a heap-allocated block of memory.
1696 Reuse an existing heap_allocated_region if its id is not within
1697 BASE_REGS_IN_USE. */
1699 const region *
1700 region_model_manager::
1701 get_or_create_region_for_heap_alloc (const bitmap &base_regs_in_use)
1703 /* Try to reuse an existing region, if it's unreferenced in the
1704 client state. */
1705 for (auto existing_reg : m_managed_dynamic_regions)
1706 if (!bitmap_bit_p (base_regs_in_use, existing_reg->get_id ()))
1707 if (existing_reg->get_kind () == RK_HEAP_ALLOCATED)
1708 return existing_reg;
1710 /* All existing ones (if any) are in use; create a new one. */
1711 region *reg
1712 = new heap_allocated_region (alloc_region_id (), &m_heap_region);
1713 m_managed_dynamic_regions.safe_push (reg);
1714 return reg;
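/* A note on intent (based on the BASE_REGS_IN_USE interface above): reusing
   regions whose ids are no longer referenced keeps a loop that repeatedly
   calls malloc and free from manufacturing an unbounded number of distinct
   heap_allocated_regions, and hence an unbounded number of distinct
   program states.  */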
1717 /* Return a new region describing a block of memory allocated within FRAME. */
1719 const region *
1720 region_model_manager::create_region_for_alloca (const frame_region *frame)
1722 gcc_assert (frame);
1723 region *reg = new alloca_region (alloc_region_id (), frame);
1724 m_managed_dynamic_regions.safe_push (reg);
1725 return reg;
1728 /* Log OBJ to LOGGER. */
1730 template <typename T>
1731 static void
1732 log_managed_object (logger *logger, const T *obj)
1734 logger->start_log_line ();
1735 pretty_printer *pp = logger->get_printer ();
1736 pp_string (pp, " ");
1737 obj->dump_to_pp (pp, true);
1738 logger->end_log_line ();
1741 /* Specialization for frame_region, which also logs the count of locals
1742 managed by the frame_region. */
1744 template <>
1745 void
1746 log_managed_object (logger *logger, const frame_region *obj)
1748 logger->start_log_line ();
1749 pretty_printer *pp = logger->get_printer ();
1750 pp_string (pp, " ");
1751 obj->dump_to_pp (pp, true);
1752 pp_printf (pp, " [with %i region(s) for locals]", obj->get_num_locals ());
1753 logger->end_log_line ();
1756 /* Dump the number of objects that were managed by UNIQ_MAP to LOGGER.
1757 If SHOW_OBJS is true, also dump the objects themselves. */
1759 template <typename K, typename T>
1760 static void
1761 log_uniq_map (logger *logger, bool show_objs, const char *title,
1762 const hash_map<K, T*> &uniq_map)
1764 logger->log (" # %s: %li", title, (long)uniq_map.elements ());
1765 if (!show_objs)
1766 return;
1767 auto_vec<const T *> vec_objs (uniq_map.elements ());
1768 for (typename hash_map<K, T*>::iterator iter = uniq_map.begin ();
1769 iter != uniq_map.end (); ++iter)
1770 vec_objs.quick_push ((*iter).second);
1772 vec_objs.qsort (T::cmp_ptr_ptr);
1774 unsigned i;
1775 const T *obj;
1776 FOR_EACH_VEC_ELT (vec_objs, i, obj)
1777 log_managed_object<T> (logger, obj);
1780 /* Dump the number of objects that were managed by MAP to LOGGER.
1781 If SHOW_OBJS is true, also dump the objects themselves. */
1783 template <typename T>
1784 static void
1785 log_uniq_map (logger *logger, bool show_objs, const char *title,
1786 const consolidation_map<T> &map)
1788 logger->log (" # %s: %li", title, (long)map.elements ());
1789 if (!show_objs)
1790 return;
1792 auto_vec<const T *> vec_objs (map.elements ());
1793 for (typename consolidation_map<T>::iterator iter = map.begin ();
1794 iter != map.end (); ++iter)
1795 vec_objs.quick_push ((*iter).second);
1797 vec_objs.qsort (T::cmp_ptr_ptr);
1799 unsigned i;
1800 const T *obj;
1801 FOR_EACH_VEC_ELT (vec_objs, i, obj)
1802 log_managed_object<T> (logger, obj);
1805 /* Dump the number of objects of each class that were managed by this
1806 manager to LOGGER.
1807 If SHOW_OBJS is true, also dump the objects themselves. */
1809 void
1810 region_model_manager::log_stats (logger *logger, bool show_objs) const
1812 LOG_SCOPE (logger);
1813 logger->log ("call string consolidation");
1814 m_empty_call_string.recursive_log (logger);
1815 logger->log ("svalue consolidation");
1816 log_uniq_map (logger, show_objs, "constant_svalue", m_constants_map);
1817 log_uniq_map (logger, show_objs, "unknown_svalue", m_unknowns_map);
1818 if (m_unknown_NULL)
1819 log_managed_object (logger, m_unknown_NULL);
1820 log_uniq_map (logger, show_objs, "poisoned_svalue", m_poisoned_values_map);
1821 log_uniq_map (logger, show_objs, "setjmp_svalue", m_setjmp_values_map);
1822 log_uniq_map (logger, show_objs, "initial_svalue", m_initial_values_map);
1823 log_uniq_map (logger, show_objs, "region_svalue", m_pointer_values_map);
1824 log_uniq_map (logger, show_objs, "unaryop_svalue", m_unaryop_values_map);
1825 log_uniq_map (logger, show_objs, "binop_svalue", m_binop_values_map);
1826 log_uniq_map (logger, show_objs, "sub_svalue", m_sub_values_map);
1827 log_uniq_map (logger, show_objs, "repeated_svalue", m_repeated_values_map);
1828 log_uniq_map (logger, show_objs, "bits_within_svalue",
1829 m_bits_within_values_map);
1830 log_uniq_map (logger, show_objs, "unmergeable_svalue",
1831 m_unmergeable_values_map);
1832 log_uniq_map (logger, show_objs, "widening_svalue", m_widening_values_map);
1833 log_uniq_map (logger, show_objs, "compound_svalue", m_compound_values_map);
1834 log_uniq_map (logger, show_objs, "conjured_svalue", m_conjured_values_map);
1835 log_uniq_map (logger, show_objs, "asm_output_svalue",
1836 m_asm_output_values_map);
1837 log_uniq_map (logger, show_objs, "const_fn_result_svalue",
1838 m_const_fn_result_values_map);
1840 logger->log ("max accepted svalue num_nodes: %i",
1841 m_max_complexity.m_num_nodes);
1842 logger->log ("max accepted svalue max_depth: %i",
1843 m_max_complexity.m_max_depth);
1845 logger->log ("region consolidation");
1846 logger->log (" next region id: %i", m_next_region_id);
1847 log_uniq_map (logger, show_objs, "function_region", m_fndecls_map);
1848 log_uniq_map (logger, show_objs, "label_region", m_labels_map);
1849 log_uniq_map (logger, show_objs, "decl_region for globals", m_globals_map);
1850 log_uniq_map (logger, show_objs, "field_region", m_field_regions);
1851 log_uniq_map (logger, show_objs, "element_region", m_element_regions);
1852 log_uniq_map (logger, show_objs, "offset_region", m_offset_regions);
1853 log_uniq_map (logger, show_objs, "sized_region", m_sized_regions);
1854 log_uniq_map (logger, show_objs, "cast_region", m_cast_regions);
1855 log_uniq_map (logger, show_objs, "frame_region", m_frame_regions);
1856 log_uniq_map (logger, show_objs, "symbolic_region", m_symbolic_regions);
1857 log_uniq_map (logger, show_objs, "string_region", m_string_map);
1858 log_uniq_map (logger, show_objs, "bit_range_region", m_bit_range_regions);
1859 log_uniq_map (logger, show_objs, "var_arg_region", m_var_arg_regions);
1860 logger->log (" # managed dynamic regions: %i",
1861 m_managed_dynamic_regions.length ());
1862 m_store_mgr.log_stats (logger, show_objs);
1863 m_range_mgr->log_stats (logger, show_objs);
1866 /* Dump the number of objects of each class that were managed by this
1867 manager to LOGGER.
1868 If SHOW_OBJS is true, also dump the objects themselves.
1869 This is here so it can use log_uniq_map. */
1871 void
1872 store_manager::log_stats (logger *logger, bool show_objs) const
1874 LOG_SCOPE (logger);
1875 log_uniq_map (logger, show_objs, "concrete_binding",
1876 m_concrete_binding_key_mgr);
1877 log_uniq_map (logger, show_objs, "symbolic_binding",
1878 m_symbolic_binding_key_mgr);
1881 /* Emit a warning showing DECL_REG->tracked_p () for use in DejaGnu tests
1882 (using -fdump-analyzer-untracked). */
1884 static void
1885 dump_untracked_region (const decl_region *decl_reg)
1887 tree decl = decl_reg->get_decl ();
1888 if (TREE_CODE (decl) != VAR_DECL)
1889 return;
1890 /* For now, don't emit the status of decls in the constant pool, to avoid
1891 differences in DejaGnu test results between targets that use these vs
1892 those that don't.
1893 (Eventually these decls should probably be untracked and we should test
1894 for that, but that's not stage 4 material). */
1895 if (DECL_IN_CONSTANT_POOL (decl))
1896 return;
1897 warning_at (DECL_SOURCE_LOCATION (decl), 0,
1898 "track %qD: %s",
1899 decl, (decl_reg->tracked_p () ? "yes" : "no"));
1902 /* Implementation of -fdump-analyzer-untracked. */
1904 void
1905 region_model_manager::dump_untracked_regions () const
1907 for (auto iter : m_globals_map)
1909 const decl_region *decl_reg = iter.second;
1910 dump_untracked_region (decl_reg);
1912 for (auto frame_iter : m_frame_regions)
1914 const frame_region *frame_reg = frame_iter.second;
1915 frame_reg->dump_untracked_regions ();
1919 void
1920 frame_region::dump_untracked_regions () const
1922 for (auto iter : m_locals)
1924 const decl_region *decl_reg = iter.second;
1925 dump_untracked_region (decl_reg);
1929 } // namespace ana
1931 #endif /* #if ENABLE_ANALYZER */