/* Consolidation of svalues and regions.
   Copyright (C) 2020-2023 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#define INCLUDE_MEMORY
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "gimple-pretty-print.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "bitmap.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"
#if ENABLE_ANALYZER

namespace ana {

/* class region_model_manager.  */
/* region_model_manager's ctor.  */

region_model_manager::region_model_manager (logger *logger)
: m_logger (logger),
  m_next_symbol_id (0),
  m_empty_call_string (),
  m_root_region (alloc_symbol_id ()),
  m_stack_region (alloc_symbol_id (), &m_root_region),
  m_heap_region (alloc_symbol_id (), &m_root_region),
  m_unknown_NULL (NULL),
  m_checking_feasibility (false),
  m_max_complexity (0, 0),
  m_code_region (alloc_symbol_id (), &m_root_region),
  m_fndecls_map (), m_labels_map (),
  m_globals_region (alloc_symbol_id (), &m_root_region),
  m_globals_map (),
  m_thread_local_region (alloc_symbol_id (), &m_root_region),
  m_errno_region (alloc_symbol_id (), &m_thread_local_region),
  m_store_mgr (this),
  m_range_mgr (new bounded_ranges_manager ()),
  m_known_fn_mgr (logger)
{
}
/* region_model_manager's dtor.  Delete all of the managed svalues
   and regions.  */

region_model_manager::~region_model_manager ()
{
  /* Delete consolidated svalues.  */
  for (constants_map_t::iterator iter = m_constants_map.begin ();
       iter != m_constants_map.end (); ++iter)
    delete (*iter).second;
  for (unknowns_map_t::iterator iter = m_unknowns_map.begin ();
       iter != m_unknowns_map.end (); ++iter)
    delete (*iter).second;
  delete m_unknown_NULL;
  for (poisoned_values_map_t::iterator iter = m_poisoned_values_map.begin ();
       iter != m_poisoned_values_map.end (); ++iter)
    delete (*iter).second;
  for (setjmp_values_map_t::iterator iter = m_setjmp_values_map.begin ();
       iter != m_setjmp_values_map.end (); ++iter)
    delete (*iter).second;
  for (initial_values_map_t::iterator iter = m_initial_values_map.begin ();
       iter != m_initial_values_map.end (); ++iter)
    delete (*iter).second;
  for (pointer_values_map_t::iterator iter = m_pointer_values_map.begin ();
       iter != m_pointer_values_map.end (); ++iter)
    delete (*iter).second;
  for (unaryop_values_map_t::iterator iter = m_unaryop_values_map.begin ();
       iter != m_unaryop_values_map.end (); ++iter)
    delete (*iter).second;
  for (binop_values_map_t::iterator iter = m_binop_values_map.begin ();
       iter != m_binop_values_map.end (); ++iter)
    delete (*iter).second;
  for (sub_values_map_t::iterator iter = m_sub_values_map.begin ();
       iter != m_sub_values_map.end (); ++iter)
    delete (*iter).second;
  for (auto iter : m_repeated_values_map)
    delete iter.second;
  for (auto iter : m_bits_within_values_map)
    delete iter.second;
  for (unmergeable_values_map_t::iterator iter
	 = m_unmergeable_values_map.begin ();
       iter != m_unmergeable_values_map.end (); ++iter)
    delete (*iter).second;
  for (widening_values_map_t::iterator iter = m_widening_values_map.begin ();
       iter != m_widening_values_map.end (); ++iter)
    delete (*iter).second;
  for (compound_values_map_t::iterator iter = m_compound_values_map.begin ();
       iter != m_compound_values_map.end (); ++iter)
    delete (*iter).second;
  for (conjured_values_map_t::iterator iter = m_conjured_values_map.begin ();
       iter != m_conjured_values_map.end (); ++iter)
    delete (*iter).second;
  for (auto iter : m_asm_output_values_map)
    delete iter.second;
  for (auto iter : m_const_fn_result_values_map)
    delete iter.second;

  /* Delete consolidated regions.  */
  for (fndecls_map_t::iterator iter = m_fndecls_map.begin ();
       iter != m_fndecls_map.end (); ++iter)
    delete (*iter).second;
  for (labels_map_t::iterator iter = m_labels_map.begin ();
       iter != m_labels_map.end (); ++iter)
    delete (*iter).second;
  for (globals_map_t::iterator iter = m_globals_map.begin ();
       iter != m_globals_map.end (); ++iter)
    delete (*iter).second;
  for (string_map_t::iterator iter = m_string_map.begin ();
       iter != m_string_map.end (); ++iter)
    delete (*iter).second;

  delete m_range_mgr;
}
/* Return true if C exceeds the complexity limit for svalues.  */

bool
region_model_manager::too_complex_p (const complexity &c) const
{
  if (c.m_max_depth > (unsigned)param_analyzer_max_svalue_depth)
    return true;
  return false;
}
/* If SVAL exceeds the complexity limit for svalues, delete it
   and return true.
   Otherwise update m_max_complexity and return false.  */

bool
region_model_manager::reject_if_too_complex (svalue *sval)
{
  if (m_checking_feasibility)
    return false;

  const complexity &c = sval->get_complexity ();
  if (!too_complex_p (c))
    {
      if (m_max_complexity.m_num_nodes < c.m_num_nodes)
	m_max_complexity.m_num_nodes = c.m_num_nodes;
      if (m_max_complexity.m_max_depth < c.m_max_depth)
	m_max_complexity.m_max_depth = c.m_max_depth;
      return false;
    }

  delete sval;
  return true;
}
/* Macro for imposing a complexity limit on svalues, for use within
   region_model_manager member functions.

   If SVAL exceeds the complexity limit, delete it and return an UNKNOWN
   value of the same type.
   Otherwise update m_max_complexity and carry on.  */

#define RETURN_UNKNOWN_IF_TOO_COMPLEX(SVAL)		\
  do {							\
    svalue *sval_ = (SVAL);				\
    tree type_ = sval_->get_type ();			\
    if (reject_if_too_complex (sval_))			\
      return get_or_create_unknown_svalue (type_);	\
  } while (0)
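
/* E.g. the get_or_create_* members below typically do:

     foo_svalue *foo_sval = new foo_svalue (alloc_symbol_id (), ...);
     RETURN_UNKNOWN_IF_TOO_COMPLEX (foo_sval);
     m_foo_values_map.put (key, foo_sval);
     return foo_sval;

   so that an over-complex candidate svalue is discarded before being
   consolidated, and an unknown_svalue of the same type is handed back
   instead.  (Illustrative sketch; "foo" stands for any of the svalue
   kinds below.)  */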
/* svalue consolidation.  */

/* Return the svalue * for a constant_svalue for CST_EXPR,
   creating it if necessary.
   The constant_svalue instances are reused, based on pointer equality
   of trees.  */

const svalue *
region_model_manager::get_or_create_constant_svalue (tree cst_expr)
{
  gcc_assert (cst_expr);
  gcc_assert (CONSTANT_CLASS_P (cst_expr));

  constant_svalue **slot = m_constants_map.get (cst_expr);
  if (slot)
    return *slot;
  constant_svalue *cst_sval
    = new constant_svalue (alloc_symbol_id (), cst_expr);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval);
  m_constants_map.put (cst_expr, cst_sval);
  return cst_sval;
}
/* Return the svalue * for a constant_svalue for the INTEGER_CST
   for CST of type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_int_cst (tree type,
					     const poly_wide_int_ref &cst)
{
  gcc_assert (type);
  gcc_assert (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type));
  tree tree_cst = wide_int_to_tree (type, cst);
  return get_or_create_constant_svalue (tree_cst);
}
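
/* For instance, get_or_create_int_cst (integer_type_node, 42) builds
   (or reuses) the INTEGER_CST tree for 42 via wide_int_to_tree and then
   consolidates it, so repeated requests yield the same constant_svalue
   instance.  */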
/* Return the svalue * for the constant_svalue for the NULL pointer
   of POINTER_TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_null_ptr (tree pointer_type)
{
  gcc_assert (pointer_type);
  gcc_assert (POINTER_TYPE_P (pointer_type));
  return get_or_create_int_cst (pointer_type, 0);
}
/* Return the svalue * for an unknown_svalue for TYPE (which can be NULL),
   creating it if necessary.
   The unknown_svalue instances are reused, based on pointer equality
   of the types.  */

const svalue *
region_model_manager::get_or_create_unknown_svalue (tree type)
{
  /* Don't create unknown values when doing feasibility testing;
     instead, create a unique svalue.  */
  if (m_checking_feasibility)
    return create_unique_svalue (type);

  /* Special-case NULL, so that the hash_map can use NULL as the
     "empty" value.  */
  if (type == NULL_TREE)
    {
      if (!m_unknown_NULL)
	m_unknown_NULL = new unknown_svalue (alloc_symbol_id (), type);
      return m_unknown_NULL;
    }

  unknown_svalue **slot = m_unknowns_map.get (type);
  if (slot)
    return *slot;
  unknown_svalue *sval = new unknown_svalue (alloc_symbol_id (), type);
  m_unknowns_map.put (type, sval);
  return sval;
}
/* Return a freshly-allocated svalue of TYPE, owned by this manager.  */

const svalue *
region_model_manager::create_unique_svalue (tree type)
{
  svalue *sval = new placeholder_svalue (alloc_symbol_id (), type, "unique");
  m_managed_dynamic_svalues.safe_push (sval);
  return sval;
}
/* Return the svalue * for the initial value of REG, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_initial_value (const region *reg,
						   bool check_poisoned)
{
  if (!reg->can_have_initial_svalue_p () && check_poisoned)
    return get_or_create_poisoned_svalue (POISON_KIND_UNINIT,
					  reg->get_type ());

  /* The initial value of a cast is a cast of the initial value.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    {
      const region *original_reg = cast_reg->get_original_region ();
      return get_or_create_cast (cast_reg->get_type (),
				 get_or_create_initial_value (original_reg));
    }

  /* Simplify:
       INIT_VAL(ELEMENT_REG(STRING_REG, CONSTANT_SVAL))
     to:
       CONSTANT_SVAL(STRING[N]).  */
  if (const element_region *element_reg = reg->dyn_cast_element_region ())
    if (tree cst_idx = element_reg->get_index ()->maybe_get_constant ())
      if (const string_region *string_reg
	    = element_reg->get_parent_region ()->dyn_cast_string_region ())
	if (tree_fits_shwi_p (cst_idx))
	  {
	    HOST_WIDE_INT idx = tree_to_shwi (cst_idx);
	    tree string_cst = string_reg->get_string_cst ();
	    if (idx >= 0 && idx < TREE_STRING_LENGTH (string_cst))
	      {
		int ch = TREE_STRING_POINTER (string_cst)[idx];
		return get_or_create_int_cst (reg->get_type (), ch);
	      }
	  }

  /* INIT_VAL (*UNKNOWN_PTR) -> UNKNOWN_VAL.  */
  if (reg->symbolic_for_unknown_ptr_p ())
    return get_or_create_unknown_svalue (reg->get_type ());

  if (initial_svalue **slot = m_initial_values_map.get (reg))
    return *slot;
  initial_svalue *initial_sval
    = new initial_svalue (alloc_symbol_id (), reg->get_type (), reg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (initial_sval);
  m_initial_values_map.put (reg, initial_sval);
  return initial_sval;
}
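
/* E.g. the initial value of element 1 of the string literal "foo"
   (an element_region within a string_region) folds via the
   string-region case above to the character constant 'o', rather than
   being left as a symbolic initial_svalue.  */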
/* Return the svalue * for R using type TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_setjmp_svalue (const setjmp_record &r,
						   tree type)
{
  setjmp_svalue::key_t key (r, type);
  if (setjmp_svalue **slot = m_setjmp_values_map.get (key))
    return *slot;
  setjmp_svalue *setjmp_sval = new setjmp_svalue (r, alloc_symbol_id (), type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (setjmp_sval);
  m_setjmp_values_map.put (key, setjmp_sval);
  return setjmp_sval;
}
/* Return the svalue * for a poisoned value of KIND and TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_poisoned_svalue (enum poison_kind kind,
						     tree type)
{
  poisoned_svalue::key_t key (kind, type);
  if (poisoned_svalue **slot = m_poisoned_values_map.get (key))
    return *slot;
  poisoned_svalue *poisoned_sval
    = new poisoned_svalue (kind, alloc_symbol_id (), type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (poisoned_sval);
  m_poisoned_values_map.put (key, poisoned_sval);
  return poisoned_sval;
}
/* Return the svalue * for a pointer to POINTEE of type PTR_TYPE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_ptr_svalue (tree ptr_type, const region *pointee)
{
  /* If this is a symbolic region from dereferencing a pointer, and the types
     match, then return the original pointer.  */
  if (const symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region ())
    if (ptr_type == sym_reg->get_pointer ()->get_type ())
      return sym_reg->get_pointer ();

  region_svalue::key_t key (ptr_type, pointee);
  if (region_svalue **slot = m_pointer_values_map.get (key))
    return *slot;
  region_svalue *sval
    = new region_svalue (alloc_symbol_id (), ptr_type, pointee);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
  m_pointer_values_map.put (key, sval);
  return sval;
}
/* Subroutine of region_model_manager::get_or_create_unaryop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_unaryop (tree type, enum tree_code op,
					  const svalue *arg)
{
  /* Ops on "unknown" are also unknown.  */
  if (arg->get_kind () == SK_UNKNOWN)
    return get_or_create_unknown_svalue (type);
  /* Likewise for "poisoned".  */
  else if (const poisoned_svalue *poisoned_sval
	     = arg->dyn_cast_poisoned_svalue ())
    return get_or_create_poisoned_svalue (poisoned_sval->get_poison_kind (),
					  type);

  gcc_assert (arg->can_have_associated_state_p ());

  switch (op)
    {
    default: break;
    case VIEW_CONVERT_EXPR:
    case NOP_EXPR:
      {
	/* Handle redundant casts.  */
	if (arg->get_type ()
	    && useless_type_conversion_p (arg->get_type (), type))
	  return arg;

	/* Fold "cast<TYPE> (cast <INNER_TYPE> (innermost_arg))"
	   to "cast<TYPE> (innermost_arg)",
	   unless INNER_TYPE is narrower than TYPE.  */
	if (const svalue *innermost_arg = arg->maybe_undo_cast ())
	  {
	    tree inner_type = arg->get_type ();
	    if (TYPE_SIZE (type)
		&& TYPE_SIZE (inner_type)
		&& (fold_binary (LE_EXPR, boolean_type_node,
				 TYPE_SIZE (type), TYPE_SIZE (inner_type))
		    == boolean_true_node))
	      return maybe_fold_unaryop (type, op, innermost_arg);
	  }
	/* Avoid creating symbolic regions for pointer casts by
	   simplifying (T*)(&REGION) to ((T*)&REGION).  */
	if (const region_svalue *region_sval = arg->dyn_cast_region_svalue ())
	  if (POINTER_TYPE_P (type)
	      && region_sval->get_type ()
	      && POINTER_TYPE_P (region_sval->get_type ()))
	    return get_ptr_svalue (type, region_sval->get_pointee ());
      }
      break;
    case TRUTH_NOT_EXPR:
      {
	/* Invert comparisons e.g. "!(x == y)" => "x != y".  */
	if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
	  if (TREE_CODE_CLASS (binop->get_op ()) == tcc_comparison)
	    {
	      enum tree_code inv_op
		= invert_tree_comparison (binop->get_op (),
					  HONOR_NANS (binop->get_type ()));
	      if (inv_op != ERROR_MARK)
		return get_or_create_binop (binop->get_type (), inv_op,
					    binop->get_arg0 (),
					    binop->get_arg1 ());
	    }
      }
      break;
    case NEGATE_EXPR:
      {
	/* -(-(VAL)) is VAL, for integer types.  */
	if (const unaryop_svalue *unaryop = arg->dyn_cast_unaryop_svalue ())
	  if (unaryop->get_op () == NEGATE_EXPR
	      && type == unaryop->get_type ()
	      && type
	      && INTEGRAL_TYPE_P (type))
	    return unaryop->get_arg ();
      }
      break;
    }

  /* Constants.  */
  if (tree cst = arg->maybe_get_constant ())
    if (tree result = fold_unary (op, type, cst))
      {
	if (CONSTANT_CLASS_P (result))
	  return get_or_create_constant_svalue (result);

	/* fold_unary can return casts of constants; try to handle them.  */
	if (op != NOP_EXPR
	    && type
	    && TREE_CODE (result) == NOP_EXPR
	    && CONSTANT_CLASS_P (TREE_OPERAND (result, 0)))
	  {
	    const svalue *inner_cst
	      = get_or_create_constant_svalue (TREE_OPERAND (result, 0));
	    return get_or_create_cast (type,
				       get_or_create_cast (TREE_TYPE (result),
							   inner_cst));
	  }
      }

  return NULL;
}
/* Return the svalue * for a unary operation OP on ARG with a result of
   type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unaryop (tree type, enum tree_code op,
					     const svalue *arg)
{
  if (const svalue *folded = maybe_fold_unaryop (type, op, arg))
    return folded;
  unaryop_svalue::key_t key (type, op, arg);
  if (unaryop_svalue **slot = m_unaryop_values_map.get (key))
    return *slot;
  unaryop_svalue *unaryop_sval
    = new unaryop_svalue (alloc_symbol_id (), type, op, arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unaryop_sval);
  m_unaryop_values_map.put (key, unaryop_sval);
  return unaryop_sval;
}
/* Get a tree code for a cast to DST_TYPE from SRC_TYPE.
   Use NOP_EXPR if possible (e.g. to help fold_unary convert casts
   of 0 to (T*) to simple pointer constants), but use FIX_TRUNC_EXPR
   and VIEW_CONVERT_EXPR for cases that fold_unary would otherwise crash
   on.  */

static enum tree_code
get_code_for_cast (tree dst_type, tree src_type)
{
  gcc_assert (dst_type);
  if (!src_type)
    return NOP_EXPR;

  if (SCALAR_FLOAT_TYPE_P (src_type))
    {
      if (TREE_CODE (dst_type) == INTEGER_TYPE)
	return FIX_TRUNC_EXPR;
      else
	return VIEW_CONVERT_EXPR;
    }

  return NOP_EXPR;
}
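
/* E.g. a float-to-int cast gets FIX_TRUNC_EXPR and a float-to-float
   cast gets VIEW_CONVERT_EXPR, whereas integer and pointer casts
   (including casts of the constant 0 to a pointer type) get NOP_EXPR,
   which fold_unary can reduce to a simple constant.  */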
/* Return the svalue * for a cast of ARG to type TYPE, creating it
   if necessary.  */

const svalue *
region_model_manager::get_or_create_cast (tree type, const svalue *arg)
{
  gcc_assert (type);

  /* No-op if the types are the same.  */
  if (type == arg->get_type ())
    return arg;

  /* Don't attempt to handle casts involving vector types for now.  */
  if (VECTOR_TYPE_P (type)
      || (arg->get_type ()
	  && VECTOR_TYPE_P (arg->get_type ())))
    return get_or_create_unknown_svalue (type);

  enum tree_code op = get_code_for_cast (type, arg->get_type ());
  return get_or_create_unaryop (type, op, arg);
}
/* Subroutine of region_model_manager::maybe_fold_binop for handling
   (TYPE)(COMPOUND_SVAL BIT_AND_EXPR CST) that may have been generated by
   optimize_bit_field_compare, where CST is from ARG1.

   Support masking out bits from a compound_svalue for comparing a bitfield
   against a value, as generated by optimize_bit_field_compare for
   BITFIELD == VALUE.

   If COMPOUND_SVAL has a value for the appropriate bits, return it,
   shifted accordingly.
   Otherwise return NULL.  */

const svalue *
region_model_manager::
maybe_undo_optimize_bit_field_compare (tree type,
				       const compound_svalue *compound_sval,
				       tree cst,
				       const svalue *arg1)
{
  if (type != unsigned_char_type_node)
    return NULL;

  const binding_map &map = compound_sval->get_map ();
  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst);
  /* If "mask" is a contiguous range of set bits, see if the
     compound_sval has a value for those bits.  */
  bit_range bits (0, 0);
  if (!bit_range::from_mask (mask, &bits))
    return NULL;

  bit_range bound_bits (bits);
  if (BYTES_BIG_ENDIAN)
    bound_bits = bit_range (BITS_PER_UNIT - bits.get_next_bit_offset (),
			    bits.m_size_in_bits);
  const concrete_binding *conc
    = get_store_manager ()->get_concrete_binding (bound_bits);
  const svalue *sval = map.get (conc);
  if (!sval)
    return NULL;

  /* We have a value;
     shift it by the correct number of bits.  */
  const svalue *lhs = get_or_create_cast (type, sval);
  HOST_WIDE_INT bit_offset = bits.get_start_bit_offset ().to_shwi ();
  const svalue *shift_sval = get_or_create_int_cst (type, bit_offset);
  const svalue *shifted_sval = get_or_create_binop (type, LSHIFT_EXPR,
						    lhs, shift_sval);
  /* Reapply the mask (needed for negative
     signed bitfields).  */
  return get_or_create_binop (type, BIT_AND_EXPR,
			      shifted_sval, arg1);
}
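
/* Worked example (little-endian; hypothetical struct, not from the
   sources): given
     struct st { unsigned char a : 3; unsigned char b : 4; };
   optimize_bit_field_compare can turn "s.b == 5" into a test of
   "(BITS & 0x78) == 0x28".  The mask 0x78 covers bits 3-6, so if
   COMPOUND_SVAL binds bits 3-6 to the value 5 we return
   (((unsigned char)5 << 3) & 0x78), i.e. 0x28, matching what the
   optimized comparison expects.  */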
/* Subroutine of region_model_manager::get_or_create_binop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_binop (tree type, enum tree_code op,
					const svalue *arg0,
					const svalue *arg1)
{
  tree cst0 = arg0->maybe_get_constant ();
  tree cst1 = arg1->maybe_get_constant ();
  /* (CST OP CST).  */
  if (cst0 && cst1)
    {
      if (tree result = fold_binary (op, type, cst0, cst1))
	if (CONSTANT_CLASS_P (result))
	  return get_or_create_constant_svalue (result);
    }

  if ((type && FLOAT_TYPE_P (type))
      || (arg0->get_type () && FLOAT_TYPE_P (arg0->get_type ()))
      || (arg1->get_type () && FLOAT_TYPE_P (arg1->get_type ())))
    return NULL;

  switch (op)
    {
    default:
      break;
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      /* (VAL + 0) -> VAL.  */
      if (cst1 && zerop (cst1))
	return get_or_create_cast (type, arg0);
      break;
    case MINUS_EXPR:
      /* (VAL - 0) -> VAL.  */
      if (cst1 && zerop (cst1))
	return get_or_create_cast (type, arg0);
      /* (0 - VAL) -> -VAL.  */
      if (cst0 && zerop (cst0))
	return get_or_create_unaryop (type, NEGATE_EXPR, arg1);
      /* (X + Y) - X -> Y.  */
      if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
	if (binop->get_op () == PLUS_EXPR)
	  if (binop->get_arg0 () == arg1)
	    return get_or_create_cast (type, binop->get_arg1 ());
      break;
    case MULT_EXPR:
      /* (VAL * 0) -> 0.  */
      if (cst1 && zerop (cst1) && INTEGRAL_TYPE_P (type))
	return get_or_create_constant_svalue (build_int_cst (type, 0));
      /* (VAL * 1) -> VAL.  */
      if (cst1 && integer_onep (cst1))
	/* TODO: we ought to have a cast to TYPE here, but doing so introduces
	   regressions; see PR analyzer/110902.  */
	return arg0;
      break;
    case BIT_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1) && INTEGRAL_TYPE_P (type))
	    /* "(ARG0 & 0)" -> "0".  */
	    return get_or_create_constant_svalue (build_int_cst (type, 0));

	  if (const compound_svalue *compound_sval
		= arg0->dyn_cast_compound_svalue ())
	    if (const svalue *sval
		  = maybe_undo_optimize_bit_field_compare (type,
							   compound_sval,
							   cst1, arg1))
	      return sval;
	}
      if (arg0->get_type () == boolean_type_node
	  && arg1->get_type () == boolean_type_node)
	{
	  /* If both operands are _Bool, then...  */
	  /* ..."(1 & x) -> x".  */
	  if (cst0 && !zerop (cst0))
	    return get_or_create_cast (type, arg1);
	  /* ..."(x & 1) -> x".  */
	  if (cst1 && !zerop (cst1))
	    return get_or_create_cast (type, arg0);
	  /* ..."(0 & x) -> 0".  */
	  if (cst0 && zerop (cst0))
	    return get_or_create_int_cst (type, 0);
	  /* ..."(x & 0) -> 0".  */
	  if (cst1 && zerop (cst1))
	    return get_or_create_int_cst (type, 0);
	}
      break;
    case BIT_IOR_EXPR:
      if (arg0->get_type () == boolean_type_node
	  && arg1->get_type () == boolean_type_node)
	{
	  /* If both operands are _Bool, then...  */
	  /* ..."(1 | x) -> 1".  */
	  if (cst0 && !zerop (cst0))
	    return get_or_create_int_cst (type, 1);
	  /* ..."(x | 1) -> 1".  */
	  if (cst1 && !zerop (cst1))
	    return get_or_create_int_cst (type, 1);
	  /* ..."(0 | x) -> x".  */
	  if (cst0 && zerop (cst0))
	    return get_or_create_cast (type, arg1);
	  /* ..."(x | 0) -> x".  */
	  if (cst1 && zerop (cst1))
	    return get_or_create_cast (type, arg0);
	}
      break;
    case TRUTH_ANDIF_EXPR:
    case TRUTH_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1) && INTEGRAL_TYPE_P (type))
	    /* "(ARG0 && 0)" -> "0".  */
	    return get_or_create_constant_svalue (build_int_cst (type, 0));
	  else
	    /* "(ARG0 && nonzero-cst)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	}
      break;
    case TRUTH_ORIF_EXPR:
    case TRUTH_OR_EXPR:
      if (cst1)
	{
	  if (zerop (cst1))
	    /* "(ARG0 || 0)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	  else
	    /* "(ARG0 || nonzero-cst)" -> "nonzero-cst".  */
	    return get_or_create_cast (type, arg1);
	}
      break;
    }

  /* For associative ops, fold "((X op CST_A) op CST_B)" to
     "X op (CST_A op CST_B)".  */
  if (cst1 && associative_tree_code (op))
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == op
	  && binop->get_arg1 ()->maybe_get_constant ())
	return get_or_create_binop
	  (type, op, binop->get_arg0 (),
	   get_or_create_binop (type, op,
				binop->get_arg1 (), arg1));

  /* associative_tree_code is false for POINTER_PLUS_EXPR, but we
     can fold:
       "((PTR ptr+ CST_A) ptr+ CST_B)" to "PTR ptr+ (CST_A ptr+ CST_B)"
     e.g. in data-model-1.c: test_4c.  */
  if (cst1 && op == POINTER_PLUS_EXPR)
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == POINTER_PLUS_EXPR)
	if (binop->get_arg1 ()->maybe_get_constant ())
	  return get_or_create_binop
	    (type, op, binop->get_arg0 (),
	     get_or_create_binop (size_type_node, op,
				  binop->get_arg1 (), arg1));

  /* Distribute multiplication by a constant through addition/subtraction:
     (X + Y) * CST => (X * CST) + (Y * CST).  */
  if (cst1 && op == MULT_EXPR)
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == PLUS_EXPR
	  || binop->get_op () == MINUS_EXPR)
	{
	  return get_or_create_binop
	    (type, binop->get_op (),
	     get_or_create_binop (type, op,
				  binop->get_arg0 (), arg1),
	     get_or_create_binop (type, op,
				  binop->get_arg1 (), arg1));
	}

  /* etc.  */

  return NULL;
}
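
/* E.g. the associative-op rule above folds "(x + 1) + 2" to "x + 3",
   and the POINTER_PLUS_EXPR rule folds "(p ptr+ 4) ptr+ 4" to
   "p ptr+ 8", keeping chains of offsets from growing without bound.  */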
/* Return the svalue * for a binary operation OP on ARG0 and ARG1
   with a result of type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_binop (tree type, enum tree_code op,
					   const svalue *arg0,
					   const svalue *arg1)
{
  /* For commutative ops, put any constant on the RHS.  */
  if (arg0->maybe_get_constant () && commutative_tree_code (op))
    std::swap (arg0, arg1);

  if (const svalue *folded = maybe_fold_binop (type, op, arg0, arg1))
    return folded;

  /* Ops on "unknown"/"poisoned" are unknown (unless we were able to fold
     it via an identity in maybe_fold_binop).  */
  if (!arg0->can_have_associated_state_p ()
      || !arg1->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  binop_svalue::key_t key (type, op, arg0, arg1);
  if (binop_svalue **slot = m_binop_values_map.get (key))
    return *slot;
  binop_svalue *binop_sval
    = new binop_svalue (alloc_symbol_id (), type, op, arg0, arg1);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (binop_sval);
  m_binop_values_map.put (key, binop_sval);
  return binop_sval;
}
/* Subroutine of region_model_manager::get_or_create_sub_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_sub_svalue (tree type,
					     const svalue *parent_svalue,
					     const region *subregion)
{
  /* Subvalues of "unknown"/"poisoned" are unknown.  */
  if (!parent_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If we have a subregion of a zero-fill, it's zero.  */
  if (const unaryop_svalue *unary
	= parent_svalue->dyn_cast_unaryop_svalue ())
    {
      if (unary->get_op () == NOP_EXPR
	  || unary->get_op () == VIEW_CONVERT_EXPR)
	if (tree cst = unary->get_arg ()->maybe_get_constant ())
	  if (zerop (cst) && type)
	    {
	      const svalue *cst_sval
		= get_or_create_constant_svalue (cst);
	      return get_or_create_cast (type, cst_sval);
	    }
    }

  /* Handle getting individual chars from a STRING_CST.  */
  if (tree cst = parent_svalue->maybe_get_constant ())
    if (TREE_CODE (cst) == STRING_CST)
      {
	/* If we have a concrete 1-byte access within the parent region... */
	byte_range subregion_bytes (0, 0);
	if (subregion->get_relative_concrete_byte_range (&subregion_bytes)
	    && subregion_bytes.m_size_in_bytes == 1
	    && type)
	  {
	    /* ...then attempt to get that char from the STRING_CST.  */
	    HOST_WIDE_INT hwi_start_byte
	      = subregion_bytes.m_start_byte_offset.to_shwi ();
	    tree cst_idx
	      = build_int_cst_type (size_type_node, hwi_start_byte);
	    if (const svalue *char_sval
		  = maybe_get_char_from_string_cst (cst, cst_idx))
	      return get_or_create_cast (type, char_sval);
	  }
      }

  if (const initial_svalue *init_sval
	= parent_svalue->dyn_cast_initial_svalue ())
    {
      /* SUB(INIT(r)).FIELD -> INIT(r.FIELD)
	 i.e.
	 Subvalue(InitialValue(R1), FieldRegion(R2, F))
	 -> InitialValue(FieldRegion(R1, F)).  */
      if (const field_region *field_reg = subregion->dyn_cast_field_region ())
	{
	  const region *field_reg_new
	    = get_field_region (init_sval->get_region (),
				field_reg->get_field ());
	  return get_or_create_initial_value (field_reg_new);
	}
      /* SUB(INIT(r))[ELEMENT] -> INIT(r[ELEMENT])
	 i.e.
	 Subvalue(InitialValue(R1), ElementRegion(R2, IDX))
	 -> InitialValue(ElementRegion(R1, IDX)).  */
      if (const element_region *element_reg = subregion->dyn_cast_element_region ())
	{
	  const region *element_reg_new
	    = get_element_region (init_sval->get_region (),
				  element_reg->get_type (),
				  element_reg->get_index ());
	  return get_or_create_initial_value (element_reg_new);
	}
    }

  if (const repeated_svalue *repeated_sval
	= parent_svalue->dyn_cast_repeated_svalue ())
    if (type)
      return get_or_create_cast (type, repeated_sval->get_inner_svalue ());

  return NULL;
}
/* Return the svalue * for extracting a subvalue of type TYPE from
   PARENT_SVALUE based on SUBREGION, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_sub_svalue (tree type,
						const svalue *parent_svalue,
						const region *subregion)
{
  if (const svalue *folded
	= maybe_fold_sub_svalue (type, parent_svalue, subregion))
    return folded;

  sub_svalue::key_t key (type, parent_svalue, subregion);
  if (sub_svalue **slot = m_sub_values_map.get (key))
    return *slot;
  sub_svalue *sub_sval
    = new sub_svalue (alloc_symbol_id (), type, parent_svalue, subregion);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sub_sval);
  m_sub_values_map.put (key, sub_sval);
  return sub_sval;
}
/* Subroutine of region_model_manager::get_or_create_repeated_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_repeated_svalue (tree type,
						  const svalue *outer_size,
						  const svalue *inner_svalue)
{
  /* Repeated "unknown"/"poisoned" is unknown.  */
  if (!outer_size->can_have_associated_state_p ()
      || !inner_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If INNER_SVALUE is the same size as OUTER_SIZE,
     turn into simply a cast.  */
  if (tree cst_outer_num_bytes = outer_size->maybe_get_constant ())
    {
      HOST_WIDE_INT num_bytes_inner_svalue
	= int_size_in_bytes (inner_svalue->get_type ());
      if (num_bytes_inner_svalue != -1)
	if (num_bytes_inner_svalue
	      == (HOST_WIDE_INT)tree_to_uhwi (cst_outer_num_bytes))
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Handle zero-fill of a specific type.  */
  if (tree cst = inner_svalue->maybe_get_constant ())
    if (zerop (cst) && type)
      return get_or_create_cast (type, inner_svalue);

  return NULL;
}
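
/* E.g. a 4-byte zero-fill of an "int32_t" (such as from a memset of
   zero) folds via the zero-fill case above to CAST (int32_t, 0),
   rather than creating a repeated_svalue.  */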
/* Return the svalue * of type TYPE in which INNER_SVALUE is repeated
   enough times to be of size OUTER_SIZE, creating it if necessary.
   e.g. for filling buffers with a constant value.  */

const svalue *
region_model_manager::get_or_create_repeated_svalue (tree type,
						     const svalue *outer_size,
						     const svalue *inner_svalue)
{
  if (const svalue *folded
	= maybe_fold_repeated_svalue (type, outer_size, inner_svalue))
    return folded;

  repeated_svalue::key_t key (type, outer_size, inner_svalue);
  if (repeated_svalue **slot = m_repeated_values_map.get (key))
    return *slot;
  repeated_svalue *repeated_sval
    = new repeated_svalue (alloc_symbol_id (), type, outer_size, inner_svalue);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (repeated_sval);
  m_repeated_values_map.put (key, repeated_sval);
  return repeated_sval;
}
/* Attempt to get the bit_range for FIELD within a RECORD_TYPE.
   Return true and write the result to OUT if successful.
   Return false otherwise.  */

static bool
get_bit_range_for_field (tree field, bit_range *out)
{
  bit_size_t bit_size;
  if (!int_size_in_bits (TREE_TYPE (field), &bit_size))
    return false;
  int field_bit_offset = int_bit_position (field);
  *out = bit_range (field_bit_offset, bit_size);
  return true;
}
/* Attempt to get the byte_range for FIELD within a RECORD_TYPE.
   Return true and write the result to OUT if successful.
   Return false otherwise.  */

static bool
get_byte_range_for_field (tree field, byte_range *out)
{
  bit_range field_bits (0, 0);
  if (!get_bit_range_for_field (field, &field_bits))
    return false;
  return field_bits.as_byte_range (out);
}
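
/* E.g. for "struct foo { uint32 a; uint32 b; };", field b has the
   bit_range (32, 32) and hence the byte_range (4, 4): bytes 4-7 of
   the struct.  */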
/* Attempt to determine if there is a specific field within RECORD_TYPE
   at BYTES.  If so, return it, and write the location of BYTES relative
   to the field to *OUT_RANGE_WITHIN_FIELD.
   Otherwise, return NULL_TREE.
   For example, given:
     struct foo { uint32 a; uint32 b; };
   and
     bytes = {bytes 6-7} (of foo)
   we have bytes 2-3 of field b.  */

static tree
get_field_at_byte_range (tree record_type, const byte_range &bytes,
			 byte_range *out_range_within_field)
{
  bit_offset_t bit_offset = bytes.m_start_byte_offset * BITS_PER_UNIT;

  tree field = get_field_at_bit_offset (record_type, bit_offset);
  if (!field)
    return NULL_TREE;

  byte_range field_bytes (0,0);
  if (!get_byte_range_for_field (field, &field_bytes))
    return NULL_TREE;

  /* Is BYTES fully within field_bytes?  */
  byte_range bytes_within_field (0,0);
  if (!field_bytes.contains_p (bytes, &bytes_within_field))
    return NULL_TREE;

  *out_range_within_field = bytes_within_field;
  return field;
}
/* Subroutine of region_model_manager::get_or_create_bits_within.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_bits_within_svalue (tree type,
						     const bit_range &bits,
						     const svalue *inner_svalue)
{
  tree inner_type = inner_svalue->get_type ();
  /* Fold:
       BITS_WITHIN ((0, sizeof (VAL)), VAL)
     to:
       CAST (TYPE, VAL).  */
  if (bits.m_start_bit_offset == 0 && inner_type)
    {
      bit_size_t inner_type_size;
      if (int_size_in_bits (inner_type, &inner_type_size))
	if (inner_type_size == bits.m_size_in_bits)
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Kind-specific folding.  */
  if (const svalue *sval
	= inner_svalue->maybe_fold_bits_within (type, bits, this))
    return sval;

  byte_range bytes (0,0);
  if (bits.as_byte_range (&bytes) && inner_type)
    switch (TREE_CODE (inner_type))
      {
      default:
	break;
      case ARRAY_TYPE:
	{
	  /* Fold:
	       BITS_WITHIN (range, KIND(REG))
	     to:
	       BITS_WITHIN (range - offsetof(ELEMENT), KIND(REG.ELEMENT))
	     if RANGE is a byte-range fully within one ELEMENT.  */
	  tree element_type = TREE_TYPE (inner_type);
	  HOST_WIDE_INT element_byte_size
	    = int_size_in_bytes (element_type);
	  if (element_byte_size > 0)
	    {
	      HOST_WIDE_INT start_idx
		= (bytes.get_start_byte_offset ().to_shwi ()
		   / element_byte_size);
	      HOST_WIDE_INT last_idx
		= (bytes.get_last_byte_offset ().to_shwi ()
		   / element_byte_size);
	      if (start_idx == last_idx)
		{
		  if (const initial_svalue *initial_sval
			= inner_svalue->dyn_cast_initial_svalue ())
		    {
		      bit_offset_t start_of_element
			= start_idx * element_byte_size * BITS_PER_UNIT;
		      bit_range bits_within_element
			(bits.m_start_bit_offset - start_of_element,
			 bits.m_size_in_bits);
		      const svalue *idx_sval
			= get_or_create_int_cst (integer_type_node, start_idx);
		      const region *element_reg =
			get_element_region (initial_sval->get_region (),
					    element_type, idx_sval);
		      const svalue *element_reg_sval
			= get_or_create_initial_value (element_reg);
		      return get_or_create_bits_within (type,
							bits_within_element,
							element_reg_sval);
		    }
		}
	    }
	}
	break;
      case RECORD_TYPE:
	{
	  /* Fold:
	       BYTES_WITHIN (range, KIND(REG))
	     to:
	       BYTES_WITHIN (range - offsetof(FIELD), KIND(REG.FIELD))
	     if RANGE is fully within FIELD.  */
	  byte_range bytes_within_field (0, 0);
	  if (tree field = get_field_at_byte_range (inner_type, bytes,
						    &bytes_within_field))
	    {
	      if (const initial_svalue *initial_sval
		    = inner_svalue->dyn_cast_initial_svalue ())
		{
		  const region *field_reg =
		    get_field_region (initial_sval->get_region (), field);
		  const svalue *initial_reg_sval
		    = get_or_create_initial_value (field_reg);
		  return get_or_create_bits_within
		    (type,
		     bytes_within_field.as_bit_range (),
		     initial_reg_sval);
		}
	    }
	}
	break;
      }
  return NULL;
}
/* Return the svalue * of type TYPE for extracting BITS from INNER_SVALUE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_bits_within (tree type,
						 const bit_range &bits,
						 const svalue *inner_svalue)
{
  if (const svalue *folded
	= maybe_fold_bits_within_svalue (type, bits, inner_svalue))
    return folded;

  bits_within_svalue::key_t key (type, bits, inner_svalue);
  if (bits_within_svalue **slot = m_bits_within_values_map.get (key))
    return *slot;
  bits_within_svalue *bits_within_sval
    = new bits_within_svalue (alloc_symbol_id (), type, bits, inner_svalue);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (bits_within_sval);
  m_bits_within_values_map.put (key, bits_within_sval);
  return bits_within_sval;
}
/* Return the svalue * that decorates ARG as being unmergeable,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unmergeable (const svalue *arg)
{
  if (arg->get_kind () == SK_UNMERGEABLE)
    return arg;

  if (unmergeable_svalue **slot = m_unmergeable_values_map.get (arg))
    return *slot;
  unmergeable_svalue *unmergeable_sval
    = new unmergeable_svalue (alloc_symbol_id (), arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unmergeable_sval);
  m_unmergeable_values_map.put (arg, unmergeable_sval);
  return unmergeable_sval;
}
/* Return the svalue * of type TYPE for the merger of value BASE_SVAL
   and ITER_SVAL at POINT, creating it if necessary.  */

const svalue *
region_model_manager::
get_or_create_widening_svalue (tree type,
			       const function_point &point,
			       const svalue *base_sval,
			       const svalue *iter_sval)
{
  gcc_assert (base_sval->get_kind () != SK_WIDENING);
  gcc_assert (iter_sval->get_kind () != SK_WIDENING);
  widening_svalue::key_t key (type, point, base_sval, iter_sval);
  if (widening_svalue **slot = m_widening_values_map.get (key))
    return *slot;
  widening_svalue *widening_sval
    = new widening_svalue (alloc_symbol_id (), type, point, base_sval,
			   iter_sval);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (widening_sval);
  m_widening_values_map.put (key, widening_sval);
  return widening_sval;
}
/* Return the svalue * of type TYPE for the compound values in MAP,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_compound_svalue (tree type,
						     const binding_map &map)
{
  compound_svalue::key_t tmp_key (type, &map);
  if (compound_svalue **slot = m_compound_values_map.get (tmp_key))
    return *slot;
  compound_svalue *compound_sval
    = new compound_svalue (alloc_symbol_id (), type, map);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval);
  /* Use make_key rather than reusing the key, so that we use a
     ptr to compound_sval's binding_map, rather than the MAP param.  */
  m_compound_values_map.put (compound_sval->make_key (), compound_sval);
  return compound_sval;
}
/* class conjured_purge.  */

/* Purge state relating to SVAL.  */

void
conjured_purge::purge (const conjured_svalue *sval) const
{
  m_model->purge_state_involving (sval, m_ctxt);
}
/* Return the svalue * of type TYPE for the value conjured for ID_REG
   at STMT, creating it if necessary.
   Use P to purge existing state from the svalue, for the case where a
   conjured_svalue would be reused along an execution path.  */

const svalue *
region_model_manager::get_or_create_conjured_svalue (tree type,
						     const gimple *stmt,
						     const region *id_reg,
						     const conjured_purge &p)
{
  conjured_svalue::key_t key (type, stmt, id_reg);
  if (conjured_svalue **slot = m_conjured_values_map.get (key))
    {
      const conjured_svalue *sval = *slot;
      /* We're reusing an existing conjured_svalue, perhaps from a different
	 state within this analysis, or perhaps from an earlier state on this
	 execution path.  For the latter, purge any state involving the "new"
	 svalue from the current program_state.  */
      p.purge (sval);
      return sval;
    }
  conjured_svalue *conjured_sval
    = new conjured_svalue (alloc_symbol_id (), type, stmt, id_reg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (conjured_sval);
  m_conjured_values_map.put (key, conjured_sval);
  return conjured_sval;
}
/* Subroutine of region_model_manager::get_or_create_asm_output_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::
maybe_fold_asm_output_svalue (tree type,
			      const vec<const svalue *> &inputs)
{
  /* Unknown inputs should lead to unknown results.  */
  for (const auto &iter : inputs)
    if (iter->get_kind () == SK_UNKNOWN)
      return get_or_create_unknown_svalue (type);

  return NULL;
}
/* Return the svalue * of type TYPE for OUTPUT_IDX of the deterministic
   asm stmt ASM_STMT, given INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const gasm *asm_stmt,
				 unsigned output_idx,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  if (const svalue *folded
	= maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  const char *asm_string = gimple_asm_string (asm_stmt);
  const unsigned noutputs = gimple_asm_noutputs (asm_stmt);

  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (alloc_symbol_id (), type, asm_string, output_idx,
			     noutputs, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}
/* Return the svalue * of type TYPE for OUTPUT_IDX of a deterministic
   asm stmt with string ASM_STRING with NUM_OUTPUTS outputs, given
   INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const char *asm_string,
				 unsigned output_idx,
				 unsigned num_outputs,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  if (const svalue *folded
	= maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (alloc_symbol_id (), type, asm_string, output_idx,
			     num_outputs, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}
/* Return the svalue * of type TYPE for the result of a call to FNDECL
   with __attribute__((const)), given INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_const_fn_result_svalue (tree type,
				      tree fndecl,
				      const vec<const svalue *> &inputs)
{
  gcc_assert (type);
  gcc_assert (fndecl);
  gcc_assert (DECL_P (fndecl));
  gcc_assert (TREE_READONLY (fndecl));
  gcc_assert (inputs.length () <= const_fn_result_svalue::MAX_INPUTS);

  const_fn_result_svalue::key_t key (type, fndecl, inputs);
  if (const_fn_result_svalue **slot = m_const_fn_result_values_map.get (key))
    return *slot;
  const_fn_result_svalue *const_fn_result_sval
    = new const_fn_result_svalue (alloc_symbol_id (), type, fndecl, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (const_fn_result_sval);
  m_const_fn_result_values_map.put (key, const_fn_result_sval);
  return const_fn_result_sval;
}
/* Given STRING_CST (a STRING_CST) and BYTE_OFFSET_CST (a constant),
   attempt to get the character at that offset, returning either
   the svalue for the character constant, or NULL if unsuccessful.  */

const svalue *
region_model_manager::maybe_get_char_from_string_cst (tree string_cst,
						      tree byte_offset_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  /* Adapted from fold_read_from_constant_string.  */
  scalar_int_mode char_mode;
  if (TREE_CODE (byte_offset_cst) == INTEGER_CST
      && compare_tree_int (byte_offset_cst,
			   TREE_STRING_LENGTH (string_cst)) < 0
      && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string_cst))),
		      &char_mode)
      && GET_MODE_SIZE (char_mode) == 1)
    {
      tree char_cst
	= build_int_cst_type (TREE_TYPE (TREE_TYPE (string_cst)),
			      (TREE_STRING_POINTER (string_cst)
			       [TREE_INT_CST_LOW (byte_offset_cst)]));
      return get_or_create_constant_svalue (char_cst);
    }
  return NULL;
}
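
/* E.g. for the STRING_CST "foo" and byte offset 1, this returns the
   constant_svalue for 'o'; offsets at or beyond TREE_STRING_LENGTH
   (which includes the terminating NUL) return NULL.  */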
/* region consolidation.  */

/* Return the region for FNDECL, creating it if necessary.  */

const function_region *
region_model_manager::get_region_for_fndecl (tree fndecl)
{
  gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);

  function_region **slot = m_fndecls_map.get (fndecl);
  if (slot)
    return *slot;
  function_region *reg
    = new function_region (alloc_symbol_id (), &m_code_region, fndecl);
  m_fndecls_map.put (fndecl, reg);
  return reg;
}
/* Return the region for LABEL, creating it if necessary.  */

const label_region *
region_model_manager::get_region_for_label (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  label_region **slot = m_labels_map.get (label);
  if (slot)
    return *slot;

  tree fndecl = DECL_CONTEXT (label);
  gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);

  const function_region *func_reg = get_region_for_fndecl (fndecl);
  label_region *reg
    = new label_region (alloc_symbol_id (), func_reg, label);
  m_labels_map.put (label, reg);
  return reg;
}
/* Return the region for EXPR, creating it if necessary.  */

const decl_region *
region_model_manager::get_region_for_global (tree expr)
{
  gcc_assert (VAR_P (expr));

  decl_region **slot = m_globals_map.get (expr);
  if (slot)
    return *slot;
  decl_region *reg
    = new decl_region (alloc_symbol_id (), &m_globals_region, expr);
  m_globals_map.put (expr, reg);
  return reg;
}
/* Return the region for an unknown access of type REGION_TYPE,
   creating it if necessary.
   This is a symbolic_region, where the pointer is an unknown_svalue
   of type &REGION_TYPE.  */

const region *
region_model_manager::get_unknown_symbolic_region (tree region_type)
{
  tree ptr_type = region_type ? build_pointer_type (region_type) : NULL_TREE;
  const svalue *unknown_ptr = get_or_create_unknown_svalue (ptr_type);
  return get_symbolic_region (unknown_ptr);
}
/* Return the region that describes accessing field FIELD of PARENT,
   creating it if necessary.  */

const region *
region_model_manager::get_field_region (const region *parent, tree field)
{
  gcc_assert (TREE_CODE (field) == FIELD_DECL);

  /* (*UNKNOWN_PTR).field is (*UNKNOWN_PTR_OF_&FIELD_TYPE).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (TREE_TYPE (field));

  field_region::key_t key (parent, field);
  if (field_region *reg = m_field_regions.get (key))
    return reg;

  field_region *field_reg
    = new field_region (alloc_symbol_id (), parent, field);
  m_field_regions.put (key, field_reg);
  return field_reg;
}
/* Return the region that describes accessing the element of type
   ELEMENT_TYPE at index INDEX of PARENT, creating it if necessary.  */

const region *
region_model_manager::get_element_region (const region *parent,
					  tree element_type,
					  const svalue *index)
{
  /* (UNKNOWN_PTR[IDX]) is (UNKNOWN_PTR).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (element_type);

  element_region::key_t key (parent, element_type, index);
  if (element_region *reg = m_element_regions.get (key))
    return reg;

  element_region *element_reg
    = new element_region (alloc_symbol_id (), parent, element_type, index);
  m_element_regions.put (key, element_reg);
  return element_reg;
}
/* Return the region that describes accessing the subregion of type
   TYPE at offset BYTE_OFFSET within PARENT, creating it if
   necessary.  */

const region *
region_model_manager::get_offset_region (const region *parent,
					 tree type,
					 const svalue *byte_offset)
{
  /* (UNKNOWN_PTR + OFFSET) is (UNKNOWN_PTR).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  /* If BYTE_OFFSET is zero, return PARENT, viewed as TYPE.  */
  if (tree cst_offset = byte_offset->maybe_get_constant ())
    if (zerop (cst_offset))
      return get_cast_region (parent, type);

  /* Fold OFFSET_REGION(OFFSET_REGION(REG, X), Y)
     to OFFSET_REGION(REG, (X + Y)).  */
  if (const offset_region *parent_offset_reg
	= parent->dyn_cast_offset_region ())
    {
      const svalue *sval_x = parent_offset_reg->get_byte_offset ();
      const svalue *sval_sum
	= get_or_create_binop (byte_offset->get_type (),
			       PLUS_EXPR, sval_x, byte_offset);
      return get_offset_region (parent->get_parent_region (), type, sval_sum);
    }

  offset_region::key_t key (parent, type, byte_offset);
  if (offset_region *reg = m_offset_regions.get (key))
    return reg;

  offset_region *offset_reg
    = new offset_region (alloc_symbol_id (), parent, type, byte_offset);
  m_offset_regions.put (key, offset_reg);
  return offset_reg;
}
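
/* E.g. OFFSET_REGION (OFFSET_REGION (REG, 4), 4) is consolidated as
   OFFSET_REGION (REG, 8) via the folding above, so equal offsets into
   REG compare pointer-equal.  */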
/* Return the region that describes accessing the subregion of type
   TYPE of size BYTE_SIZE_SVAL within PARENT, creating it if necessary.  */

const region *
region_model_manager::get_sized_region (const region *parent,
					tree type,
					const svalue *byte_size_sval)
{
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  if (byte_size_sval->get_type () != size_type_node)
    byte_size_sval = get_or_create_cast (size_type_node, byte_size_sval);

  /* If PARENT is already that size, return it.  */
  const svalue *parent_byte_size_sval = parent->get_byte_size_sval (this);
  if (tree parent_size_cst = parent_byte_size_sval->maybe_get_constant ())
    if (tree size_cst = byte_size_sval->maybe_get_constant ())
      {
	tree comparison
	  = fold_binary (EQ_EXPR, boolean_type_node, parent_size_cst, size_cst);
	if (comparison == boolean_true_node)
	  return parent;
      }

  sized_region::key_t key (parent, type, byte_size_sval);
  if (sized_region *reg = m_sized_regions.get (key))
    return reg;

  sized_region *sized_reg
    = new sized_region (alloc_symbol_id (), parent, type, byte_size_sval);
  m_sized_regions.put (key, sized_reg);
  return sized_reg;
}
/* Return the region that describes accessing ORIGINAL_REGION as if
   it were of type TYPE, creating it if necessary.  */

const region *
region_model_manager::get_cast_region (const region *original_region,
				       tree type)
{
  /* If types match, return ORIGINAL_REGION.  */
  if (type == original_region->get_type ())
    return original_region;

  if (original_region->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  cast_region::key_t key (original_region, type);
  if (cast_region *reg = m_cast_regions.get (key))
    return reg;

  cast_region *cast_reg
    = new cast_region (alloc_symbol_id (), original_region, type);
  m_cast_regions.put (key, cast_reg);
  return cast_reg;
}
/* Return the frame_region for call to FUN from CALLING_FRAME, creating it
   if necessary.  CALLING_FRAME may be NULL.  */

const frame_region *
region_model_manager::get_frame_region (const frame_region *calling_frame,
					function *fun)
{
  int index = calling_frame ? calling_frame->get_index () + 1 : 0;

  frame_region::key_t key (calling_frame, fun);
  if (frame_region *reg = m_frame_regions.get (key))
    return reg;

  frame_region *frame_reg
    = new frame_region (alloc_symbol_id (), &m_stack_region, calling_frame,
			fun, index);
  m_frame_regions.put (key, frame_reg);
  return frame_reg;
}
/* Return the region that describes dereferencing SVAL, creating it
   if necessary.  */

const region *
region_model_manager::get_symbolic_region (const svalue *sval)
{
  symbolic_region::key_t key (&m_root_region, sval);
  if (symbolic_region *reg = m_symbolic_regions.get (key))
    return reg;

  symbolic_region *symbolic_reg
    = new symbolic_region (alloc_symbol_id (), &m_root_region, sval);
  m_symbolic_regions.put (key, symbolic_reg);
  return symbolic_reg;
}
/* Return the region that describes accessing STRING_CST, creating it
   if necessary.  */

const string_region *
region_model_manager::get_region_for_string (tree string_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  string_region **slot = m_string_map.get (string_cst);
  if (slot)
    return *slot;
  string_region *reg
    = new string_region (alloc_symbol_id (), &m_root_region, string_cst);
  m_string_map.put (string_cst, reg);
  return reg;
}
/* Return the region that describes accessing BITS within PARENT as TYPE,
   creating it if necessary.  */

const region *
region_model_manager::get_bit_range (const region *parent, tree type,
				     const bit_range &bits)
{
  gcc_assert (parent);

  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  bit_range_region::key_t key (parent, type, bits);
  if (bit_range_region *reg = m_bit_range_regions.get (key))
    return reg;

  bit_range_region *bit_range_reg
    = new bit_range_region (alloc_symbol_id (), parent, type, bits);
  m_bit_range_regions.put (key, bit_range_reg);
  return bit_range_reg;
}
/* Return the region that describes accessing the IDX-th variadic argument
   within PARENT_FRAME, creating it if necessary.  */

const var_arg_region *
region_model_manager::get_var_arg_region (const frame_region *parent_frame,
					  unsigned idx)
{
  gcc_assert (parent_frame);

  var_arg_region::key_t key (parent_frame, idx);
  if (var_arg_region *reg = m_var_arg_regions.get (key))
    return reg;

  var_arg_region *var_arg_reg
    = new var_arg_region (alloc_symbol_id (), parent_frame, idx);
  m_var_arg_regions.put (key, var_arg_reg);
  return var_arg_reg;
}
/* If we see a tree code we don't know how to handle, rather than
   ICE or generate bogus results, create a dummy region, and notify
   CTXT so that it can mark the new state as being not properly
   modelled.  The exploded graph can then stop exploring that path,
   since any diagnostics we might issue will have questionable
   validity.  */

const region *
region_model_manager::
get_region_for_unexpected_tree_code (region_model_context *ctxt,
				     tree t,
				     const dump_location_t &loc)
{
  tree type = TYPE_P (t) ? t : TREE_TYPE (t);
  region *new_reg
    = new unknown_region (alloc_symbol_id (), &m_root_region, type);
  if (ctxt)
    ctxt->on_unexpected_tree_code (t, loc);
  return new_reg;
}
/* Return a region describing a heap-allocated block of memory.
   Reuse an existing heap_allocated_region if its id is not within
   BASE_REGS_IN_USE.  */

const region *
region_model_manager::
get_or_create_region_for_heap_alloc (const bitmap &base_regs_in_use)
{
  /* Try to reuse an existing region, if it's unreferenced in the
     client state.  */
  for (auto existing_reg : m_managed_dynamic_regions)
    if (!bitmap_bit_p (base_regs_in_use, existing_reg->get_id ()))
      if (existing_reg->get_kind () == RK_HEAP_ALLOCATED)
	return existing_reg;

  /* All existing ones (if any) are in use; create a new one.  */
  region *reg
    = new heap_allocated_region (alloc_symbol_id (), &m_heap_region);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}

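/* Illustrative scenario: once nothing in the analyzed state still
   refers to an earlier allocation (e.g. after it has been freed and
   the last pointer to it overwritten), its base region id should no
   longer appear in BASE_REGS_IN_USE, so that region can be handed
   out again rather than growing m_managed_dynamic_regions without
   bound.  */
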
/* Return a new region describing a block of memory allocated within FRAME.  */

const region *
region_model_manager::create_region_for_alloca (const frame_region *frame)
{
  gcc_assert (frame);
  region *reg = new alloca_region (alloc_symbol_id (), frame);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}

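/* Note that, unlike get_or_create_region_for_heap_alloc above, no
   reuse is attempted here: each request gets a fresh alloca_region
   within FRAME.  */
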
/* Log OBJ to LOGGER.  */

template <typename T>
static void
log_managed_object (logger *logger, const T *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, " ");
  obj->dump_to_pp (pp, true);
  logger->end_log_line ();
}

/* Specialization for frame_region, which also logs the count of locals
   managed by the frame_region.  */

template <>
void
log_managed_object (logger *logger, const frame_region *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, " ");
  obj->dump_to_pp (pp, true);
  pp_printf (pp, " [with %i region(s) for locals]", obj->get_num_locals ());
  logger->end_log_line ();
}

/* Dump the number of objects that were managed by UNIQ_MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename K, typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const hash_map<K, T*> &uniq_map)
{
  logger->log (" # %s: %li", title, (long)uniq_map.elements ());
  if (!show_objs)
    return;
  auto_vec<const T *> vec_objs (uniq_map.elements ());
  for (typename hash_map<K, T*>::iterator iter = uniq_map.begin ();
       iter != uniq_map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}

/* Dump the number of objects that were managed by MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const consolidation_map<T> &map)
{
  logger->log (" # %s: %li", title, (long)map.elements ());
  if (!show_objs)
    return;

  auto_vec<const T *> vec_objs (map.elements ());
  for (typename consolidation_map<T>::iterator iter = map.begin ();
       iter != map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}

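/* Taken together, the two overloads produce log output of the form
   " # TITLE: COUNT", optionally followed by one log line per managed
   object (as printed by that class's dump_to_pp, in cmp_ptr_ptr
   order); the exact per-object text varies by class.  */
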
/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

void
region_model_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  logger->log ("call string consolidation");
  m_empty_call_string.recursive_log (logger);
  logger->log ("next symbol id: %i", m_next_symbol_id);
  logger->log ("svalue consolidation");
  log_uniq_map (logger, show_objs, "constant_svalue", m_constants_map);
  log_uniq_map (logger, show_objs, "unknown_svalue", m_unknowns_map);
  if (m_unknown_NULL)
    log_managed_object (logger, m_unknown_NULL);
  log_uniq_map (logger, show_objs, "poisoned_svalue", m_poisoned_values_map);
  log_uniq_map (logger, show_objs, "setjmp_svalue", m_setjmp_values_map);
  log_uniq_map (logger, show_objs, "initial_svalue", m_initial_values_map);
  log_uniq_map (logger, show_objs, "region_svalue", m_pointer_values_map);
  log_uniq_map (logger, show_objs, "unaryop_svalue", m_unaryop_values_map);
  log_uniq_map (logger, show_objs, "binop_svalue", m_binop_values_map);
  log_uniq_map (logger, show_objs, "sub_svalue", m_sub_values_map);
  log_uniq_map (logger, show_objs, "repeated_svalue", m_repeated_values_map);
  log_uniq_map (logger, show_objs, "bits_within_svalue",
		m_bits_within_values_map);
  log_uniq_map (logger, show_objs, "unmergeable_svalue",
		m_unmergeable_values_map);
  log_uniq_map (logger, show_objs, "widening_svalue", m_widening_values_map);
  log_uniq_map (logger, show_objs, "compound_svalue", m_compound_values_map);
  log_uniq_map (logger, show_objs, "conjured_svalue", m_conjured_values_map);
  log_uniq_map (logger, show_objs, "asm_output_svalue",
		m_asm_output_values_map);
  log_uniq_map (logger, show_objs, "const_fn_result_svalue",
		m_const_fn_result_values_map);

  logger->log ("max accepted svalue num_nodes: %i",
	       m_max_complexity.m_num_nodes);
  logger->log ("max accepted svalue max_depth: %i",
	       m_max_complexity.m_max_depth);

  logger->log ("region consolidation");
  log_uniq_map (logger, show_objs, "function_region", m_fndecls_map);
  log_uniq_map (logger, show_objs, "label_region", m_labels_map);
  log_uniq_map (logger, show_objs, "decl_region for globals", m_globals_map);
  log_uniq_map (logger, show_objs, "field_region", m_field_regions);
  log_uniq_map (logger, show_objs, "element_region", m_element_regions);
  log_uniq_map (logger, show_objs, "offset_region", m_offset_regions);
  log_uniq_map (logger, show_objs, "sized_region", m_sized_regions);
  log_uniq_map (logger, show_objs, "cast_region", m_cast_regions);
  log_uniq_map (logger, show_objs, "frame_region", m_frame_regions);
  log_uniq_map (logger, show_objs, "symbolic_region", m_symbolic_regions);
  log_uniq_map (logger, show_objs, "string_region", m_string_map);
  log_uniq_map (logger, show_objs, "bit_range_region", m_bit_range_regions);
  log_uniq_map (logger, show_objs, "var_arg_region", m_var_arg_regions);
  logger->log (" # managed dynamic regions: %i",
	       m_managed_dynamic_regions.length ());
  m_store_mgr.log_stats (logger, show_objs);
  m_range_mgr->log_stats (logger, show_objs);
}

/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.
   This is here so it can use log_uniq_map.  */

void
store_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  log_uniq_map (logger, show_objs, "concrete_binding",
		m_concrete_binding_key_mgr);
  log_uniq_map (logger, show_objs, "symbolic_binding",
		m_symbolic_binding_key_mgr);
}

/* Emit a warning showing DECL_REG->tracked_p () for use in DejaGnu tests
   (using -fdump-analyzer-untracked).  */

static void
dump_untracked_region (const decl_region *decl_reg)
{
  tree decl = decl_reg->get_decl ();
  if (TREE_CODE (decl) != VAR_DECL)
    return;
  /* For now, don't emit the status of decls in the constant pool, to avoid
     differences in DejaGnu test results between targets that use these vs
     those that don't.
     (Eventually these decls should probably be untracked and we should test
     for that, but that's not stage 4 material).  */
  if (DECL_IN_CONSTANT_POOL (decl))
    return;
  warning_at (DECL_SOURCE_LOCATION (decl), 0,
	      "track %qD: %s",
	      decl, (decl_reg->tracked_p () ? "yes" : "no"));
}

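/* A hypothetical DejaGnu test compiled with -fdump-analyzer-untracked
   could then match the warning text with a directive along the lines
   of:

     dg-warning "track 'v': yes"

   for a tracked variable "v" (the exact quoting comes from the %qD
   format above).  */
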
/* Implementation of -fdump-analyzer-untracked.  */

void
region_model_manager::dump_untracked_regions () const
{
  for (auto iter : m_globals_map)
    {
      const decl_region *decl_reg = iter.second;
      dump_untracked_region (decl_reg);
    }
  for (auto frame_iter : m_frame_regions)
    {
      const frame_region *frame_reg = frame_iter.second;
      frame_reg->dump_untracked_regions ();
    }
}

void
frame_region::dump_untracked_regions () const
{
  for (auto iter : m_locals)
    {
      const decl_region *decl_reg = iter.second;
      dump_untracked_region (decl_reg);
    }
}

} // namespace ana

#endif /* #if ENABLE_ANALYZER */