c++: only cache constexpr calls that are constant exprs
[official-gcc.git] / gcc / analyzer / region.cc
blob62ae0b2342d250c4f09f76d6ed9b534587290b34
1 /* Regions of memory.
2 Copyright (C) 2019-2023 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #define INCLUDE_MEMORY
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tree.h"
26 #include "diagnostic-core.h"
27 #include "gimple-pretty-print.h"
28 #include "function.h"
29 #include "basic-block.h"
30 #include "gimple.h"
31 #include "gimple-iterator.h"
32 #include "diagnostic-core.h"
33 #include "graphviz.h"
34 #include "options.h"
35 #include "cgraph.h"
36 #include "tree-dfa.h"
37 #include "stringpool.h"
38 #include "convert.h"
39 #include "target.h"
40 #include "fold-const.h"
41 #include "tree-pretty-print.h"
42 #include "diagnostic-color.h"
43 #include "diagnostic-metadata.h"
44 #include "bitmap.h"
45 #include "analyzer/analyzer.h"
46 #include "analyzer/analyzer-logging.h"
47 #include "ordered-hash-map.h"
48 #include "options.h"
49 #include "cgraph.h"
50 #include "cfg.h"
51 #include "digraph.h"
52 #include "analyzer/supergraph.h"
53 #include "sbitmap.h"
54 #include "analyzer/call-string.h"
55 #include "analyzer/program-point.h"
56 #include "analyzer/store.h"
57 #include "analyzer/region.h"
58 #include "analyzer/region-model.h"
59 #include "analyzer/sm.h"
60 #include "analyzer/program-state.h"
62 #if ENABLE_ANALYZER
64 namespace ana {
66 region_offset
67 region_offset::make_byte_offset (const region *base_region,
68 const svalue *num_bytes_sval)
70 if (tree num_bytes_cst = num_bytes_sval->maybe_get_constant ())
72 gcc_assert (TREE_CODE (num_bytes_cst) == INTEGER_CST);
73 bit_offset_t num_bits = wi::to_offset (num_bytes_cst) * BITS_PER_UNIT;
74 return make_concrete (base_region, num_bits);
76 else
78 return make_symbolic (base_region, num_bytes_sval);
82 tree
83 region_offset::calc_symbolic_bit_offset (const region_model &model) const
85 if (symbolic_p ())
87 tree num_bytes_expr = model.get_representative_tree (m_sym_offset);
88 if (!num_bytes_expr)
89 return NULL_TREE;
90 tree bytes_to_bits_scale = build_int_cst (size_type_node, BITS_PER_UNIT);
91 return fold_build2 (MULT_EXPR, size_type_node,
92 num_bytes_expr, bytes_to_bits_scale);
94 else
96 tree cst = wide_int_to_tree (size_type_node, m_offset);
97 return cst;
101 const svalue *
102 region_offset::calc_symbolic_byte_offset (region_model_manager *mgr) const
104 if (symbolic_p ())
105 return m_sym_offset;
106 else
108 byte_offset_t concrete_byte_offset;
109 if (get_concrete_byte_offset (&concrete_byte_offset))
110 return mgr->get_or_create_int_cst (size_type_node,
111 concrete_byte_offset);
112 else
113 /* Can't handle bitfields; return UNKNOWN. */
114 return mgr->get_or_create_unknown_svalue (size_type_node);
118 void
119 region_offset::dump_to_pp (pretty_printer *pp, bool simple) const
121 if (symbolic_p ())
123 /* We don't bother showing the base region. */
124 pp_string (pp, "byte ");
125 m_sym_offset->dump_to_pp (pp, simple);
127 else
129 if (m_offset % BITS_PER_UNIT == 0)
131 pp_string (pp, "byte ");
132 pp_wide_int (pp, m_offset / BITS_PER_UNIT, SIGNED);
134 else
136 pp_string (pp, "bit ");
137 pp_wide_int (pp, m_offset, SIGNED);
142 DEBUG_FUNCTION void
143 region_offset::dump (bool simple) const
145 pretty_printer pp;
146 pp_format_decoder (&pp) = default_tree_printer;
147 pp_show_color (&pp) = pp_show_color (global_dc->printer);
148 pp.buffer->stream = stderr;
149 dump_to_pp (&pp, simple);
150 pp_newline (&pp);
151 pp_flush (&pp);
154 /* An svalue that matches the pattern (BASE * FACTOR) + OFFSET
155 where FACTOR or OFFSET could be the identity (represented as NULL). */
157 struct linear_op
159 linear_op (const svalue *base,
160 const svalue *factor,
161 const svalue *offset)
162 : m_base (base), m_factor (factor), m_offset (offset)
166 bool maybe_get_cst_factor (bit_offset_t *out) const
168 if (m_factor == nullptr)
170 *out = 1;
171 return true;
173 if (tree cst_factor = m_factor->maybe_get_constant ())
175 *out = wi::to_offset (cst_factor);
176 return true;
178 return false;
181 bool maybe_get_cst_offset (bit_offset_t *out) const
183 if (m_offset == nullptr)
185 *out = 0;
186 return true;
188 if (tree cst_offset = m_offset->maybe_get_constant ())
190 *out = wi::to_offset (cst_offset);
191 return true;
193 return false;
196 static tristate
197 less (const linear_op &a, const linear_op &b)
199 /* Same base. */
200 if (a.m_base == b.m_base)
202 bit_offset_t a_wi_factor;
203 bit_offset_t b_wi_factor;
204 if (a.maybe_get_cst_factor (&a_wi_factor)
205 && b.maybe_get_cst_factor (&b_wi_factor))
207 if (a_wi_factor != b_wi_factor)
208 return tristate (a_wi_factor < b_wi_factor);
209 else
211 bit_offset_t a_wi_offset;
212 bit_offset_t b_wi_offset;
213 if (a.maybe_get_cst_offset (&a_wi_offset)
214 && b.maybe_get_cst_offset (&b_wi_offset))
215 return tristate (a_wi_offset < b_wi_offset);
219 return tristate::unknown ();
222 static tristate
223 le (const linear_op &a, const linear_op &b)
225 /* Same base. */
226 if (a.m_base == b.m_base)
228 bit_offset_t a_wi_factor;
229 bit_offset_t b_wi_factor;
230 if (a.maybe_get_cst_factor (&a_wi_factor)
231 && b.maybe_get_cst_factor (&b_wi_factor))
233 if (a_wi_factor != b_wi_factor)
234 return tristate (a_wi_factor <= b_wi_factor);
235 else
237 bit_offset_t a_wi_offset;
238 bit_offset_t b_wi_offset;
239 if (a.maybe_get_cst_offset (&a_wi_offset)
240 && b.maybe_get_cst_offset (&b_wi_offset))
241 return tristate (a_wi_offset <= b_wi_offset);
245 return tristate::unknown ();
248 static bool
249 from_svalue (const svalue &sval, linear_op *out)
251 switch (sval.get_kind ())
253 default:
254 break;
255 case SK_BINOP:
257 const binop_svalue &binop_sval ((const binop_svalue &)sval);
258 if (binop_sval.get_op () == MULT_EXPR)
260 *out = linear_op (binop_sval.get_arg0 (),
261 binop_sval.get_arg1 (),
262 NULL);
263 return true;
265 else if (binop_sval.get_op () == PLUS_EXPR)
267 if (binop_sval.get_arg0 ()->get_kind () == SK_BINOP)
269 const binop_svalue &inner_binop_sval
270 ((const binop_svalue &)*binop_sval.get_arg0 ());
271 if (inner_binop_sval.get_op () == MULT_EXPR)
273 *out = linear_op (inner_binop_sval.get_arg0 (),
274 inner_binop_sval.get_arg1 (),
275 binop_sval.get_arg1 ());
276 return true;
280 *out = linear_op (binop_sval.get_arg0 (),
281 NULL,
282 binop_sval.get_arg1 ());
283 return true;
286 break;
288 return false;
291 const svalue *m_base;
292 const svalue *m_factor;
293 const svalue *m_offset;
296 bool
297 operator< (const region_offset &a, const region_offset &b)
299 if (a.symbolic_p ())
301 if (b.symbolic_p ())
303 /* Symbolic vs symbolic. */
304 const svalue &a_sval = *a.get_symbolic_byte_offset ();
305 const svalue &b_sval = *b.get_symbolic_byte_offset ();
307 linear_op op_a (NULL, NULL, NULL);
308 linear_op op_b (NULL, NULL, NULL);
309 if (linear_op::from_svalue (a_sval, &op_a)
310 && linear_op::from_svalue (b_sval, &op_b))
312 tristate ts = linear_op::less (op_a, op_b);
313 if (ts.is_true ())
314 return true;
315 else if (ts.is_false ())
316 return false;
318 /* Use svalue's deterministic order, for now. */
319 return (svalue::cmp_ptr (a.get_symbolic_byte_offset (),
320 b.get_symbolic_byte_offset ())
321 < 0);
323 else
324 /* Symbolic vs concrete: put all symbolic after all concrete. */
325 return false;
327 else
329 if (b.symbolic_p ())
330 /* Concrete vs symbolic: put all concrete before all symbolic. */
331 return true;
332 else
333 /* Concrete vs concrete. */
334 return a.get_bit_offset () < b.get_bit_offset ();
338 bool
339 operator<= (const region_offset &a, const region_offset &b)
341 if (a.symbolic_p ())
343 if (b.symbolic_p ())
345 /* Symbolic vs symbolic. */
346 const svalue &a_sval = *a.get_symbolic_byte_offset ();
347 const svalue &b_sval = *b.get_symbolic_byte_offset ();
349 linear_op op_a (NULL, NULL, NULL);
350 linear_op op_b (NULL, NULL, NULL);
351 if (linear_op::from_svalue (a_sval, &op_a)
352 && linear_op::from_svalue (b_sval, &op_b))
354 tristate ts = linear_op::le (op_a, op_b);
355 if (ts.is_true ())
356 return true;
357 else if (ts.is_false ())
358 return false;
360 /* Use svalue's deterministic order, for now. */
361 return (svalue::cmp_ptr (a.get_symbolic_byte_offset (),
362 b.get_symbolic_byte_offset ())
363 <= 0);
365 else
366 /* Symbolic vs concrete: put all symbolic after all concrete. */
367 return false;
369 else
371 if (b.symbolic_p ())
372 /* Concrete vs symbolic: put all concrete before all symbolic. */
373 return true;
374 else
375 /* Concrete vs concrete. */
376 return a.get_bit_offset () <= b.get_bit_offset ();
380 bool
381 operator> (const region_offset &a, const region_offset &b)
383 return b < a;
386 bool
387 operator>= (const region_offset &a, const region_offset &b)
389 return b <= a;
392 /* class region and its various subclasses. */
394 /* class region. */
396 region::~region ()
398 delete m_cached_offset;
401 /* Compare REG1 and REG2 by id. */
404 region::cmp_ids (const region *reg1, const region *reg2)
406 return (long)reg1->get_id () - (long)reg2->get_id ();
409 /* Determine the base region for this region: when considering bindings
410 for this region, the base region is the ancestor which identifies
411 which cluster they should be partitioned into.
412 Regions within the same struct/union/array are in the same cluster.
413 Different decls are in different clusters. */
415 const region *
416 region::get_base_region () const
418 const region *iter = this;
419 while (iter)
421 switch (iter->get_kind ())
423 case RK_FIELD:
424 case RK_ELEMENT:
425 case RK_OFFSET:
426 case RK_SIZED:
427 case RK_BIT_RANGE:
428 iter = iter->get_parent_region ();
429 continue;
430 case RK_CAST:
431 iter = iter->dyn_cast_cast_region ()->get_original_region ();
432 continue;
433 default:
434 return iter;
437 return iter;
440 /* Return true if get_base_region() == this for this region. */
442 bool
443 region::base_region_p () const
445 switch (get_kind ())
447 /* Region kinds representing a descendent of a base region. */
448 case RK_FIELD:
449 case RK_ELEMENT:
450 case RK_OFFSET:
451 case RK_SIZED:
452 case RK_CAST:
453 case RK_BIT_RANGE:
454 return false;
456 default:
457 return true;
461 /* Return true if this region is ELDER or one of its descendents. */
463 bool
464 region::descendent_of_p (const region *elder) const
466 const region *iter = this;
467 while (iter)
469 if (iter == elder)
470 return true;
471 if (iter->get_kind () == RK_CAST)
472 iter = iter->dyn_cast_cast_region ()->get_original_region ();
473 else
474 iter = iter->get_parent_region ();
476 return false;
479 /* If this region is a frame_region, or a descendent of one, return it.
480 Otherwise return NULL. */
482 const frame_region *
483 region::maybe_get_frame_region () const
485 const region *iter = this;
486 while (iter)
488 if (const frame_region *frame_reg = iter->dyn_cast_frame_region ())
489 return frame_reg;
490 if (iter->get_kind () == RK_CAST)
491 iter = iter->dyn_cast_cast_region ()->get_original_region ();
492 else
493 iter = iter->get_parent_region ();
495 return NULL;
498 /* Get the memory space of this region. */
500 enum memory_space
501 region::get_memory_space () const
503 const region *iter = this;
504 while (iter)
506 switch (iter->get_kind ())
508 default:
509 break;
510 case RK_GLOBALS:
511 return MEMSPACE_GLOBALS;
512 case RK_CODE:
513 case RK_FUNCTION:
514 case RK_LABEL:
515 return MEMSPACE_CODE;
516 case RK_FRAME:
517 case RK_STACK:
518 case RK_ALLOCA:
519 return MEMSPACE_STACK;
520 case RK_HEAP:
521 case RK_HEAP_ALLOCATED:
522 return MEMSPACE_HEAP;
523 case RK_STRING:
524 return MEMSPACE_READONLY_DATA;
526 if (iter->get_kind () == RK_CAST)
527 iter = iter->dyn_cast_cast_region ()->get_original_region ();
528 else
529 iter = iter->get_parent_region ();
531 return MEMSPACE_UNKNOWN;
534 /* Subroutine for use by region_model_manager::get_or_create_initial_value.
535 Return true if this region has an initial_svalue.
536 Return false if attempting to use INIT_VAL(this_region) should give
537 the "UNINITIALIZED" poison value. */
539 bool
540 region::can_have_initial_svalue_p () const
542 const region *base_reg = get_base_region ();
544 /* Check for memory spaces that are uninitialized by default. */
545 enum memory_space mem_space = base_reg->get_memory_space ();
546 switch (mem_space)
548 default:
549 gcc_unreachable ();
550 case MEMSPACE_UNKNOWN:
551 case MEMSPACE_CODE:
552 case MEMSPACE_GLOBALS:
553 case MEMSPACE_READONLY_DATA:
554 /* Such regions have initial_svalues. */
555 return true;
557 case MEMSPACE_HEAP:
558 /* Heap allocations are uninitialized by default. */
559 return false;
561 case MEMSPACE_STACK:
562 if (tree decl = base_reg->maybe_get_decl ())
564 /* See the assertion in frame_region::get_region_for_local for the
565 tree codes we need to handle here. */
566 switch (TREE_CODE (decl))
568 default:
569 gcc_unreachable ();
571 case PARM_DECL:
572 /* Parameters have initial values. */
573 return true;
575 case VAR_DECL:
576 case RESULT_DECL:
577 /* Function locals don't have initial values. */
578 return false;
580 case SSA_NAME:
582 tree ssa_name = decl;
583 /* SSA names that are the default defn of a PARM_DECL
584 have initial_svalues; other SSA names don't. */
585 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
586 && SSA_NAME_VAR (ssa_name)
587 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == PARM_DECL)
588 return true;
589 else
590 return false;
595 /* If we have an on-stack region that isn't associated with a decl
596 or SSA name, then we have VLA/alloca, which is uninitialized. */
597 return false;
601 /* For regions within a global decl, get the svalue for the initial
602 value of this region when the program starts, caching the result. */
604 const svalue *
605 region::get_initial_value_at_main (region_model_manager *mgr) const
607 if (!m_cached_init_sval_at_main)
608 m_cached_init_sval_at_main = calc_initial_value_at_main (mgr);
609 return m_cached_init_sval_at_main;
612 /* Implementation of region::get_initial_value_at_main. */
614 const svalue *
615 region::calc_initial_value_at_main (region_model_manager *mgr) const
617 const decl_region *base_reg = get_base_region ()->dyn_cast_decl_region ();
618 gcc_assert (base_reg);
620 /* Attempt to get the initializer value for base_reg. */
621 if (const svalue *base_reg_init
622 = base_reg->get_svalue_for_initializer (mgr))
624 if (this == base_reg)
625 return base_reg_init;
626 else
628 /* Get the value for REG within base_reg_init. */
629 binding_cluster c (base_reg);
630 c.bind (mgr->get_store_manager (), base_reg, base_reg_init);
631 const svalue *sval
632 = c.get_any_binding (mgr->get_store_manager (), this);
633 if (sval)
635 if (get_type ())
636 sval = mgr->get_or_create_cast (get_type (), sval);
637 return sval;
642 /* Otherwise, return INIT_VAL(REG). */
643 return mgr->get_or_create_initial_value (this);
646 /* If this region is a decl_region, return the decl.
647 Otherwise return NULL. */
649 tree
650 region::maybe_get_decl () const
652 if (const decl_region *decl_reg = dyn_cast_decl_region ())
653 return decl_reg->get_decl ();
654 return NULL_TREE;
657 /* Get the region_offset for this region (calculating it on the
658 first call and caching it internally). */
660 region_offset
661 region::get_offset (region_model_manager *mgr) const
663 if(!m_cached_offset)
664 m_cached_offset = new region_offset (calc_offset (mgr));
665 return *m_cached_offset;
668 /* Get the region_offset for immediately beyond this region. */
670 region_offset
671 region::get_next_offset (region_model_manager *mgr) const
673 region_offset start = get_offset (mgr);
675 bit_size_t bit_size;
676 if (get_bit_size (&bit_size))
678 if (start.concrete_p ())
680 bit_offset_t next_bit_offset = start.get_bit_offset () + bit_size;
681 return region_offset::make_concrete (start.get_base_region (),
682 next_bit_offset);
686 const svalue *start_byte_offset_sval = start.calc_symbolic_byte_offset (mgr);
687 const svalue *byte_size_sval = get_byte_size_sval (mgr);
688 const svalue *sum_sval
689 = mgr->get_or_create_binop (size_type_node,
690 PLUS_EXPR,
691 start_byte_offset_sval,
692 byte_size_sval);
693 return region_offset::make_symbolic (start.get_base_region (),
694 sum_sval);
697 /* Base class implementation of region::get_byte_size vfunc.
698 If the size of this region (in bytes) is known statically, write it to *OUT
699 and return true.
700 Otherwise return false. */
702 bool
703 region::get_byte_size (byte_size_t *out) const
705 tree type = get_type ();
707 /* Bail out e.g. for heap-allocated regions. */
708 if (!type)
709 return false;
711 HOST_WIDE_INT bytes = int_size_in_bytes (type);
712 if (bytes == -1)
713 return false;
714 *out = bytes;
715 return true;
718 /* Base implementation of region::get_byte_size_sval vfunc. */
720 const svalue *
721 region::get_byte_size_sval (region_model_manager *mgr) const
723 tree type = get_type ();
725 /* Bail out e.g. for heap-allocated regions. */
726 if (!type)
727 return mgr->get_or_create_unknown_svalue (size_type_node);
729 HOST_WIDE_INT bytes = int_size_in_bytes (type);
730 if (bytes == -1)
731 return mgr->get_or_create_unknown_svalue (size_type_node);
733 tree byte_size = size_in_bytes (type);
734 if (TREE_TYPE (byte_size) != size_type_node)
735 byte_size = fold_build1 (NOP_EXPR, size_type_node, byte_size);
736 return mgr->get_or_create_constant_svalue (byte_size);
739 /* Attempt to get the size of TYPE in bits.
740 If successful, return true and write the size to *OUT.
741 Otherwise return false. */
743 bool
744 int_size_in_bits (const_tree type, bit_size_t *out)
746 if (INTEGRAL_TYPE_P (type))
748 *out = TYPE_PRECISION (type);
749 return true;
752 tree sz = TYPE_SIZE (type);
753 if (sz && tree_fits_uhwi_p (sz))
755 *out = TREE_INT_CST_LOW (sz);
756 return true;
758 else
759 return false;
762 /* If the size of this region (in bits) is known statically, write it to *OUT
763 and return true.
764 Otherwise return false. */
766 bool
767 region::get_bit_size (bit_size_t *out) const
769 tree type = get_type ();
771 /* Bail out e.g. for heap-allocated regions. */
772 if (!type)
773 return false;
775 return int_size_in_bits (type, out);
778 /* Get the field within RECORD_TYPE at BIT_OFFSET. */
780 tree
781 get_field_at_bit_offset (tree record_type, bit_offset_t bit_offset)
783 gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);
784 if (bit_offset < 0)
785 return NULL;
787 /* Find the first field that has an offset > BIT_OFFSET,
788 then return the one preceding it.
789 Skip other trees within the chain, such as FUNCTION_DECLs. */
790 tree last_field = NULL_TREE;
791 for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
792 iter = DECL_CHAIN (iter))
794 if (TREE_CODE (iter) == FIELD_DECL)
796 int iter_field_offset = int_bit_position (iter);
797 if (bit_offset < iter_field_offset)
798 return last_field;
799 last_field = iter;
802 return last_field;
805 /* Populate *OUT with descendent regions of type TYPE that match
806 RELATIVE_BIT_OFFSET and SIZE_IN_BITS within this region. */
808 void
809 region::get_subregions_for_binding (region_model_manager *mgr,
810 bit_offset_t relative_bit_offset,
811 bit_size_t size_in_bits,
812 tree type,
813 auto_vec <const region *> *out) const
815 if (get_type () == NULL_TREE || type == NULL_TREE)
816 return;
817 if (relative_bit_offset == 0
818 && types_compatible_p (get_type (), type))
820 out->safe_push (this);
821 return;
823 switch (TREE_CODE (get_type ()))
825 case ARRAY_TYPE:
827 tree element_type = TREE_TYPE (get_type ());
828 HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (element_type);
829 if (hwi_byte_size > 0)
831 HOST_WIDE_INT bits_per_element
832 = hwi_byte_size << LOG2_BITS_PER_UNIT;
833 HOST_WIDE_INT element_index
834 = (relative_bit_offset.to_shwi () / bits_per_element);
835 tree element_index_cst
836 = build_int_cst (integer_type_node, element_index);
837 HOST_WIDE_INT inner_bit_offset
838 = relative_bit_offset.to_shwi () % bits_per_element;
839 const region *subregion = mgr->get_element_region
840 (this, element_type,
841 mgr->get_or_create_constant_svalue (element_index_cst));
842 subregion->get_subregions_for_binding (mgr, inner_bit_offset,
843 size_in_bits, type, out);
846 break;
847 case RECORD_TYPE:
849 /* The bit offset might be *within* one of the fields (such as
850 with nested structs).
851 So we want to find the enclosing field, adjust the offset,
852 and repeat. */
853 if (tree field = get_field_at_bit_offset (get_type (),
854 relative_bit_offset))
856 int field_bit_offset = int_bit_position (field);
857 const region *subregion = mgr->get_field_region (this, field);
858 subregion->get_subregions_for_binding
859 (mgr, relative_bit_offset - field_bit_offset,
860 size_in_bits, type, out);
863 break;
864 case UNION_TYPE:
866 for (tree field = TYPE_FIELDS (get_type ()); field != NULL_TREE;
867 field = DECL_CHAIN (field))
869 if (TREE_CODE (field) != FIELD_DECL)
870 continue;
871 const region *subregion = mgr->get_field_region (this, field);
872 subregion->get_subregions_for_binding (mgr,
873 relative_bit_offset,
874 size_in_bits,
875 type,
876 out);
879 break;
880 default:
881 /* Do nothing. */
882 break;
886 /* Walk from this region up to the base region within its cluster, calculating
887 the offset relative to the base region, either as an offset in bits,
888 or a symbolic offset. */
890 region_offset
891 region::calc_offset (region_model_manager *mgr) const
893 const region *iter_region = this;
894 bit_offset_t accum_bit_offset = 0;
895 const svalue *accum_byte_sval = NULL;
897 while (iter_region)
899 switch (iter_region->get_kind ())
901 case RK_FIELD:
902 case RK_ELEMENT:
903 case RK_OFFSET:
904 case RK_BIT_RANGE:
905 if (accum_byte_sval)
907 const svalue *sval
908 = iter_region->get_relative_symbolic_offset (mgr);
909 accum_byte_sval
910 = mgr->get_or_create_binop (sval->get_type (), PLUS_EXPR,
911 accum_byte_sval, sval);
912 iter_region = iter_region->get_parent_region ();
914 else
916 bit_offset_t rel_bit_offset;
917 if (iter_region->get_relative_concrete_offset (&rel_bit_offset))
919 accum_bit_offset += rel_bit_offset;
920 iter_region = iter_region->get_parent_region ();
922 else
924 /* If the iter_region is not concrete anymore, convert the
925 accumulated bits to a svalue in bytes and revisit the
926 iter_region collecting the symbolic value. */
927 byte_offset_t byte_offset = accum_bit_offset / BITS_PER_UNIT;
928 tree offset_tree = wide_int_to_tree (integer_type_node,
929 byte_offset);
930 accum_byte_sval
931 = mgr->get_or_create_constant_svalue (offset_tree);
934 continue;
935 case RK_SIZED:
936 iter_region = iter_region->get_parent_region ();
937 continue;
939 case RK_CAST:
941 const cast_region *cast_reg
942 = as_a <const cast_region *> (iter_region);
943 iter_region = cast_reg->get_original_region ();
945 continue;
947 default:
948 return accum_byte_sval
949 ? region_offset::make_symbolic (iter_region,
950 accum_byte_sval)
951 : region_offset::make_concrete (iter_region,
952 accum_bit_offset);
956 return accum_byte_sval ? region_offset::make_symbolic (iter_region,
957 accum_byte_sval)
958 : region_offset::make_concrete (iter_region,
959 accum_bit_offset);
962 /* Base implementation of region::get_relative_concrete_offset vfunc. */
964 bool
965 region::get_relative_concrete_offset (bit_offset_t *) const
967 return false;
970 /* Base implementation of region::get_relative_symbolic_offset vfunc. */
972 const svalue *
973 region::get_relative_symbolic_offset (region_model_manager *mgr) const
975 return mgr->get_or_create_unknown_svalue (ptrdiff_type_node);
978 /* Attempt to get the position and size of this region expressed as a
979 concrete range of bytes relative to its parent.
980 If successful, return true and write to *OUT.
981 Otherwise return false. */
983 bool
984 region::get_relative_concrete_byte_range (byte_range *out) const
986 /* We must have a concrete offset relative to the parent. */
987 bit_offset_t rel_bit_offset;
988 if (!get_relative_concrete_offset (&rel_bit_offset))
989 return false;
990 /* ...which must be a whole number of bytes. */
991 if (rel_bit_offset % BITS_PER_UNIT != 0)
992 return false;
993 byte_offset_t start_byte_offset = rel_bit_offset / BITS_PER_UNIT;
995 /* We must have a concrete size, which must be a whole number
996 of bytes. */
997 byte_size_t num_bytes;
998 if (!get_byte_size (&num_bytes))
999 return false;
1001 /* Success. */
1002 *out = byte_range (start_byte_offset, num_bytes);
1003 return true;
1006 /* Dump a description of this region to stderr. */
1008 DEBUG_FUNCTION void
1009 region::dump (bool simple) const
1011 pretty_printer pp;
1012 pp_format_decoder (&pp) = default_tree_printer;
1013 pp_show_color (&pp) = pp_show_color (global_dc->printer);
1014 pp.buffer->stream = stderr;
1015 dump_to_pp (&pp, simple);
1016 pp_newline (&pp);
1017 pp_flush (&pp);
1020 /* Return a new json::string describing the region. */
1022 json::value *
1023 region::to_json () const
1025 label_text desc = get_desc (true);
1026 json::value *reg_js = new json::string (desc.get ());
1027 return reg_js;
1030 /* Generate a description of this region. */
1032 DEBUG_FUNCTION label_text
1033 region::get_desc (bool simple) const
1035 pretty_printer pp;
1036 pp_format_decoder (&pp) = default_tree_printer;
1037 dump_to_pp (&pp, simple);
1038 return label_text::take (xstrdup (pp_formatted_text (&pp)));
1041 /* Base implementation of region::accept vfunc.
1042 Subclass implementations should chain up to this. */
1044 void
1045 region::accept (visitor *v) const
1047 v->visit_region (this);
1048 if (m_parent)
1049 m_parent->accept (v);
1052 /* Return true if this is a symbolic region for deferencing an
1053 unknown ptr.
1054 We shouldn't attempt to bind values for this region (but
1055 can unbind values for other regions). */
1057 bool
1058 region::symbolic_for_unknown_ptr_p () const
1060 if (const symbolic_region *sym_reg = dyn_cast_symbolic_region ())
1061 if (sym_reg->get_pointer ()->get_kind () == SK_UNKNOWN)
1062 return true;
1063 return false;
1066 /* Return true if this is a symbolic region. */
1068 bool
1069 region::symbolic_p () const
1071 return get_kind () == RK_SYMBOLIC;
1074 /* Return true if this region is known to be zero bits in size. */
1076 bool
1077 region::empty_p () const
1079 bit_size_t num_bits;
1080 if (get_bit_size (&num_bits))
1081 if (num_bits == 0)
1082 return true;
1083 return false;
1086 /* Return true if this is a region for a decl with name DECL_NAME.
1087 Intended for use when debugging (for assertions and conditional
1088 breakpoints). */
1090 DEBUG_FUNCTION bool
1091 region::is_named_decl_p (const char *decl_name) const
1093 if (tree decl = maybe_get_decl ())
1094 if (DECL_NAME (decl)
1095 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (decl)), decl_name))
1096 return true;
1097 return false;
1100 /* region's ctor. */
1102 region::region (complexity c, unsigned id, const region *parent, tree type)
1103 : m_complexity (c), m_id (id), m_parent (parent), m_type (type),
1104 m_cached_offset (NULL), m_cached_init_sval_at_main (NULL)
1106 gcc_assert (type == NULL_TREE || TYPE_P (type));
1109 /* Comparator for use by vec<const region *>::qsort,
1110 using their IDs to order them. */
1113 region::cmp_ptr_ptr (const void *p1, const void *p2)
1115 const region * const *reg1 = (const region * const *)p1;
1116 const region * const *reg2 = (const region * const *)p2;
1118 return cmp_ids (*reg1, *reg2);
1121 /* Determine if a pointer to this region must be non-NULL.
1123 Generally, pointers to regions must be non-NULL, but pointers
1124 to symbolic_regions might, in fact, be NULL.
1126 This allows us to simulate functions like malloc and calloc with:
1127 - only one "outcome" from each statement,
1128 - the idea that the pointer is on the heap if non-NULL
1129 - the possibility that the pointer could be NULL
1130 - the idea that successive values returned from malloc are non-equal
1131 - to be able to zero-fill for calloc. */
1133 bool
1134 region::non_null_p () const
1136 switch (get_kind ())
1138 default:
1139 return true;
1140 case RK_SYMBOLIC:
1141 /* Are we within a symbolic_region? If so, it could be NULL, and we
1142 have to fall back on the constraints. */
1143 return false;
1144 case RK_HEAP_ALLOCATED:
1145 return false;
1149 /* Return true iff this region is defined in terms of SVAL. */
1151 bool
1152 region::involves_p (const svalue *sval) const
1154 if (const symbolic_region *symbolic_reg = dyn_cast_symbolic_region ())
1156 if (symbolic_reg->get_pointer ()->involves_p (sval))
1157 return true;
1160 return false;
1163 /* Comparator for trees to impose a deterministic ordering on
1164 T1 and T2. */
1166 static int
1167 tree_cmp (const_tree t1, const_tree t2)
1169 gcc_assert (t1);
1170 gcc_assert (t2);
1172 /* Test tree codes first. */
1173 if (TREE_CODE (t1) != TREE_CODE (t2))
1174 return TREE_CODE (t1) - TREE_CODE (t2);
1176 /* From this point on, we know T1 and T2 have the same tree code. */
1178 if (DECL_P (t1))
1180 if (DECL_NAME (t1) && DECL_NAME (t2))
1181 return strcmp (IDENTIFIER_POINTER (DECL_NAME (t1)),
1182 IDENTIFIER_POINTER (DECL_NAME (t2)));
1183 else
1185 if (DECL_NAME (t1))
1186 return -1;
1187 else if (DECL_NAME (t2))
1188 return 1;
1189 else
1190 return DECL_UID (t1) - DECL_UID (t2);
1194 switch (TREE_CODE (t1))
1196 case SSA_NAME:
1198 if (SSA_NAME_VAR (t1) && SSA_NAME_VAR (t2))
1200 int var_cmp = tree_cmp (SSA_NAME_VAR (t1), SSA_NAME_VAR (t2));
1201 if (var_cmp)
1202 return var_cmp;
1203 return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
1205 else
1207 if (SSA_NAME_VAR (t1))
1208 return -1;
1209 else if (SSA_NAME_VAR (t2))
1210 return 1;
1211 else
1212 return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
1215 break;
1217 case INTEGER_CST:
1218 return tree_int_cst_compare (t1, t2);
1220 case REAL_CST:
1222 const real_value *rv1 = TREE_REAL_CST_PTR (t1);
1223 const real_value *rv2 = TREE_REAL_CST_PTR (t2);
1224 if (real_compare (UNORDERED_EXPR, rv1, rv2))
1226 /* Impose an arbitrary order on NaNs relative to other NaNs
1227 and to non-NaNs. */
1228 if (int cmp_isnan = real_isnan (rv1) - real_isnan (rv2))
1229 return cmp_isnan;
1230 if (int cmp_issignaling_nan
1231 = real_issignaling_nan (rv1) - real_issignaling_nan (rv2))
1232 return cmp_issignaling_nan;
1233 return real_isneg (rv1) - real_isneg (rv2);
1235 if (real_compare (LT_EXPR, rv1, rv2))
1236 return -1;
1237 if (real_compare (GT_EXPR, rv1, rv2))
1238 return 1;
1239 return 0;
1242 case STRING_CST:
1243 return strcmp (TREE_STRING_POINTER (t1),
1244 TREE_STRING_POINTER (t2));
1246 default:
1247 gcc_unreachable ();
1248 break;
1251 gcc_unreachable ();
1253 return 0;
1256 /* qsort comparator for trees to impose a deterministic ordering on
1257 P1 and P2. */
1260 tree_cmp (const void *p1, const void *p2)
1262 const_tree t1 = *(const_tree const *)p1;
1263 const_tree t2 = *(const_tree const *)p2;
1265 return tree_cmp (t1, t2);
1268 /* class frame_region : public space_region. */
1270 frame_region::~frame_region ()
1272 for (map_t::iterator iter = m_locals.begin ();
1273 iter != m_locals.end ();
1274 ++iter)
1275 delete (*iter).second;
1278 void
1279 frame_region::accept (visitor *v) const
1281 region::accept (v);
1282 if (m_calling_frame)
1283 m_calling_frame->accept (v);
1286 /* Implementation of region::dump_to_pp vfunc for frame_region. */
1288 void
1289 frame_region::dump_to_pp (pretty_printer *pp, bool simple) const
1291 if (simple)
1292 pp_printf (pp, "frame: %qs@%i", function_name (m_fun), get_stack_depth ());
1293 else
1294 pp_printf (pp, "frame_region(%qs, index: %i, depth: %i)",
1295 function_name (m_fun), m_index, get_stack_depth ());
/* Get (lazily creating and caching in m_locals) the decl_region for
   local EXPR within this frame.  */

const decl_region *
frame_region::get_region_for_local (region_model_manager *mgr,
				    tree expr,
				    const region_model_context *ctxt) const
{
  if (CHECKING_P)
    {
      /* Verify that EXPR is a local or SSA name, and that it's for the
	 correct function for this stack frame.  */
      gcc_assert (TREE_CODE (expr) == PARM_DECL
		  || TREE_CODE (expr) == VAR_DECL
		  || TREE_CODE (expr) == SSA_NAME
		  || TREE_CODE (expr) == RESULT_DECL);
      switch (TREE_CODE (expr))
	{
	default:
	  gcc_unreachable ();
	case VAR_DECL:
	  gcc_assert (!is_global_var (expr));
	  /* Fall through.  */
	case PARM_DECL:
	case RESULT_DECL:
	  gcc_assert (DECL_CONTEXT (expr) == m_fun->decl);
	  break;
	case SSA_NAME:
	  {
	    if (tree var = SSA_NAME_VAR (expr))
	      {
		if (DECL_P (var))
		  gcc_assert (DECL_CONTEXT (var) == m_fun->decl);
	      }
	    else if (ctxt)
	      /* An anonymous SSA name: check via the supergraph (when
		 available) that its def-stmt is in this function.  */
	      if (const extrinsic_state *ext_state = ctxt->get_ext_state ())
		if (const supergraph *sg
		      = ext_state->get_engine ()->get_supergraph ())
		  {
		    const gimple *def_stmt = SSA_NAME_DEF_STMT (expr);
		    const supernode *snode
		      = sg->get_supernode_for_stmt (def_stmt);
		    gcc_assert (snode->get_function () == m_fun);
		  }
	  }
	  break;
	}
    }

  /* Ideally we'd use mutable here.  */
  map_t &mutable_locals = const_cast <map_t &> (m_locals);

  /* Reuse a previously-created region for EXPR, if any.  */
  if (decl_region **slot = mutable_locals.get (expr))
    return *slot;
  decl_region *reg
    = new decl_region (mgr->alloc_region_id (), this, expr);
  mutable_locals.put (expr, reg);
  return reg;
}
1355 /* class globals_region : public space_region. */
1357 /* Implementation of region::dump_to_pp vfunc for globals_region. */
1359 void
1360 globals_region::dump_to_pp (pretty_printer *pp, bool simple) const
1362 if (simple)
1363 pp_string (pp, "::");
1364 else
1365 pp_string (pp, "globals");
1368 /* class code_region : public map_region. */
1370 /* Implementation of region::dump_to_pp vfunc for code_region. */
1372 void
1373 code_region::dump_to_pp (pretty_printer *pp, bool simple) const
1375 if (simple)
1376 pp_string (pp, "code region");
1377 else
1378 pp_string (pp, "code_region()");
1381 /* class function_region : public region. */
1383 /* Implementation of region::dump_to_pp vfunc for function_region. */
1385 void
1386 function_region::dump_to_pp (pretty_printer *pp, bool simple) const
1388 if (simple)
1390 dump_quoted_tree (pp, m_fndecl);
1392 else
1394 pp_string (pp, "function_region(");
1395 dump_quoted_tree (pp, m_fndecl);
1396 pp_string (pp, ")");
1400 /* class label_region : public region. */
1402 /* Implementation of region::dump_to_pp vfunc for label_region. */
1404 void
1405 label_region::dump_to_pp (pretty_printer *pp, bool simple) const
1407 if (simple)
1409 dump_quoted_tree (pp, m_label);
1411 else
1413 pp_string (pp, "label_region(");
1414 dump_quoted_tree (pp, m_label);
1415 pp_string (pp, ")");
1419 /* class stack_region : public region. */
1421 /* Implementation of region::dump_to_pp vfunc for stack_region. */
1423 void
1424 stack_region::dump_to_pp (pretty_printer *pp, bool simple) const
1426 if (simple)
1427 pp_string (pp, "stack region");
1428 else
1429 pp_string (pp, "stack_region()");
1432 /* class heap_region : public region. */
1434 /* Implementation of region::dump_to_pp vfunc for heap_region. */
1436 void
1437 heap_region::dump_to_pp (pretty_printer *pp, bool simple) const
1439 if (simple)
1440 pp_string (pp, "heap region");
1441 else
1442 pp_string (pp, "heap_region()");
/* class root_region : public region.  */

/* root_region's ctor.  The root has no parent region and no type;
   its complexity is the base case (1, 1).  */

root_region::root_region (unsigned id)
: region (complexity (1, 1), id, NULL, NULL_TREE)
{
}
1454 /* Implementation of region::dump_to_pp vfunc for root_region. */
1456 void
1457 root_region::dump_to_pp (pretty_printer *pp, bool simple) const
1459 if (simple)
1460 pp_string (pp, "root region");
1461 else
1462 pp_string (pp, "root_region()");
1465 /* class thread_local_region : public space_region. */
1467 void
1468 thread_local_region::dump_to_pp (pretty_printer *pp, bool simple) const
1470 if (simple)
1471 pp_string (pp, "thread_local_region");
1472 else
1473 pp_string (pp, "thread_local_region()");
/* class symbolic_region : public map_region.  */

/* symbolic_region's ctor.  SVAL_PTR is the pointer being dereferenced;
   the region's type is the pointed-to type of SVAL_PTR's type (or
   NULL_TREE if the pointer is untyped).  */

symbolic_region::symbolic_region (unsigned id, region *parent,
				  const svalue *sval_ptr)
: region (complexity::from_pair (parent, sval_ptr), id, parent,
	  (sval_ptr->get_type ()
	   ? TREE_TYPE (sval_ptr->get_type ())
	   : NULL_TREE)),
  m_sval_ptr (sval_ptr)
{
}
/* Implementation of region::accept vfunc for symbolic_region.  */

void
symbolic_region::accept (visitor *v) const
{
  region::accept (v);
  /* Also visit the pointer svalue that this region dereferences.  */
  m_sval_ptr->accept (v);
}
/* Implementation of region::dump_to_pp vfunc for symbolic_region.  */

void
symbolic_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    {
      /* Print as a C-style dereference: "(*PTR)".  */
      pp_string (pp, "(*");
      m_sval_ptr->dump_to_pp (pp, simple);
      pp_string (pp, ")");
    }
  else
    {
      pp_string (pp, "symbolic_region(");
      get_parent_region ()->dump_to_pp (pp, simple);
      /* The type is optional; only print it when known.  */
      if (get_type ())
	{
	  pp_string (pp, ", ");
	  print_quoted_type (pp, get_type ());
	}
      pp_string (pp, ", ");
      m_sval_ptr->dump_to_pp (pp, simple);
      pp_string (pp, ")");
    }
}
1525 /* class decl_region : public region. */
1527 /* Implementation of region::dump_to_pp vfunc for decl_region. */
1529 void
1530 decl_region::dump_to_pp (pretty_printer *pp, bool simple) const
1532 if (simple)
1533 pp_printf (pp, "%E", m_decl);
1534 else
1536 pp_string (pp, "decl_region(");
1537 get_parent_region ()->dump_to_pp (pp, simple);
1538 pp_string (pp, ", ");
1539 print_quoted_type (pp, get_type ());
1540 pp_printf (pp, ", %qE)", m_decl);
1544 /* Get the stack depth for the frame containing this decl, or 0
1545 for a global. */
1548 decl_region::get_stack_depth () const
1550 if (get_parent_region () == NULL)
1551 return 0;
1552 if (const frame_region *frame_reg
1553 = get_parent_region ()->dyn_cast_frame_region ())
1554 return frame_reg->get_stack_depth ();
1555 return 0;
/* If the underlying decl is in the global constant pool,
   return an svalue representing the constant value.
   Otherwise return NULL.  */

const svalue *
decl_region::maybe_get_constant_value (region_model_manager *mgr) const
{
  /* Only variables in the constant pool with a CONSTRUCTOR initializer
     are handled here.  */
  if (VAR_P (m_decl)
      && DECL_IN_CONSTANT_POOL (m_decl)
      && DECL_INITIAL (m_decl)
      && TREE_CODE (DECL_INITIAL (m_decl)) == CONSTRUCTOR)
    return get_svalue_for_constructor (DECL_INITIAL (m_decl), mgr);
  return NULL;
}
/* Implementation of decl_region::get_svalue_for_constructor
   for when the cached value hasn't yet been calculated.  */

const svalue *
decl_region::calc_svalue_for_constructor (tree ctor,
					  region_model_manager *mgr) const
{
  /* Create a binding map, applying ctor to it, using this
     decl_region as the base region when building child regions
     for offset calculations.  */
  binding_map map;
  /* If the ctor can't be applied (e.g. unhandled constructs),
     conservatively fall back to "unknown".  */
  if (!map.apply_ctor_to_region (this, ctor, mgr))
    return mgr->get_or_create_unknown_svalue (get_type ());

  /* Return a compound svalue for the map we built.  */
  return mgr->get_or_create_compound_svalue (get_type (), map);
}
/* Get an svalue for CTOR, a CONSTRUCTOR for this region's decl.
   The result is computed once and cached in m_ctor_svalue.  */

const svalue *
decl_region::get_svalue_for_constructor (tree ctor,
					 region_model_manager *mgr) const
{
  gcc_assert (!TREE_CLOBBER_P (ctor));
  gcc_assert (ctor == DECL_INITIAL (m_decl));

  /* Lazily compute and cache the svalue.  */
  if (!m_ctor_svalue)
    m_ctor_svalue = calc_svalue_for_constructor (ctor, mgr);

  return m_ctor_svalue;
}
/* For use on decl_regions for global variables.

   Get an svalue for the initial value of this region at entry to
   "main" (either based on DECL_INITIAL, or implicit initialization to
   zero.

   Return NULL if there is a problem.  */

const svalue *
decl_region::get_svalue_for_initializer (region_model_manager *mgr) const
{
  tree init = DECL_INITIAL (m_decl);
  if (!init)
    {
      /* If we have an "extern" decl then there may be an initializer in
	 another TU.  */
      if (DECL_EXTERNAL (m_decl))
	return NULL;

      if (empty_p ())
	return NULL;

      /* Implicit initialization to zero; use a compound_svalue for it.
	 Doing so requires that we have a concrete binding for this region,
	 which can fail if we have a region with unknown size
	 (e.g. "extern const char arr[];").  */
      const binding_key *binding
	= binding_key::make (mgr->get_store_manager (), this);
      if (binding->symbolic_p ())
	return NULL;

      /* If we don't care about tracking the content of this region, then
	 it's unused, and the value doesn't matter.  */
      if (!tracked_p ())
	return NULL;

      /* Build a zero-filled cluster and turn it into a compound svalue.  */
      binding_cluster c (this);
      c.zero_fill_region (mgr->get_store_manager (), this);
      return mgr->get_or_create_compound_svalue (TREE_TYPE (m_decl),
						 c.get_map ());
    }

  /* LTO can write out error_mark_node as the DECL_INITIAL for simple scalar
     values (to avoid writing out an extra section).  */
  if (init == error_mark_node)
    return NULL;

  if (TREE_CODE (init) == CONSTRUCTOR)
    return get_svalue_for_constructor (init, mgr);

  /* Reuse the get_rvalue logic from region_model.  */
  region_model m (mgr);
  return m.get_rvalue (path_var (init, 0), NULL);
}
/* Subroutine of symnode_requires_tracking_p; return true if REF
   might imply that we should be tracking the value of its decl.  */

static bool
ipa_ref_requires_tracking (ipa_ref *ref)
{
  /* If we have a load/store/alias of the symbol, then we'll track
     the decl's value.  */
  if (ref->use != IPA_REF_ADDR)
    return true;

  /* Be conservative if we can't inspect the referring statement.  */
  if (ref->stmt == NULL)
    return true;

  switch (ref->stmt->code)
    {
    default:
      return true;
    case GIMPLE_CALL:
      {
	cgraph_node *caller_cnode = dyn_cast <cgraph_node *> (ref->referring);
	if (caller_cnode == NULL)
	  return true;
	cgraph_edge *edge = caller_cnode->get_edge (ref->stmt);
	if (!edge)
	  return true;
	if (edge->callee == NULL)
	  return true; /* e.g. call through function ptr.  */
	if (edge->callee->definition)
	  return true;
	/* If we get here, then this ref is a pointer passed to
	   a function we don't have the definition for.  */
	return false;
      }
      break;
    case GIMPLE_ASM:
      {
	const gasm *asm_stmt = as_a <const gasm *> (ref->stmt);
	/* Asm with outputs or clobbers could write the pointed-to
	   value; track in those cases.  */
	if (gimple_asm_noutputs (asm_stmt) > 0)
	  return true;
	if (gimple_asm_nclobbers (asm_stmt) > 0)
	  return true;
	/* If we get here, then this ref is the decl being passed
	   by pointer to asm with no outputs.  */
	return false;
      }
      break;
    }
}
/* Determine if the decl for SYMNODE should have binding_clusters
   in our state objects; return false to optimize away tracking
   certain decls in our state objects, as an optimization.  */

static bool
symnode_requires_tracking_p (symtab_node *symnode)
{
  gcc_assert (symnode);
  /* Externally-visible symbols could be touched by other TUs;
     always track them.  */
  if (symnode->externally_visible)
    return true;
  tree context_fndecl = DECL_CONTEXT (symnode->decl);
  if (context_fndecl == NULL)
    return true;
  if (TREE_CODE (context_fndecl) != FUNCTION_DECL)
    return true;
  /* A single IPA ref that might read/write the decl forces tracking.  */
  for (auto ref : symnode->ref_list.referring)
    if (ipa_ref_requires_tracking (ref))
      return true;

  /* If we get here, then we don't have uses of this decl that require
     tracking; we never read from it or write to it explicitly.  */
  return false;
}
/* Subroutine of decl_region ctor: determine whether this decl_region
   can have binding_clusters; return false to optimize away tracking
   of certain decls in our state objects, as an optimization.  */

bool
decl_region::calc_tracked_p (tree decl)
{
  /* Precondition of symtab_node::get.  */
  if (TREE_CODE (decl) == VAR_DECL
      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl) || in_lto_p))
    if (symtab_node *symnode = symtab_node::get (decl))
      return symnode_requires_tracking_p (symnode);
  /* Default: track the decl.  */
  return true;
}
1750 /* class field_region : public region. */
1752 /* Implementation of region::dump_to_pp vfunc for field_region. */
1754 void
1755 field_region::dump_to_pp (pretty_printer *pp, bool simple) const
1757 if (simple)
1759 get_parent_region ()->dump_to_pp (pp, simple);
1760 pp_string (pp, ".");
1761 pp_printf (pp, "%E", m_field);
1763 else
1765 pp_string (pp, "field_region(");
1766 get_parent_region ()->dump_to_pp (pp, simple);
1767 pp_string (pp, ", ");
1768 print_quoted_type (pp, get_type ());
1769 pp_printf (pp, ", %qE)", m_field);
/* Implementation of region::get_relative_concrete_offset vfunc
   for field_region.  */

bool
field_region::get_relative_concrete_offset (bit_offset_t *out) const
{
  /* Compare with e.g. gimple-fold.cc's
     fold_nonarray_ctor_reference.  */
  tree byte_offset = DECL_FIELD_OFFSET (m_field);
  /* Bail out for variable-offset fields.  */
  if (TREE_CODE (byte_offset) != INTEGER_CST)
    return false;
  tree field_offset = DECL_FIELD_BIT_OFFSET (m_field);
  /* Compute bit offset of the field.  */
  offset_int bitoffset
    = (wi::to_offset (field_offset)
       + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
  *out = bitoffset;
  return true;
}
/* Implementation of region::get_relative_symbolic_offset vfunc
   for field_region.
   If known, the returned svalue is equal to the offset converted to bytes and
   rounded off.  */

const svalue *
field_region::get_relative_symbolic_offset (region_model_manager *mgr) const
{
  bit_offset_t out;
  if (get_relative_concrete_offset (&out))
    {
      /* Truncating division: sub-byte bit offsets are rounded down.  */
      tree cst_tree
	= wide_int_to_tree (ptrdiff_type_node, out / BITS_PER_UNIT);
      return mgr->get_or_create_constant_svalue (cst_tree);
    }
  return mgr->get_or_create_unknown_svalue (ptrdiff_type_node);
}
/* class element_region : public region.  */

/* Implementation of region::accept vfunc for element_region.  */

void
element_region::accept (visitor *v) const
{
  region::accept (v);
  /* Also visit the index svalue.  */
  m_index->accept (v);
}
1823 /* Implementation of region::dump_to_pp vfunc for element_region. */
1825 void
1826 element_region::dump_to_pp (pretty_printer *pp, bool simple) const
1828 if (simple)
1830 //pp_string (pp, "(");
1831 get_parent_region ()->dump_to_pp (pp, simple);
1832 pp_string (pp, "[");
1833 m_index->dump_to_pp (pp, simple);
1834 pp_string (pp, "]");
1835 //pp_string (pp, ")");
1837 else
1839 pp_string (pp, "element_region(");
1840 get_parent_region ()->dump_to_pp (pp, simple);
1841 pp_string (pp, ", ");
1842 print_quoted_type (pp, get_type ());
1843 pp_string (pp, ", ");
1844 m_index->dump_to_pp (pp, simple);
1845 pp_printf (pp, ")");
/* Implementation of region::get_relative_concrete_offset vfunc
   for element_region.  */

bool
element_region::get_relative_concrete_offset (bit_offset_t *out) const
{
  if (tree idx_cst = m_index->maybe_get_constant ())
    {
      gcc_assert (TREE_CODE (idx_cst) == INTEGER_CST);

      tree elem_type = get_type ();
      offset_int element_idx = wi::to_offset (idx_cst);

      /* First, use int_size_in_bytes, to reject the case where we
	 have an incomplete type, or a non-constant value.  */
      HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (elem_type);
      if (hwi_byte_size > 0)
	{
	  /* offset = index * element-size (in bits).  */
	  offset_int element_bit_size
	    = hwi_byte_size << LOG2_BITS_PER_UNIT;
	  offset_int element_bit_offset
	    = element_idx * element_bit_size;
	  *out = element_bit_offset;
	  return true;
	}
    }
  return false;
}
/* Implementation of region::get_relative_symbolic_offset vfunc
   for element_region.  */

const svalue *
element_region::get_relative_symbolic_offset (region_model_manager *mgr) const
{
  tree elem_type = get_type ();

  /* First, use int_size_in_bytes, to reject the case where we
     have an incomplete type, or a non-constant value.  */
  HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (elem_type);
  if (hwi_byte_size > 0)
    {
      /* Return INDEX * sizeof (ELEM_TYPE) as a symbolic byte offset.  */
      tree byte_size_tree = wide_int_to_tree (ptrdiff_type_node,
					      hwi_byte_size);
      const svalue *byte_size_sval
	= mgr->get_or_create_constant_svalue (byte_size_tree);
      return mgr->get_or_create_binop (ptrdiff_type_node, MULT_EXPR,
				       m_index, byte_size_sval);
    }
  return mgr->get_or_create_unknown_svalue (ptrdiff_type_node);
}
/* class offset_region : public region.  */

/* Implementation of region::accept vfunc for offset_region.  */

void
offset_region::accept (visitor *v) const
{
  region::accept (v);
  /* Also visit the byte-offset svalue.  */
  m_byte_offset->accept (v);
}
1912 /* Implementation of region::dump_to_pp vfunc for offset_region. */
1914 void
1915 offset_region::dump_to_pp (pretty_printer *pp, bool simple) const
1917 if (simple)
1919 //pp_string (pp, "(");
1920 get_parent_region ()->dump_to_pp (pp, simple);
1921 pp_string (pp, "+");
1922 m_byte_offset->dump_to_pp (pp, simple);
1923 //pp_string (pp, ")");
1925 else
1927 pp_string (pp, "offset_region(");
1928 get_parent_region ()->dump_to_pp (pp, simple);
1929 pp_string (pp, ", ");
1930 print_quoted_type (pp, get_type ());
1931 pp_string (pp, ", ");
1932 m_byte_offset->dump_to_pp (pp, simple);
1933 pp_printf (pp, ")");
/* Implementation of region::get_relative_concrete_offset vfunc
   for offset_region.  */

bool
offset_region::get_relative_concrete_offset (bit_offset_t *out) const
{
  if (tree byte_offset_cst = m_byte_offset->maybe_get_constant ())
    {
      gcc_assert (TREE_CODE (byte_offset_cst) == INTEGER_CST);
      /* Use a signed value for the byte offset, to handle
	 negative offsets.  */
      HOST_WIDE_INT byte_offset
	= wi::to_offset (byte_offset_cst).to_shwi ();
      HOST_WIDE_INT bit_offset = byte_offset * BITS_PER_UNIT;
      *out = bit_offset;
      return true;
    }
  return false;
}
/* Implementation of region::get_relative_symbolic_offset vfunc
   for offset_region.  The offset is already held symbolically, so
   just return it.  */

const svalue *
offset_region::get_relative_symbolic_offset (region_model_manager *mgr
					      ATTRIBUTE_UNUSED) const
{
  return get_byte_offset ();
}
/* Implementation of region::get_byte_size_sval vfunc for offset_region.  */

const svalue *
offset_region::get_byte_size_sval (region_model_manager *mgr) const
{
  tree offset_cst = get_byte_offset ()->maybe_get_constant ();
  byte_size_t byte_size;
  /* If the offset points in the middle of the region,
     return the remaining bytes.  */
  if (get_byte_size (&byte_size) && offset_cst)
    {
      byte_size_t offset = wi::to_offset (offset_cst);
      byte_range r (0, byte_size);
      if (r.contains_p (offset))
	{
	  tree remaining_byte_size = wide_int_to_tree (size_type_node,
						       byte_size - offset);
	  return mgr->get_or_create_constant_svalue (remaining_byte_size);
	}
    }

  /* Otherwise fall back to the base-class behavior.  */
  return region::get_byte_size_sval (mgr);
}
/* class sized_region : public region.  */

/* Implementation of region::accept vfunc for sized_region.  */

void
sized_region::accept (visitor *v) const
{
  region::accept (v);
  /* Also visit the size svalue.  */
  m_byte_size_sval->accept (v);
}
2002 /* Implementation of region::dump_to_pp vfunc for sized_region. */
2004 void
2005 sized_region::dump_to_pp (pretty_printer *pp, bool simple) const
2007 if (simple)
2009 pp_string (pp, "SIZED_REG(");
2010 get_parent_region ()->dump_to_pp (pp, simple);
2011 pp_string (pp, ", ");
2012 m_byte_size_sval->dump_to_pp (pp, simple);
2013 pp_string (pp, ")");
2015 else
2017 pp_string (pp, "sized_region(");
2018 get_parent_region ()->dump_to_pp (pp, simple);
2019 pp_string (pp, ", ");
2020 m_byte_size_sval->dump_to_pp (pp, simple);
2021 pp_printf (pp, ")");
2025 /* Implementation of region::get_byte_size vfunc for sized_region. */
2027 bool
2028 sized_region::get_byte_size (byte_size_t *out) const
2030 if (tree cst = m_byte_size_sval->maybe_get_constant ())
2032 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
2033 *out = tree_to_uhwi (cst);
2034 return true;
2036 return false;
2039 /* Implementation of region::get_bit_size vfunc for sized_region. */
2041 bool
2042 sized_region::get_bit_size (bit_size_t *out) const
2044 byte_size_t byte_size;
2045 if (!get_byte_size (&byte_size))
2046 return false;
2047 *out = byte_size * BITS_PER_UNIT;
2048 return true;
/* class cast_region : public region.  */

/* Implementation of region::accept vfunc for cast_region.  */

void
cast_region::accept (visitor *v) const
{
  region::accept (v);
  /* Also visit the region being viewed through the cast.  */
  m_original_region->accept (v);
}
2062 /* Implementation of region::dump_to_pp vfunc for cast_region. */
2064 void
2065 cast_region::dump_to_pp (pretty_printer *pp, bool simple) const
2067 if (simple)
2069 pp_string (pp, "CAST_REG(");
2070 print_quoted_type (pp, get_type ());
2071 pp_string (pp, ", ");
2072 m_original_region->dump_to_pp (pp, simple);
2073 pp_string (pp, ")");
2075 else
2077 pp_string (pp, "cast_region(");
2078 m_original_region->dump_to_pp (pp, simple);
2079 pp_string (pp, ", ");
2080 print_quoted_type (pp, get_type ());
2081 pp_printf (pp, ")");
2085 /* Implementation of region::get_relative_concrete_offset vfunc
2086 for cast_region. */
2088 bool
2089 cast_region::get_relative_concrete_offset (bit_offset_t *out) const
2091 *out = (int) 0;
2092 return true;
2095 /* class heap_allocated_region : public region. */
2097 /* Implementation of region::dump_to_pp vfunc for heap_allocated_region. */
2099 void
2100 heap_allocated_region::dump_to_pp (pretty_printer *pp, bool simple) const
2102 if (simple)
2103 pp_printf (pp, "HEAP_ALLOCATED_REGION(%i)", get_id ());
2104 else
2105 pp_printf (pp, "heap_allocated_region(%i)", get_id ());
2108 /* class alloca_region : public region. */
2110 /* Implementation of region::dump_to_pp vfunc for alloca_region. */
2112 void
2113 alloca_region::dump_to_pp (pretty_printer *pp, bool simple) const
2115 if (simple)
2116 pp_printf (pp, "ALLOCA_REGION(%i)", get_id ());
2117 else
2118 pp_printf (pp, "alloca_region(%i)", get_id ());
2121 /* class string_region : public region. */
2123 /* Implementation of region::dump_to_pp vfunc for string_region. */
2125 void
2126 string_region::dump_to_pp (pretty_printer *pp, bool simple) const
2128 if (simple)
2129 dump_tree (pp, m_string_cst);
2130 else
2132 pp_string (pp, "string_region(");
2133 dump_tree (pp, m_string_cst);
2134 if (!flag_dump_noaddr)
2136 pp_string (pp, " (");
2137 pp_pointer (pp, m_string_cst);
2138 pp_string (pp, "))");
2143 /* class bit_range_region : public region. */
2145 /* Implementation of region::dump_to_pp vfunc for bit_range_region. */
2147 void
2148 bit_range_region::dump_to_pp (pretty_printer *pp, bool simple) const
2150 if (simple)
2152 pp_string (pp, "BIT_RANGE_REG(");
2153 get_parent_region ()->dump_to_pp (pp, simple);
2154 pp_string (pp, ", ");
2155 m_bits.dump_to_pp (pp);
2156 pp_string (pp, ")");
2158 else
2160 pp_string (pp, "bit_range_region(");
2161 get_parent_region ()->dump_to_pp (pp, simple);
2162 pp_string (pp, ", ");
2163 m_bits.dump_to_pp (pp);
2164 pp_printf (pp, ")");
2168 /* Implementation of region::get_byte_size vfunc for bit_range_region. */
2170 bool
2171 bit_range_region::get_byte_size (byte_size_t *out) const
2173 if (m_bits.m_size_in_bits % BITS_PER_UNIT == 0)
2175 *out = m_bits.m_size_in_bits / BITS_PER_UNIT;
2176 return true;
2178 return false;
/* Implementation of region::get_bit_size vfunc for bit_range_region.
   The bit size is always known: it's the size of the range.  */

bool
bit_range_region::get_bit_size (bit_size_t *out) const
{
  *out = m_bits.m_size_in_bits;
  return true;
}
/* Implementation of region::get_byte_size_sval vfunc for bit_range_region.  */

const svalue *
bit_range_region::get_byte_size_sval (region_model_manager *mgr) const
{
  /* A sub-byte size can't be expressed in bytes; return "unknown".  */
  if (m_bits.m_size_in_bits % BITS_PER_UNIT != 0)
    return mgr->get_or_create_unknown_svalue (size_type_node);

  HOST_WIDE_INT num_bytes = m_bits.m_size_in_bits.to_shwi () / BITS_PER_UNIT;
  return mgr->get_or_create_int_cst (size_type_node, num_bytes);
}
/* Implementation of region::get_relative_concrete_offset vfunc for
   bit_range_region.  The offset is always concretely known: it's the
   start of the bit range.  */

bool
bit_range_region::get_relative_concrete_offset (bit_offset_t *out) const
{
  *out = m_bits.get_start_bit_offset ();
  return true;
}
/* Implementation of region::get_relative_symbolic_offset vfunc for
   bit_range_region.
   The returned svalue is equal to the offset converted to bytes and
   rounded off.  */

const svalue *
bit_range_region::get_relative_symbolic_offset (region_model_manager *mgr)
  const
{
  /* Truncating division: a start offset within a byte rounds down.  */
  byte_offset_t start_byte = m_bits.get_start_bit_offset () / BITS_PER_UNIT;
  tree start_bit_tree = wide_int_to_tree (ptrdiff_type_node, start_byte);
  return mgr->get_or_create_constant_svalue (start_bit_tree);
}
2226 /* class var_arg_region : public region. */
2228 void
2229 var_arg_region::dump_to_pp (pretty_printer *pp, bool simple) const
2231 if (simple)
2233 pp_string (pp, "VAR_ARG_REG(");
2234 get_parent_region ()->dump_to_pp (pp, simple);
2235 pp_printf (pp, ", arg_idx: %d)", m_idx);
2237 else
2239 pp_string (pp, "var_arg_region(");
2240 get_parent_region ()->dump_to_pp (pp, simple);
2241 pp_printf (pp, ", arg_idx: %d)", m_idx);
/* Get the frame_region for this var_arg_region.  The parent of a
   var_arg_region is always a frame_region.  */

const frame_region *
var_arg_region::get_frame_region () const
{
  gcc_assert (get_parent_region ());
  return as_a <const frame_region *> (get_parent_region ());
}
2254 /* class errno_region : public region. */
2256 void
2257 errno_region::dump_to_pp (pretty_printer *pp, bool simple) const
2259 if (simple)
2260 pp_string (pp, "errno_region");
2261 else
2262 pp_string (pp, "errno_region()");
/* class unknown_region : public region.  */

/* Implementation of region::dump_to_pp vfunc for unknown_region.
   The same text is printed regardless of SIMPLE.  */

void
unknown_region::dump_to_pp (pretty_printer *pp, bool /*simple*/) const
{
  pp_string (pp, "UNKNOWN_REGION");
}
2275 } // namespace ana
2277 #endif /* #if ENABLE_ANALYZER */