1 /* Data structure for the modref pass.
2 Copyright (C) 2020-2021 Free Software Foundation, Inc.
3 Contributed by David Cepelik and Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* modref_tree represents a decision tree that can be used by an alias analysis
22 oracle to determine whether a given memory access can be affected by a function
23 call. For every function we collect two trees, one for loads and the other
24 for stores. The tree consists of the following levels:
26 1) Base: this level represents the base alias set of the access and refers
27 to sons (ref nodes). The flag all_refs means that all possible references
30 Because for LTO streaming we need to stream types rather than alias sets,
31 modref_base_node is implemented as a template.
32 2) Ref: this level represents the ref alias set and links to accesses unless
34 Again, ref is a template to allow LTO streaming.
35 3) Access: this level represents info about individual accesses. Presently
36 we record whether the access is through a dereference of a function parameter
37 and if so we record the access range. */
40 #ifndef GCC_MODREF_TREE_H
41 #define GCC_MODREF_TREE_H
43 struct ipa_modref_summary
;
45 /* parm indexes greater than 0 are normal parms.
46 Some negative values have special meaning. */
47 enum modref_special_parms
{
/* Base of the access is not known to be derived from any parameter. */
48 MODREF_UNKNOWN_PARM
= -1,
/* Access is based on the static chain of the call. */
49 MODREF_STATIC_CHAIN_PARM
= -2,
/* Access is based on the return slot. */
50 MODREF_RETSLOT_PARM
= -3,
51 /* Used in modref_parm_map to take references which can be removed
52 from the summary during summary update since they now point to local memory. */
54 MODREF_LOCAL_MEMORY_PARM
= -4
57 /* Modref record accesses relative to function parameters.
58 This is entry for single access specifying its base and access range.
60 Accesses can be collected to boundedly sized arrays using
61 modref_access_node::insert. */
62 struct GTY(()) modref_access_node
64 /* Access range information (in bits). */
/* NOTE(review): the offset/size/max_size range members appear to be
   missing from this extraction; compare against the upstream header. */
69 /* Offset from parameter pointer to the base of the access (in bytes). */
70 poly_int64 parm_offset
;
72 /* Index of parameter which specifies the base of access. -1 if base is not
73 a function parameter. */
/* True when parm_offset above is known. */
75 bool parm_offset_known
;
76 /* Number of times interval was extended during dataflow.
77 This has to be limited in order to keep dataflow finite. */
78 unsigned char adjustments
;
80 /* Return true if access node holds some useful info. */
81 bool useful_p () const
83 return parm_index
!= MODREF_UNKNOWN_PARM
;
85 /* Return true if access can be used to determine a kill.
   Requires a known parameter base and offset plus an exact
   (size == max_size) known size. */
86 bool useful_for_kill_p () const
88 return parm_offset_known
&& parm_index
!= MODREF_UNKNOWN_PARM
89 && parm_index
!= MODREF_RETSLOT_PARM
&& known_size_p (size
)
90 && known_eq (max_size
, size
);
92 /* Dump range to debug OUT. */
93 void dump (FILE *out
);
94 /* Return true if both accesses are the same. */
95 bool operator == (modref_access_node
&a
) const;
96 /* Return true if range info is useful. */
97 bool range_info_useful_p () const;
98 /* Return tree corresponding to parameter of the range in STMT. */
99 tree
get_call_arg (const gcall
*stmt
) const;
100 /* Build ao_ref corresponding to the access and return true if successful. */
101 bool get_ao_ref (const gcall
*stmt
, class ao_ref
*ref
) const;
102 /* Stream access to OB. */
103 void stream_out (struct output_block
*ob
) const;
104 /* Stream access in from IB. */
105 static modref_access_node
stream_in (struct lto_input_block
*ib
);
106 /* Insert A into vector ACCESSES. Limit size of vector to MAX_ACCESSES and
107 if RECORD_ADJUSTMENT is true keep track of adjustment counts.
108 Return 0 if nothing changed, 1 if insertion succeeded and -1 if failed. */
109 static int insert (vec
<modref_access_node
, va_gc
> *&accesses
,
110 modref_access_node a
, size_t max_accesses
,
111 bool record_adjustments
);
112 /* Same as insert but for kills where we are conservative the other way
113 around: if information is lost, the kill is lost. */
114 static bool insert_kill (vec
<modref_access_node
> &kills
,
115 modref_access_node
&a
, bool record_adjustments
);
/* Internal helpers for containment tests and merging of access ranges,
   used by insert/insert_kill/try_merge_with; see the implementation file
   for their exact contracts. */
117 bool contains (const modref_access_node
&) const;
118 bool contains_for_kills (const modref_access_node
&) const;
119 void update (poly_int64
, poly_int64
, poly_int64
, poly_int64
, bool);
120 bool update_for_kills (poly_int64
, poly_int64
, poly_int64
,
121 poly_int64
, poly_int64
, bool);
122 bool merge (const modref_access_node
&, bool);
123 bool merge_for_kills (const modref_access_node
&, bool);
124 static bool closer_pair_p (const modref_access_node
&,
125 const modref_access_node
&,
126 const modref_access_node
&,
127 const modref_access_node
&);
128 void forced_merge (const modref_access_node
&, bool);
129 void update2 (poly_int64
, poly_int64
, poly_int64
, poly_int64
,
130 poly_int64
, poly_int64
, poly_int64
, bool);
131 bool combined_offsets (const modref_access_node
&,
132 poly_int64
*, poly_int64
*, poly_int64
*) const;
133 static void try_merge_with (vec
<modref_access_node
, va_gc
> *&, size_t);
136 /* Access node specifying no useful info.
   NOTE(review): initializer order must match the field declaration order
   of modref_access_node; some fields are not visible in this chunk. */
137 const modref_access_node unspecified_modref_access_node
138 = {0, -1, -1, 0, MODREF_UNKNOWN_PARM
, false, 0};
140 template <typename T
>
141 struct GTY((user
)) modref_ref_node
/* Accesses recorded for this base->ref pair. */
145 vec
<modref_access_node
, va_gc
> *accesses
;
/* Construct a node for REF with every_access cleared. */
147 modref_ref_node (T ref
):
149 every_access (false),
153 /* Collapse the tree. */
161 /* Insert access with OFFSET and SIZE.
162 Collapse tree if it has more than MAX_ACCESSES entries.
163 If RECORD_ADJUSTMENTs is true avoid too many interval extensions.
164 Return true if record was changed. */
165 bool insert_access (modref_access_node a
, size_t max_accesses
,
166 bool record_adjustments
)
168 /* If this base->ref pair has no access information, bail out. */
172 /* Only the following kinds of parameters need to be tracked.
173 We do not track return slots because they are seen as a direct store. */
175 gcc_checking_assert (a
.parm_index
>= 0
176 || a
.parm_index
== MODREF_STATIC_CHAIN_PARM
177 || a
.parm_index
== MODREF_UNKNOWN_PARM
);
189 int ret
= modref_access_node::insert (accesses
, a
, max_accesses
,
195 "--param param=modref-max-accesses limit reached;"
203 /* Base of an access. */
204 template <typename T
>
205 struct GTY((user
)) modref_base_node
/* Ref nodes recorded under this base. */
208 vec
<modref_ref_node
<T
> *, va_gc
> *refs
;
211 modref_base_node (T base
):
216 /* Search REF; return NULL if failed. */
217 modref_ref_node
<T
> *search (T ref
)
220 modref_ref_node
<T
> *n
;
221 FOR_EACH_VEC_SAFE_ELT (refs
, i
, n
)
227 /* Insert REF; collapse tree if there are more than MAX_REFS.
228 Return inserted ref and if CHANGED is non-null set it to true if
229 something changed. */
230 modref_ref_node
<T
> *insert_ref (T ref
, size_t max_refs
,
231 bool *changed
= NULL
)
233 modref_ref_node
<T
> *ref_node
;
235 /* If the node is collapsed, don't do anything. */
239 /* Otherwise, insert a node for the ref of the access under the base. */
240 ref_node
= search (ref
);
244 /* We always allow inserting ref 0. For non-0 refs there is upper
245 limit on number of entries and if exceeded,
246 drop ref conservatively to 0. */
247 if (ref
&& refs
&& refs
->length () >= max_refs
)
250 fprintf (dump_file
, "--param param=modref-max-refs limit reached;"
253 ref_node
= search (ref
);
/* Allocate a fresh ref node in GC memory and remember it. */
261 ref_node
= new (ggc_alloc
<modref_ref_node
<T
> > ())modref_ref_node
<T
>
263 vec_safe_push (refs
, ref_node
);
270 modref_ref_node
<T
> *r
;
274 FOR_EACH_VEC_SAFE_ELT (refs
, i
, r
)
286 /* Map translating parameters across function call. */
288 struct modref_parm_map
290 /* Default constructor. */
292 : parm_index (MODREF_UNKNOWN_PARM
), parm_offset_known (false), parm_offset ()
295 /* Index of parameter we translate to.
296 Values from the modref_special_parms enum are permitted too. */
/* True when parm_offset below is known. */
298 bool parm_offset_known
;
/* Offset adjustment applied when translating. */
299 poly_int64 parm_offset
;
302 /* Access tree for a single function. */
303 template <typename T
>
304 struct GTY((user
)) modref_tree
/* Base nodes of the tree. */
306 vec
<modref_base_node
<T
> *, va_gc
> *bases
;
311 every_base (false) {}
313 /* Insert BASE; collapse tree if there are more than MAX_REFS.
314 Return inserted base and if CHANGED is non-null set it to true if
316 If table gets full, try to insert REF instead. */
318 modref_base_node
<T
> *insert_base (T base
, T ref
,
319 unsigned int max_bases
,
320 bool *changed
= NULL
)
322 modref_base_node
<T
> *base_node
;
324 /* If the node is collapsed, don't do anything. */
328 /* Otherwise, insert a node for the base of the access into the tree. */
329 base_node
= search (base
);
333 /* We always allow inserting base 0. For non-0 base there is upper
334 limit on number of entries and if exceeded,
335 drop base conservatively to ref and if it still does not fit to 0. */
336 if (base
&& bases
&& bases
->length () >= max_bases
)
338 base_node
= search (ref
);
342 fprintf (dump_file
, "--param param=modref-max-bases"
343 " limit reached; using ref\n");
347 fprintf (dump_file
, "--param param=modref-max-bases"
348 " limit reached; using 0\n");
350 base_node
= search (base
);
/* Allocate a fresh base node in GC memory and remember it. */
358 base_node
= new (ggc_alloc
<modref_base_node
<T
> > ())
359 modref_base_node
<T
> (base
);
360 vec_safe_push (bases
, base_node
);
364 /* Insert memory access to the tree.
365 Return true if something changed. */
366 bool insert (unsigned int max_bases
,
367 unsigned int max_refs
,
368 unsigned int max_accesses
,
369 T base
, T ref
, modref_access_node a
,
370 bool record_adjustments
)
375 bool changed
= false;
377 /* We may end up with max_size being less than size for accesses past the
378 end of array. Those are undefined and safe to ignore. */
379 if (a
.range_info_useful_p ()
380 && known_size_p (a
.size
) && known_size_p (a
.max_size
)
381 && known_lt (a
.max_size
, a
.size
))
385 " - Paradoxical range. Ignoring\n");
388 if (known_size_p (a
.size
)
389 && known_eq (a
.size
, 0))
393 " - Zero size. Ignoring\n");
396 if (known_size_p (a
.max_size
)
397 && known_eq (a
.max_size
, 0))
401 " - Zero max_size. Ignoring\n");
404 gcc_checking_assert (!known_size_p (a
.max_size
)
405 || !known_le (a
.max_size
, 0));
407 /* No useful information tracked; collapse everything. */
408 if (!base
&& !ref
&& !a
.useful_p ())
414 modref_base_node
<T
> *base_node
415 = insert_base (base
, ref
, max_bases
, &changed
);
416 base
= base_node
->base
;
417 /* If table got full we may end up with useless base. */
418 if (!base
&& !ref
&& !a
.useful_p ())
423 if (base_node
->every_ref
)
425 gcc_checking_assert (search (base
) != NULL
);
427 /* No useful ref info tracked; collapse base. */
428 if (!ref
&& !a
.useful_p ())
430 base_node
->collapse ();
434 modref_ref_node
<T
> *ref_node
435 = base_node
->insert_ref (ref
, max_refs
, &changed
);
438 if (ref_node
->every_access
)
440 changed
|= ref_node
->insert_access (a
, max_accesses
,
442 /* See if we failed to add useful access. */
443 if (ref_node
->every_access
)
445 /* Collapse everything if there is no useful base and ref. */
449 gcc_checking_assert (changed
);
451 /* Collapse base if there is no useful ref. */
454 base_node
->collapse ();
455 gcc_checking_assert (changed
);
461 /* Insert memory access to the tree.
462 Return true if something changed.
   Convenience overload reading the size limits from FNDECL's params. */
463 bool insert (tree fndecl
,
464 T base
, T ref
, const modref_access_node
&a
,
465 bool record_adjustments
)
467 return insert (opt_for_fn (fndecl
, param_modref_max_bases
),
468 opt_for_fn (fndecl
, param_modref_max_refs
),
469 opt_for_fn (fndecl
, param_modref_max_accesses
),
470 base
, ref
, a
, record_adjustments
);
473 /* Remove tree branches that are not useful (i.e. they will always pass). */
478 modref_base_node
<T
> *base_node
;
479 modref_ref_node
<T
> *ref_node
;
484 for (i
= 0; vec_safe_iterate (bases
, i
, &base_node
);)
487 for (j
= 0; vec_safe_iterate (base_node
->refs
, j
, &ref_node
);)
489 if (!ref_node
->every_access
490 && (!ref_node
->accesses
491 || !ref_node
->accesses
->length ()))
493 base_node
->refs
->unordered_remove (j
);
494 vec_free (ref_node
->accesses
);
495 ggc_delete (ref_node
);
500 if (!base_node
->every_ref
501 && (!base_node
->refs
|| !base_node
->refs
->length ()))
503 bases
->unordered_remove (i
);
504 vec_free (base_node
->refs
);
505 ggc_delete (base_node
);
510 if (bases
&& !bases
->length ())
517 /* Merge OTHER into the tree.
518 PARM_MAP, if non-NULL, maps parm indexes of callee to caller.
519 Similarly STATIC_CHAIN_MAP, if non-NULL, maps the static chain of callee
    to caller.
520 Return true if something has changed. */
521 bool merge (unsigned int max_bases
,
522 unsigned int max_refs
,
523 unsigned int max_accesses
,
524 modref_tree
<T
> *other
, vec
<modref_parm_map
> *parm_map
,
525 modref_parm_map
*static_chain_map
,
526 bool record_accesses
)
528 if (!other
|| every_base
)
530 if (other
->every_base
)
536 bool changed
= false;
538 modref_base_node
<T
> *base_node
, *my_base_node
;
539 modref_ref_node
<T
> *ref_node
;
540 modref_access_node
*access_node
;
541 bool release
= false;
543 /* For self-recursive functions we may end up merging summary into itself;
544 produce copy first so we do not modify summary under our own hands. */
548 other
= modref_tree
<T
>::create_ggc ();
549 other
->copy_from (this);
552 FOR_EACH_VEC_SAFE_ELT (other
->bases
, i
, base_node
)
554 if (base_node
->every_ref
)
556 my_base_node
= insert_base (base_node
->base
, 0,
557 max_bases
, &changed
);
558 if (my_base_node
&& !my_base_node
->every_ref
)
560 my_base_node
->collapse ();
566 FOR_EACH_VEC_SAFE_ELT (base_node
->refs
, j
, ref_node
)
568 if (ref_node
->every_access
)
570 changed
|= insert (max_bases
, max_refs
, max_accesses
,
573 unspecified_modref_access_node
,
577 FOR_EACH_VEC_SAFE_ELT (ref_node
->accesses
, k
, access_node
)
579 modref_access_node a
= *access_node
;
/* Translate the access's parameter index through PARM_MAP. */
581 if (a
.parm_index
!= MODREF_UNKNOWN_PARM
&& parm_map
)
583 if (a
.parm_index
>= (int)parm_map
->length ())
584 a
.parm_index
= MODREF_UNKNOWN_PARM
;
588 = a
.parm_index
== MODREF_STATIC_CHAIN_PARM
590 : (*parm_map
) [a
.parm_index
];
591 if (m
.parm_index
== MODREF_LOCAL_MEMORY_PARM
)
593 a
.parm_offset
+= m
.parm_offset
;
594 a
.parm_offset_known
&= m
.parm_offset_known
;
595 a
.parm_index
= m
.parm_index
;
598 changed
|= insert (max_bases
, max_refs
, max_accesses
,
599 base_node
->base
, ref_node
->ref
,
609 /* Merge OTHER into the tree.
610 PARM_MAP, if non-NULL, maps parm indexes of callee to caller.
611 Similarly STATIC_CHAIN_MAP, if non-NULL, maps the static chain of callee
    to caller.
612 Return true if something has changed. */
613 bool merge (tree fndecl
,
614 modref_tree
<T
> *other
, vec
<modref_parm_map
> *parm_map
,
615 modref_parm_map
*static_chain_map
,
616 bool record_accesses
)
618 return merge (opt_for_fn (fndecl
, param_modref_max_bases
),
619 opt_for_fn (fndecl
, param_modref_max_refs
),
620 opt_for_fn (fndecl
, param_modref_max_accesses
),
621 other
, parm_map
, static_chain_map
, record_accesses
);
624 /* Copy OTHER to THIS. */
625 void copy_from (modref_tree
<T
> *other
)
627 merge (INT_MAX
, INT_MAX
, INT_MAX
, other
, NULL
, NULL
, false);
630 /* Search BASE in tree; return NULL if failed. */
631 modref_base_node
<T
> *search (T base
)
634 modref_base_node
<T
> *n
;
635 FOR_EACH_VEC_SAFE_ELT (bases
, i
, n
)
641 /* Return true if tree contains access to global memory. */
642 bool global_access_p ()
645 modref_base_node
<T
> *base_node
;
646 modref_ref_node
<T
> *ref_node
;
647 modref_access_node
*access_node
;
650 FOR_EACH_VEC_SAFE_ELT (bases
, i
, base_node
)
652 if (base_node
->every_ref
)
654 FOR_EACH_VEC_SAFE_ELT (base_node
->refs
, j
, ref_node
)
656 if (ref_node
->every_access
)
658 FOR_EACH_VEC_SAFE_ELT (ref_node
->accesses
, k
, access_node
)
659 if (access_node
->parm_index
== MODREF_UNKNOWN_PARM
)
666 /* Return ggc allocated instance. We explicitly call destructors via
667 ggc_delete and do not want finalizers to be registered and
668 called at the garbage collection time. */
669 static modref_tree
<T
> *create_ggc ()
671 return new (ggc_alloc_no_dtor
<modref_tree
<T
>> ())
675 /* Remove all records and mark tree to alias with everything. */
679 modref_base_node
<T
> *n
;
683 FOR_EACH_VEC_SAFE_ELT (bases
, i
, n
)
694 /* Release memory. */
700 /* Update parameter indexes in TT according to MAP. */
702 remap_params (vec
<int> *map
)
705 modref_base_node
<T
> *base_node
;
706 FOR_EACH_VEC_SAFE_ELT (bases
, i
, base_node
)
709 modref_ref_node
<T
> *ref_node
;
710 FOR_EACH_VEC_SAFE_ELT (base_node
->refs
, j
, ref_node
)
713 modref_access_node
*access_node
;
714 FOR_EACH_VEC_SAFE_ELT (ref_node
->accesses
, k
, access_node
)
715 if (access_node
->parm_index
>= 0)
717 if (access_node
->parm_index
< (int)map
->length ())
718 access_node
->parm_index
= (*map
)[access_node
->parm_index
];
720 access_node
->parm_index
= MODREF_UNKNOWN_PARM
;
/* Self-test entry point for this file. */
727 void modref_c_tests ();
/* GC marking (gt_ggc_mx) and PCH walkers (gt_pch_nx) required by the
   GTY((user)) markup on the modref templates above.
   NOTE(review): several declarations below appear truncated in this
   extraction (a trailing "void *cookie" parameter seems to be missing);
   compare against the upstream header before relying on them. */
729 void gt_ggc_mx (modref_tree
<int>* const&);
730 void gt_ggc_mx (modref_tree
<tree_node
*>* const&);
731 void gt_pch_nx (modref_tree
<int>* const&);
732 void gt_pch_nx (modref_tree
<tree_node
*>* const&);
733 void gt_pch_nx (modref_tree
<int>* const&, gt_pointer_operator op
, void *cookie
);
734 void gt_pch_nx (modref_tree
<tree_node
*>* const&, gt_pointer_operator op
,
737 void gt_ggc_mx (modref_base_node
<int>*);
738 void gt_ggc_mx (modref_base_node
<tree_node
*>* &);
739 void gt_pch_nx (modref_base_node
<int>* const&);
740 void gt_pch_nx (modref_base_node
<tree_node
*>* const&);
741 void gt_pch_nx (modref_base_node
<int>* const&, gt_pointer_operator op
,
743 void gt_pch_nx (modref_base_node
<tree_node
*>* const&, gt_pointer_operator op
,
746 void gt_ggc_mx (modref_ref_node
<int>*);
747 void gt_ggc_mx (modref_ref_node
<tree_node
*>* &);
748 void gt_pch_nx (modref_ref_node
<int>* const&);
749 void gt_pch_nx (modref_ref_node
<tree_node
*>* const&);
750 void gt_pch_nx (modref_ref_node
<int>* const&, gt_pointer_operator op
,
752 void gt_pch_nx (modref_ref_node
<tree_node
*>* const&, gt_pointer_operator op
,