/* Data structure for the modref pass.
   Copyright (C) 2020-2021 Free Software Foundation, Inc.
   Contributed by David Cepelik and Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* modref_tree represents a decision tree that can be used by the alias
   analysis oracle to determine whether a given memory access can be affected
   by a function call.  For every function we collect two trees, one for
   loads and one for stores.  The tree consists of the following levels:

   1) Base: this level represents the base alias set of the access and refers
      to sons (ref nodes).  The flag all_refs means that all possible
      references are aliasing.

      Because for LTO streaming we need to stream types rather than alias sets
      modref_base_node is implemented as a template.
   2) Ref: this level represents the ref alias set and links to accesses
      unless the every_access flag is set.
      Again ref is a template to allow LTO streaming.
   3) Access: this level represents info about individual accesses.  Presently
      we record whether the access is through a dereference of a function
      parameter and if so we record the access range.  */
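
/* Illustrative example (editorial sketch, not part of the original header):
   for a function

     void foo (int *p) { p[1] = 0; }

   the store tree conceptually records a single base node, under it a single
   ref node, and under that one access node meaning "store of 32 bits at
   *(parameter 0 + 4 bytes)": parm_index 0, parm_offset 4, parm_offset_known
   true, and offset/size/max_size describing the bit range.  The actual base
   and ref alias sets depend on the front end and TBAA and are omitted
   here.  */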
#ifndef GCC_MODREF_TREE_H
#define GCC_MODREF_TREE_H

struct ipa_modref_summary;
struct GTY(()) modref_access_node
{
  /* Access range information (in bits).  */
  poly_int64 offset;
  poly_int64 size;
  poly_int64 max_size;

  /* Offset from parameter pointer to the base of the access (in bytes).  */
  poly_int64 parm_offset;

  /* Index of parameter which specifies the base of access.  -1 if base is not
     a function parameter.  */
  int parm_index;
  bool parm_offset_known;

  /* Number of times interval was extended during dataflow.
     This has to be limited in order to keep dataflow finite.  */
  unsigned char adjustments;
  /* Return true if access node holds some useful info.  */
  bool useful_p () const
    {
      return parm_index != -1;
    }
  /* Return true if range info is useful.  */
  bool range_info_useful_p () const
    {
      return parm_index != -1 && parm_offset_known
	     && (known_size_p (size)
		 || known_size_p (max_size)
		 || known_ge (offset, 0));
    }
  /* Return true if both accesses are the same.  */
  bool operator == (modref_access_node &a) const
    {
      if (parm_index != a.parm_index)
	return false;
      if (parm_index >= 0)
	{
	  if (parm_offset_known != a.parm_offset_known)
	    return false;
	  if (parm_offset_known
	      && !known_eq (parm_offset, a.parm_offset))
	    return false;
	}
      if (range_info_useful_p () != a.range_info_useful_p ())
	return false;
      if (range_info_useful_p ()
	  && (!known_eq (a.offset, offset)
	      || !known_eq (a.size, size)
	      || !known_eq (a.max_size, max_size)))
	return false;
      return true;
    }
  /* Return true if A is a subaccess.  */
  bool contains (const modref_access_node &a) const
    {
      poly_int64 aoffset_adj = 0;
      if (parm_index >= 0)
	{
	  if (parm_index != a.parm_index)
	    return false;
	  if (parm_offset_known)
	    {
	      if (!a.parm_offset_known)
		return false;
	      /* Accesses are never below parm_offset, so look
		 for smaller offset.
		 If access ranges are known still allow merging
		 when bit offsets comparison passes.  */
	      if (!known_le (parm_offset, a.parm_offset)
		  && !range_info_useful_p ())
		return false;
	      aoffset_adj = (a.parm_offset - parm_offset)
			    << LOG2_BITS_PER_UNIT;
	    }
	}
      if (range_info_useful_p ())
	{
	  if (!a.range_info_useful_p ())
	    return false;
	  /* Sizes of stores are used to check that the object is big enough
	     to fit the store, so a smaller or unknown store is more general
	     than a large store.  */
	  if (known_size_p (size)
	      && (!known_size_p (a.size)
		  || !known_le (size, a.size)))
	    return false;
	  if (known_size_p (max_size))
	    return known_subrange_p (a.offset + aoffset_adj,
				     a.max_size, offset, max_size);
	  else
	    return known_le (offset, a.offset + aoffset_adj);
	}
      return true;
    }
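
  /* Worked example (editorial note, not in the original source): a node
     with parm_index 0, parm_offset 0, offset 0, size 32 and max_size 64
     contains a node with parm_index 0, parm_offset 0, offset 32, size 32
     and max_size 32: the store-size check passes (32 <= 32) and the bit
     range [32, 64) is a subrange of [0, 64).  */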
  /* Update access range to new parameters.
     If RECORD_ADJUSTMENTS is true, record number of changes in the access
     and if threshold is exceeded start dropping precision
     so only constantly many updates are possible.  This makes dataflow
     converge.  */
  void update (poly_int64 parm_offset1,
	       poly_int64 offset1, poly_int64 size1, poly_int64 max_size1,
	       bool record_adjustments)
    {
      if (known_eq (parm_offset, parm_offset1)
	  && known_eq (offset, offset1)
	  && known_eq (size, size1)
	  && known_eq (max_size, max_size1))
	return;
      if (!record_adjustments
	  || (++adjustments) < param_modref_max_adjustments)
	{
	  parm_offset = parm_offset1;
	  offset = offset1;
	  size = size1;
	  max_size = max_size1;
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file,
		     "--param param=modref-max-adjustments limit reached:");
	  if (!known_eq (parm_offset, parm_offset1))
	    {
	      if (dump_file)
		fprintf (dump_file, " parm_offset cleared");
	      parm_offset_known = false;
	    }
	  if (!known_eq (size, size1))
	    {
	      size = -1;
	      if (dump_file)
		fprintf (dump_file, " size cleared");
	    }
	  if (!known_eq (max_size, max_size1))
	    {
	      max_size = -1;
	      if (dump_file)
		fprintf (dump_file, " max_size cleared");
	    }
	  if (!known_eq (offset, offset1))
	    {
	      offset = 0;
	      if (dump_file)
		fprintf (dump_file, " offset cleared");
	    }
	  if (dump_file)
	    fprintf (dump_file, "\n");
	}
    }
  /* Merge in access A if it is possible to do so without losing
     precision.  Return true if successful.
     If RECORD_ADJUSTMENTS is true, remember how many times the interval
     was prolonged and punt when there are too many.  */
  bool merge (const modref_access_node &a, bool record_adjustments)
    {
      poly_int64 offset1 = 0;
      poly_int64 aoffset1 = 0;
      poly_int64 new_parm_offset = 0;

      /* We assume that containment was tested earlier.  */
      gcc_checking_assert (!contains (a) && !a.contains (*this));
      if (parm_index >= 0)
	{
	  if (parm_index != a.parm_index)
	    return false;
	  if (parm_offset_known)
	    {
	      if (!a.parm_offset_known)
		return false;
	      if (!combined_offsets (a, &new_parm_offset, &offset1, &aoffset1))
		return false;
	    }
	}
      /* See if we can merge ranges.  */
      if (range_info_useful_p ())
	{
	  /* In this case we have containment that should be
	     handled earlier.  */
	  gcc_checking_assert (a.range_info_useful_p ());

	  /* If a.size is less specified than size, merge only
	     if intervals are otherwise equivalent.  */
	  if (known_size_p (size)
	      && (!known_size_p (a.size) || known_lt (a.size, size)))
	    {
	      if (((known_size_p (max_size) || known_size_p (a.max_size))
		   && !known_eq (max_size, a.max_size))
		  || !known_eq (offset1, aoffset1))
		return false;
	      update (new_parm_offset, offset1, a.size, max_size,
		      record_adjustments);
	      return true;
	    }
	  /* If sizes are the same, we can extend the interval.  */
	  if ((known_size_p (size) || known_size_p (a.size))
	      && !known_eq (size, a.size))
	    return false;
	  if (known_le (offset1, aoffset1))
	    {
	      if (!known_size_p (max_size)
		  || known_ge (offset1 + max_size, aoffset1))
		{
		  update2 (new_parm_offset, offset1, size, max_size,
			   aoffset1, a.size, a.max_size,
			   record_adjustments);
		  return true;
		}
	    }
	  else if (known_le (aoffset1, offset1))
	    {
	      if (!known_size_p (a.max_size)
		  || known_ge (aoffset1 + a.max_size, offset1))
		{
		  update2 (new_parm_offset, offset1, size, max_size,
			   aoffset1, a.size, a.max_size,
			   record_adjustments);
		  return true;
		}
	    }
	  return false;
	}
      update (new_parm_offset, offset1,
	      size, max_size, record_adjustments);
      return true;
    }
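
  /* Worked example (editorial note): two 32-bit accesses through the same
     parameter at bit offsets 0 and 32 with equal parm_offset are adjacent,
     so the merge succeeds losslessly and update2 yields a single node with
     offset 0, size 32 and max_size 64.  */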
  /* Return true if merging A1 and B1 loses less information
     than merging A2 and B2.
     Assume that no containment or lossless merging is possible.  */
  static bool closer_pair_p (const modref_access_node &a1,
			     const modref_access_node &b1,
			     const modref_access_node &a2,
			     const modref_access_node &b2)
    {
      /* Merging different parm indexes comes to complete loss
	 of range info.  */
      if (a1.parm_index != b1.parm_index)
	return false;
      if (a2.parm_index != b2.parm_index)
	return true;
      /* If parm is known and parm indexes are the same we should
	 already have containment.  */
      gcc_checking_assert (a1.parm_offset_known && b1.parm_offset_known);
      gcc_checking_assert (a2.parm_offset_known && b2.parm_offset_known);

      /* First normalize offsets for parm offsets.  */
      poly_int64 new_parm_offset, offseta1, offsetb1, offseta2, offsetb2;
      if (!a1.combined_offsets (b1, &new_parm_offset, &offseta1, &offsetb1)
	  || !a2.combined_offsets (b2, &new_parm_offset, &offseta2, &offsetb2))
	gcc_unreachable ();

      /* Now compute distance of the intervals.  */
      poly_int64 dist1, dist2;
      if (known_le (offseta1, offsetb1))
	{
	  if (!known_size_p (a1.max_size))
	    dist1 = 0;
	  else
	    dist1 = offsetb1 - offseta1 - a1.max_size;
	}
      else
	{
	  if (!known_size_p (b1.max_size))
	    dist1 = 0;
	  else
	    dist1 = offseta1 - offsetb1 - b1.max_size;
	}
      if (known_le (offseta2, offsetb2))
	{
	  if (!known_size_p (a2.max_size))
	    dist2 = 0;
	  else
	    dist2 = offsetb2 - offseta2 - a2.max_size;
	}
      else
	{
	  if (!known_size_p (b2.max_size))
	    dist2 = 0;
	  else
	    dist2 = offseta2 - offsetb2 - b2.max_size;
	}
      /* It may happen that intervals overlap in case size
	 is different.  Prefer the overlap to non-overlap.  */
      if (known_lt (dist1, 0) && known_ge (dist2, 0))
	return true;
      if (known_lt (dist2, 0) && known_ge (dist1, 0))
	return false;
      if (known_lt (dist1, 0))
	/* If both overlap, minimize the overlap.  */
	return known_le (dist2, dist1);
      /* If both are disjoint, look for smaller distance.  */
      return known_le (dist1, dist2);
    }
  /* Merge in access A while losing precision.  */
  void forced_merge (const modref_access_node &a, bool record_adjustments)
    {
      if (parm_index != a.parm_index)
	{
	  gcc_checking_assert (parm_index != -1);
	  parm_index = -1;
	  return;
	}

      /* We assume that containment and lossless merging
	 were tested earlier.  */
      gcc_checking_assert (!contains (a) && !a.contains (*this)
			   && !merge (a, record_adjustments));
      gcc_checking_assert (parm_offset_known && a.parm_offset_known);

      poly_int64 new_parm_offset, offset1, aoffset1;
      if (!combined_offsets (a, &new_parm_offset, &offset1, &aoffset1))
	{
	  parm_offset_known = false;
	  return;
	}
      gcc_checking_assert (range_info_useful_p ()
			   && a.range_info_useful_p ());
      if (record_adjustments)
	adjustments += a.adjustments;
      update2 (new_parm_offset,
	       offset1, size, max_size,
	       aoffset1, a.size, a.max_size,
	       record_adjustments);
    }
private:
  /* Merge two ranges both starting at parm_offset1 and update THIS
     with the result.  */
  void update2 (poly_int64 parm_offset1,
		poly_int64 offset1, poly_int64 size1, poly_int64 max_size1,
		poly_int64 offset2, poly_int64 size2, poly_int64 max_size2,
		bool record_adjustments)
    {
      poly_int64 new_size = size1;

      if (!known_size_p (size2)
	  || known_le (size2, size1))
	new_size = size2;
      else
	gcc_checking_assert (known_le (size1, size2));

      if (known_le (offset1, offset2))
	;
      else if (known_le (offset2, offset1))
	{
	  std::swap (offset1, offset2);
	  std::swap (max_size1, max_size2);
	}
      else
	gcc_unreachable ();

      poly_int64 new_max_size;

      if (!known_size_p (max_size1))
	new_max_size = max_size1;
      else if (!known_size_p (max_size2))
	new_max_size = max_size2;
      else
	{
	  new_max_size = max_size2 + offset2 - offset1;
	  if (known_le (new_max_size, max_size1))
	    new_max_size = max_size1;
	}

      update (parm_offset1, offset1,
	      new_size, new_max_size, record_adjustments);
    }
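
  /* Worked example (editorial note): for ranges starting at the same
     parm_offset with offset1 = 0, max_size1 = 32 and offset2 = 16,
     max_size2 = 32, the combined bound is new_max_size = max_size2
     + offset2 - offset1 = 48, covering bits [0, 48) of both ranges.  */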
  /* Given access nodes THIS and A, return true if they
     can be done with common parm_offsets.  In this case
     return the parm offset in NEW_PARM_OFFSET, NEW_OFFSET
     which is the start of range in THIS and NEW_AOFFSET that
     is the start of range in A.  */
  bool combined_offsets (const modref_access_node &a,
			 poly_int64 *new_parm_offset,
			 poly_int64 *new_offset,
			 poly_int64 *new_aoffset) const
    {
      gcc_checking_assert (parm_offset_known && a.parm_offset_known);
      if (known_le (a.parm_offset, parm_offset))
	{
	  *new_offset = offset
			+ ((parm_offset - a.parm_offset)
			   << LOG2_BITS_PER_UNIT);
	  *new_aoffset = a.offset;
	  *new_parm_offset = a.parm_offset;
	  return true;
	}
      else if (known_le (parm_offset, a.parm_offset))
	{
	  *new_aoffset = a.offset
			 + ((a.parm_offset - parm_offset)
			    << LOG2_BITS_PER_UNIT);
	  *new_offset = offset;
	  *new_parm_offset = parm_offset;
	  return true;
	}
      else
	return false;
    }
};
/* Access node specifying no useful info.  */
const modref_access_node unspecified_modref_access_node
		 = {0, -1, -1, 0, -1, false, 0};
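
/* Editorial note: the initializer fields are, in order, offset, size,
   max_size, parm_offset, parm_index, parm_offset_known and adjustments;
   parm_index of -1 makes useful_p () false, so this node carries no
   useful access info.  */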
template <typename T>
struct GTY((user)) modref_ref_node
{
  T ref;
  bool every_access;
  vec <modref_access_node, va_gc> *accesses;

  modref_ref_node (T ref):
    ref (ref),
    every_access (false),
    accesses (NULL)
  {}
  /* Collapse the tree.  */
  void collapse ()
  {
    vec_free (accesses);
    accesses = NULL;
    every_access = true;
  }

  /* Verify that list does not contain redundant accesses.  */
  void verify ()
  {
    size_t i, i2;
    modref_access_node *a, *a2;

    FOR_EACH_VEC_SAFE_ELT (accesses, i, a)
      {
	FOR_EACH_VEC_SAFE_ELT (accesses, i2, a2)
	  if (i != i2)
	    gcc_assert (!a->contains (*a2));
      }
  }
  /* Insert access with OFFSET and SIZE.
     Collapse tree if it has more than MAX_ACCESSES entries.
     If RECORD_ADJUSTMENTS is true avoid too many interval extensions.
     Return true if record was changed.  */
  bool insert_access (modref_access_node a, size_t max_accesses,
		      bool record_adjustments)
  {
    /* If this base->ref pair has no access information, bail out.  */
    if (every_access)
      return false;

    /* Otherwise, insert a node for the ref of the access under the base.  */
    size_t i, j;
    modref_access_node *a2;

    FOR_EACH_VEC_SAFE_ELT (accesses, i, a2)
      {
	if (a2->contains (a))
	  return false;
	if (a.contains (*a2))
	  {
	    a.adjustments = 0;
	    a2->parm_index = a.parm_index;
	    a2->parm_offset_known = a.parm_offset_known;
	    a2->update (a.parm_offset, a.offset, a.size, a.max_size,
			record_adjustments);
	    try_merge_with (i);
	    return true;
	  }
	if (a2->merge (a, record_adjustments))
	  {
	    try_merge_with (i);
	    return true;
	  }
	gcc_checking_assert (!(a == *a2));
      }

    /* If this base->ref pair has too many accesses stored, we will clear
       all accesses and bail out.  */
    if (accesses && accesses->length () >= max_accesses)
      {
	if (max_accesses < 2)
	  {
	    collapse ();
	    if (dump_file)
	      fprintf (dump_file,
		       "--param param=modref-max-accesses limit reached;"
		       " collapsing\n");
	    return true;
	  }
	/* Find least harmful merge and perform it.  */
	int best1 = -1, best2 = -1;
	FOR_EACH_VEC_SAFE_ELT (accesses, i, a2)
	  {
	    for (j = i + 1; j < accesses->length (); j++)
	      if (best1 < 0
		  || modref_access_node::closer_pair_p
			(*a2, (*accesses)[j],
			 (*accesses)[best1],
			 best2 < 0 ? a : (*accesses)[best2]))
		{
		  best1 = (int)i;
		  best2 = (int)j;
		}
	    if (modref_access_node::closer_pair_p
		  (*a2, a,
		   (*accesses)[best1],
		   best2 < 0 ? a : (*accesses)[best2]))
	      {
		best1 = (int)i;
		best2 = -1;
	      }
	  }
	(*accesses)[best1].forced_merge (best2 < 0 ? a : (*accesses)[best2],
					 record_adjustments);
	/* Check that merging indeed merged ranges.  */
	gcc_checking_assert ((*accesses)[best1].contains
				(best2 < 0 ? a : (*accesses)[best2]));
	if (!(*accesses)[best1].useful_p ())
	  {
	    collapse ();
	    if (dump_file)
	      fprintf (dump_file,
		       "--param param=modref-max-accesses limit reached;"
		       " collapsing\n");
	    return true;
	  }
	if (dump_file && best2 >= 0)
	  fprintf (dump_file,
		   "--param param=modref-max-accesses limit reached;"
		   " merging %i and %i\n", best1, best2);
	else if (dump_file)
	  fprintf (dump_file,
		   "--param param=modref-max-accesses limit reached;"
		   " merging with %i\n", best1);
	try_merge_with (best1);
	if (best2 >= 0)
	  insert_access (a, max_accesses, record_adjustments);
	return true;
      }
    vec_safe_push (accesses, a);
    return true;
  }
private:
  /* Try to optimize the access list after entry INDEX was modified.  */
  void
  try_merge_with (size_t index)
  {
    size_t i;

    for (i = 0; i < accesses->length ();)
      if (i != index)
	{
	  bool found = false, restart = false;
	  modref_access_node *a = &(*accesses)[i];
	  modref_access_node *n = &(*accesses)[index];

	  if (n->contains (*a))
	    found = true;
	  if (!found && n->merge (*a, false))
	    found = restart = true;
	  gcc_checking_assert (found || !a->merge (*n, false));
	  if (found)
	    {
	      accesses->unordered_remove (i);
	      if (index == accesses->length ())
		index = i;
	      if (restart)
		i = 0;
	    }
	  else
	    i++;
	}
      else
	i++;
  }
};
/* Base of an access.  */
template <typename T>
struct GTY((user)) modref_base_node
{
  T base;
  vec <modref_ref_node <T> *, va_gc> *refs;
  bool every_ref;

  modref_base_node (T base):
    base (base),
    refs (NULL),
    every_ref (false) {}

  /* Search REF; return NULL if failed.  */
  modref_ref_node <T> *search (T ref)
  {
    size_t i;
    modref_ref_node <T> *n;
    FOR_EACH_VEC_SAFE_ELT (refs, i, n)
      if (n->ref == ref)
	return n;
    return NULL;
  }
  /* Insert REF; collapse tree if there are more than MAX_REFS.
     Return inserted ref and if CHANGED is non-null set it to true if
     something changed.  */
  modref_ref_node <T> *insert_ref (T ref, size_t max_refs,
				   bool *changed = NULL)
  {
    modref_ref_node <T> *ref_node;

    /* If the node is collapsed, don't do anything.  */
    if (every_ref)
      return NULL;

    /* Otherwise, insert a node for the ref of the access under the base.  */
    ref_node = search (ref);
    if (ref_node)
      return ref_node;

    /* We always allow inserting ref 0.  For non-0 refs there is an upper
       limit on the number of entries and if it is exceeded,
       drop ref conservatively to 0.  */
    if (ref && refs && refs->length () >= max_refs)
      {
	if (dump_file)
	  fprintf (dump_file, "--param param=modref-max-refs limit reached;"
		   " using 0\n");
	ref = 0;
	ref_node = search (ref);
	if (ref_node)
	  return ref_node;
      }

    if (changed)
      *changed = true;

    ref_node = new (ggc_alloc <modref_ref_node <T> > ())modref_ref_node <T>
								 (ref);
    vec_safe_push (refs, ref_node);
    return ref_node;
  }
  /* Collapse the tree.  */
  void collapse ()
  {
    size_t i;
    modref_ref_node <T> *r;

    if (refs)
      {
	FOR_EACH_VEC_SAFE_ELT (refs, i, r)
	  {
	    r->collapse ();
	    ggc_free (r);
	  }
	vec_free (refs);
	refs = NULL;
      }
    every_ref = true;
  }
};
/* Map translating parameters across function call.  */

struct modref_parm_map
{
  /* Index of parameter we translate to.
     -1 indicates that the parameter is unknown;
     -2 indicates that the parameter points to local memory and the access
	can be discarded.  */
  int parm_index;
  bool parm_offset_known;
  poly_int64 parm_offset;
};
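
/* Illustrative example (editorial sketch): when merging a callee summary
   into a caller across the call "foo (&local, q + 4)", the map would hold
   { -2, false, 0 } for the first argument (local memory, accesses can be
   dropped) and { 1, true, 4 } for the second (callee parameter 0 becomes
   caller parameter 1 with a 4-byte offset), assuming q is the caller's
   parameter 1.  */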
/* Access tree for a single function.  */
template <typename T>
struct GTY((user)) modref_tree
{
  vec <modref_base_node <T> *, va_gc> *bases;
  size_t max_bases;
  size_t max_refs;
  size_t max_accesses;
  bool every_base;

  modref_tree (size_t max_bases, size_t max_refs, size_t max_accesses):
    bases (NULL),
    max_bases (max_bases),
    max_refs (max_refs),
    max_accesses (max_accesses),
    every_base (false) {}
  /* Insert BASE; collapse tree if there are more than MAX_BASES.
     Return inserted base and if CHANGED is non-null set it to true if
     something changed.
     If table gets full, try to insert REF instead.  */

  modref_base_node <T> *insert_base (T base, T ref, bool *changed = NULL)
  {
    modref_base_node <T> *base_node;

    /* If the node is collapsed, don't do anything.  */
    if (every_base)
      return NULL;

    /* Otherwise, insert a node for the base of the access into the tree.  */
    base_node = search (base);
    if (base_node)
      return base_node;

    /* We always allow inserting base 0.  For non-0 base there is an upper
       limit on the number of entries and if it is exceeded,
       drop base conservatively to ref and if it still does not fit to 0.  */
    if (base && bases && bases->length () >= max_bases)
      {
	base_node = search (ref);
	if (base_node)
	  {
	    if (dump_file)
	      fprintf (dump_file, "--param param=modref-max-bases"
		       " limit reached; using ref\n");
	    return base_node;
	  }
	if (dump_file)
	  fprintf (dump_file, "--param param=modref-max-bases"
		   " limit reached; using 0\n");
	base = 0;
	base_node = search (base);
	if (base_node)
	  return base_node;
      }

    if (changed)
      *changed = true;

    base_node = new (ggc_alloc <modref_base_node <T> > ())
			 modref_base_node <T> (base);
    vec_safe_push (bases, base_node);
    return base_node;
  }
  /* Insert memory access to the tree.
     Return true if something changed.  */
  bool insert (T base, T ref, modref_access_node a,
	       bool record_adjustments)
  {
    if (every_base)
      return false;

    bool changed = false;

    /* No useful information tracked; collapse everything.  */
    if (!base && !ref && !a.useful_p ())
      {
	collapse ();
	return true;
      }

    modref_base_node <T> *base_node = insert_base (base, ref, &changed);
    base = base_node->base;
    /* If table got full we may end up with useless base.  */
    if (!base && !ref && !a.useful_p ())
      {
	collapse ();
	return true;
      }
    if (base_node->every_ref)
      return changed;
    gcc_checking_assert (search (base) != NULL);

    /* No useful ref info tracked; collapse base.  */
    if (!ref && !a.useful_p ())
      {
	base_node->collapse ();
	return true;
      }

    modref_ref_node <T> *ref_node = base_node->insert_ref (ref, max_refs,
							   &changed);
    if (ref_node->every_access)
      return changed;
    changed |= ref_node->insert_access (a, max_accesses,
					record_adjustments);
    /* See if we failed to add useful access.  */
    if (ref_node->every_access)
      {
	/* Collapse everything if there is no useful base and ref.  */
	if (!base && !ref)
	  {
	    collapse ();
	    gcc_checking_assert (changed);
	  }
	/* Collapse base if there is no useful ref.  */
	else if (!ref)
	  {
	    base_node->collapse ();
	    gcc_checking_assert (changed);
	  }
      }
    return changed;
  }
  /* Remove tree branches that are not useful (i.e. they will always pass).  */
  void cleanup ()
  {
    size_t i, j;
    modref_base_node <T> *base_node;
    modref_ref_node <T> *ref_node;

    if (!bases)
      return;

    for (i = 0; vec_safe_iterate (bases, i, &base_node);)
      {
	for (j = 0; vec_safe_iterate (base_node->refs, j, &ref_node);)
	  {
	    if (!ref_node->every_access
		&& (!ref_node->accesses
		    || !ref_node->accesses->length ()))
	      {
		base_node->refs->unordered_remove (j);
		vec_free (ref_node->accesses);
		ggc_delete (ref_node);
	      }
	    else
	      j++;
	  }
	if (!base_node->every_ref
	    && (!base_node->refs || !base_node->refs->length ()))
	  {
	    bases->unordered_remove (i);
	    vec_free (base_node->refs);
	    ggc_delete (base_node);
	  }
	else
	  i++;
      }
    if (bases && !bases->length ())
      {
	vec_free (bases);
	bases = NULL;
      }
  }
  /* Merge OTHER into the tree.
     PARM_MAP, if non-NULL, maps parm indexes of callee to caller.  -2 is used
     to signal that the parameter is local and does not need to be tracked.
     Return true if something has changed.  */
  bool merge (modref_tree <T> *other, vec <modref_parm_map> *parm_map,
	      bool record_accesses)
  {
    if (!other || every_base)
      return false;
    if (other->every_base)
      {
	collapse ();
	return true;
      }

    bool changed = false;
    size_t i, j, k;
    modref_base_node <T> *base_node, *my_base_node;
    modref_ref_node <T> *ref_node;
    modref_access_node *access_node;
    bool release = false;

    /* For self-recursive functions we may end up merging summary into itself;
       produce a copy first so we do not modify the summary under our own
       hands.  */
    if (other == this)
      {
	release = true;
	other = modref_tree<T>::create_ggc (max_bases, max_refs, max_accesses);
	other->copy_from (this);
      }

    FOR_EACH_VEC_SAFE_ELT (other->bases, i, base_node)
      {
	if (base_node->every_ref)
	  {
	    my_base_node = insert_base (base_node->base, 0, &changed);
	    if (my_base_node && !my_base_node->every_ref)
	      {
		my_base_node->collapse ();
		changed = true;
	      }
	  }
	else
	  FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	    {
	      if (ref_node->every_access)
		{
		  changed |= insert (base_node->base,
				     ref_node->ref,
				     unspecified_modref_access_node,
				     record_accesses);
		}
	      else
		FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
		  {
		    modref_access_node a = *access_node;

		    if (a.parm_index != -1 && parm_map)
		      {
			if (a.parm_index >= (int)parm_map->length ())
			  a.parm_index = -1;
			else if ((*parm_map) [a.parm_index].parm_index == -2)
			  continue;
			else
			  {
			    a.parm_offset
			      += (*parm_map) [a.parm_index].parm_offset;
			    a.parm_offset_known
			      &= (*parm_map)
				      [a.parm_index].parm_offset_known;
			    a.parm_index
			      = (*parm_map) [a.parm_index].parm_index;
			  }
		      }
		    changed |= insert (base_node->base, ref_node->ref, a,
				       record_accesses);
		  }
	    }
      }
    if (release)
      ggc_delete (other);
    return changed;
  }

  /* Copy OTHER to THIS.  */
  void copy_from (modref_tree <T> *other)
  {
    merge (other, NULL, false);
  }
  /* Search BASE in tree; return NULL if failed.  */
  modref_base_node <T> *search (T base)
  {
    size_t i;
    modref_base_node <T> *n;
    FOR_EACH_VEC_SAFE_ELT (bases, i, n)
      if (n->base == base)
	return n;
    return NULL;
  }
  /* Return true if tree contains access to global memory.  */
  bool global_access_p ()
  {
    size_t i, j, k;
    modref_base_node <T> *base_node;
    modref_ref_node <T> *ref_node;
    modref_access_node *access_node;
    if (every_base)
      return true;
    FOR_EACH_VEC_SAFE_ELT (bases, i, base_node)
      {
	if (base_node->every_ref)
	  return true;
	FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	  {
	    if (ref_node->every_access)
	      return true;
	    FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
	      if (access_node->parm_index < 0)
		return true;
	  }
      }
    return false;
  }
  /* Return ggc allocated instance.  We explicitly call destructors via
     ggc_delete and do not want finalizers to be registered and
     called at the garbage collection time.  */
  static modref_tree<T> *create_ggc (size_t max_bases, size_t max_refs,
				     size_t max_accesses)
  {
    return new (ggc_alloc_no_dtor<modref_tree<T>> ())
	     modref_tree<T> (max_bases, max_refs, max_accesses);
  }
  /* Remove all records and mark tree to alias with everything.  */
  void collapse ()
  {
    size_t i;
    modref_base_node <T> *n;

    if (bases)
      {
	FOR_EACH_VEC_SAFE_ELT (bases, i, n)
	  {
	    n->collapse ();
	    ggc_free (n);
	  }
	vec_free (bases);
	bases = NULL;
      }
    every_base = true;
  }
  /* Release memory.  */
  ~modref_tree ()
  {
    collapse ();
  }
  /* Update parameter indexes in the tree according to MAP.  */
  void
  remap_params (vec <int> *map)
  {
    size_t i;
    modref_base_node <T> *base_node;
    FOR_EACH_VEC_SAFE_ELT (bases, i, base_node)
      {
	size_t j;
	modref_ref_node <T> *ref_node;
	FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	  {
	    size_t k;
	    modref_access_node *access_node;
	    FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
	      if (access_node->parm_index > 0)
		{
		  if (access_node->parm_index < (int)map->length ())
		    access_node->parm_index = (*map)[access_node->parm_index];
		  else
		    access_node->parm_index = -1;
		}
	  }
      }
  }
};
void modref_c_tests ();

void gt_ggc_mx (modref_tree <int>* const&);
void gt_ggc_mx (modref_tree <tree_node*>* const&);
void gt_pch_nx (modref_tree <int>* const&);
void gt_pch_nx (modref_tree <tree_node*>* const&);
void gt_pch_nx (modref_tree <int>* const&, gt_pointer_operator op, void *cookie);
void gt_pch_nx (modref_tree <tree_node*>* const&, gt_pointer_operator op,
		void *cookie);

void gt_ggc_mx (modref_base_node <int>*);
void gt_ggc_mx (modref_base_node <tree_node*>* &);
void gt_pch_nx (modref_base_node <int>* const&);
void gt_pch_nx (modref_base_node <tree_node*>* const&);
void gt_pch_nx (modref_base_node <int>* const&, gt_pointer_operator op,
		void *cookie);
void gt_pch_nx (modref_base_node <tree_node*>* const&, gt_pointer_operator op,
		void *cookie);

void gt_ggc_mx (modref_ref_node <int>*);
void gt_ggc_mx (modref_ref_node <tree_node*>* &);
void gt_pch_nx (modref_ref_node <int>* const&);
void gt_pch_nx (modref_ref_node <tree_node*>* const&);
void gt_pch_nx (modref_ref_node <int>* const&, gt_pointer_operator op,
		void *cookie);
void gt_pch_nx (modref_ref_node <tree_node*>* const&, gt_pointer_operator op,
		void *cookie);

#endif /* GCC_MODREF_TREE_H */