/* Data structure for the modref pass.
   Copyright (C) 2020-2021 Free Software Foundation, Inc.
   Contributed by David Cepelik and Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* modref_tree represents a decision tree that can be used by the alias
   analysis oracle to determine whether a given memory access can be affected
   by a function call.  For every function we collect two trees, one for loads
   and the other for stores.  The tree consists of the following levels:

   1) Base: this level represents the base alias set of the access and refers
      to sons (ref nodes).  The flag all_refs means that all possible
      references are aliasing.

      Because for LTO streaming we need to stream types rather than alias sets
      modref_base_node is implemented as a template.
   2) Ref: this level represents the ref alias set and links to accesses
      unless the all_refs flag is set.
      Again ref is a template to allow LTO streaming.
   3) Access: this level represents info about individual accesses.  Presently
      we record whether the access is through a dereference of a function
      parameter and if so we record the access range.  */

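/* Illustrative example (a sketch only; the alias sets shown are
   hypothetical).  For a function whose only store is

     void foo (struct s *p) { p->x = 1; }

   the store tree conceptually contains a single chain

     base:    alias set of 'struct s'
       ref:     alias set of 'int' (the type of p->x)
         access:  parm_index 0, parm_offset 0, offset/size of field 'x'

   and the oracle reports a possible conflict for a query only when both
   alias sets overlap and the query may reach the recorded
   parameter-relative range.  */
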
#ifndef GCC_MODREF_TREE_H
#define GCC_MODREF_TREE_H

struct ipa_modref_summary;

/* Parm indexes greater than or equal to 0 are normal parms.
   Some negative values have special meaning.  */
enum modref_special_parms {
  MODREF_UNKNOWN_PARM = -1,
  MODREF_STATIC_CHAIN_PARM = -2,
  MODREF_RETSLOT_PARM = -3,
  /* Used for bases that point to memory that escapes from function.  */
  MODREF_GLOBAL_MEMORY_PARM = -4,
  /* Used in modref_parm_map to tag references which can be removed
     from the summary during summary update since they now point to local
     memory.  */
  MODREF_LOCAL_MEMORY_PARM = -5
};

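/* Illustrative sketch of how a consumer may interpret parm_index values
   (describe_parm_index is a hypothetical helper, not part of this
   interface):

     static const char *
     describe_parm_index (int parm_index)
     {
       if (parm_index >= 0)
	 return "relative to a formal parameter";
       switch (parm_index)
	 {
	 case MODREF_STATIC_CHAIN_PARM:  return "relative to the static chain";
	 case MODREF_RETSLOT_PARM:       return "relative to the return slot";
	 case MODREF_GLOBAL_MEMORY_PARM: return "global or escaped memory";
	 case MODREF_LOCAL_MEMORY_PARM:  return "local memory (removable)";
	 default:                        return "unknown base";
	 }
     }  */
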
/* Modref records accesses relative to function parameters.
   This is the entry for a single access specifying its base and access range.

   Accesses can be collected into bounded-size arrays using
   modref_access_node::insert.  */
struct GTY(()) modref_access_node
{
  /* Access range information (in bits).  */
  poly_int64 offset;
  poly_int64 size;
  poly_int64 max_size;

  /* Offset from parameter pointer to the base of the access (in bytes).  */
  poly_int64 parm_offset;

  /* Index of parameter which specifies the base of access.  -1 if base is not
     a function parameter.  */
  int parm_index;
  bool parm_offset_known;
  /* Number of times interval was extended during dataflow.
     This has to be limited in order to keep dataflow finite.  */
  unsigned char adjustments;

  /* Return true if access node holds some useful info.  */
  bool useful_p () const
    {
      return parm_index != MODREF_UNKNOWN_PARM;
    }
  /* Return true if access can be used to determine a kill.  */
  bool useful_for_kill_p () const
    {
      return parm_offset_known && parm_index != MODREF_UNKNOWN_PARM
	     && parm_index != MODREF_GLOBAL_MEMORY_PARM
	     && parm_index != MODREF_RETSLOT_PARM && known_size_p (size)
	     && known_eq (max_size, size)
	     && known_gt (size, 0);
    }
  /* Dump range to debug OUT.  */
  void dump (FILE *out);
  /* Return true if both accesses are the same.  */
  bool operator == (modref_access_node &a) const;
  /* Return true if range info is useful.  */
  bool range_info_useful_p () const;
  /* Return tree corresponding to parameter of the range in STMT.  */
  tree get_call_arg (const gcall *stmt) const;
  /* Build ao_ref corresponding to the access and return true if successful.  */
  bool get_ao_ref (const gcall *stmt, class ao_ref *ref) const;
  /* Stream access to OB.  */
  void stream_out (struct output_block *ob) const;
  /* Stream access in from IB.  */
  static modref_access_node stream_in (struct lto_input_block *ib);
  /* Insert A into vector ACCESSES.  Limit size of vector to MAX_ACCESSES and
     if RECORD_ADJUSTMENTS is true keep track of adjustment counts.
     Return 0 if nothing changed, 1 if insertion succeeded and -1 on failure.  */
  static int insert (vec <modref_access_node, va_gc> *&accesses,
		     modref_access_node a, size_t max_accesses,
		     bool record_adjustments);
  /* Same as insert but for kills where we are conservative the other way
     around: if information is lost, the kill is lost.  */
  static bool insert_kill (vec<modref_access_node> &kills,
			   modref_access_node &a, bool record_adjustments);

private:
  bool contains (const modref_access_node &) const;
  bool contains_for_kills (const modref_access_node &) const;
  void update (poly_int64, poly_int64, poly_int64, poly_int64, bool);
  bool update_for_kills (poly_int64, poly_int64, poly_int64,
			 poly_int64, poly_int64, bool);
  bool merge (const modref_access_node &, bool);
  bool merge_for_kills (const modref_access_node &, bool);
  static bool closer_pair_p (const modref_access_node &,
			     const modref_access_node &,
			     const modref_access_node &,
			     const modref_access_node &);
  void forced_merge (const modref_access_node &, bool);
  void update2 (poly_int64, poly_int64, poly_int64, poly_int64,
		poly_int64, poly_int64, poly_int64, bool);
  bool combined_offsets (const modref_access_node &,
			 poly_int64 *, poly_int64 *, poly_int64 *) const;
  static void try_merge_with (vec <modref_access_node, va_gc> *&, size_t);
};

/* Access node specifying no useful info.  */
const modref_access_node unspecified_modref_access_node
  = {0, -1, -1, 0, MODREF_UNKNOWN_PARM, false, 0};

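/* Illustrative sketch of collecting accesses (all numbers are made up):
   a 32-bit load located 4 bytes past what parameter 0 points to could be
   described and recorded as

     modref_access_node a = {0, 32, 32, 4, 0, true, 0};
     vec <modref_access_node, va_gc> *accesses = NULL;
     int ret = modref_access_node::insert (accesses, a, 8, false);

   where offset/size/max_size are in bits, parm_offset is in bytes, and
   ret is 1 on successful insertion, 0 if nothing changed and -1 when the
   limit of 8 entries was hit and the caller should collapse the enclosing
   ref node.  */
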
template <typename T>
struct GTY((user)) modref_ref_node
{
  T ref;
  bool every_access;
  vec <modref_access_node, va_gc> *accesses;

  modref_ref_node (T ref):
    ref (ref),
    every_access (false),
    accesses (NULL)
  {}

  /* Collapse the tree.  */
  void collapse ()
  {
    vec_free (accesses);
    accesses = NULL;
    every_access = true;
  }

  /* Insert access with OFFSET and SIZE.
     Collapse tree if it has more than MAX_ACCESSES entries.
     If RECORD_ADJUSTMENTS is true avoid too many interval extensions.
     Return true if record was changed.  */
  bool insert_access (modref_access_node a, size_t max_accesses,
		      bool record_adjustments)
  {
    /* If this base->ref pair has no access information, bail out.  */
    if (every_access)
      return false;

    /* Only the following kinds of parameters need to be tracked.
       We do not track return slots because they are seen as a direct store
       in the caller.  */
    gcc_checking_assert (a.parm_index >= 0
			 || a.parm_index == MODREF_STATIC_CHAIN_PARM
			 || a.parm_index == MODREF_GLOBAL_MEMORY_PARM
			 || a.parm_index == MODREF_UNKNOWN_PARM);

    if (!a.useful_p ())
      {
	if (!every_access)
	  {
	    collapse ();
	    return true;
	  }
	return false;
      }

    int ret = modref_access_node::insert (accesses, a, max_accesses,
					  record_adjustments);
    if (ret == -1)
      {
	if (dump_file)
	  fprintf (dump_file,
		   "--param param=modref-max-accesses limit reached;"
		   " collapsing\n");
	collapse ();
      }
    return ret != 0;
  }
};

/* Base of an access.  */
template <typename T>
struct GTY((user)) modref_base_node
{
  T base;
  vec <modref_ref_node <T> *, va_gc> *refs;
  bool every_ref;

  modref_base_node (T base):
    base (base),
    refs (NULL),
    every_ref (false) {}

  /* Search REF; return NULL if failed.  */
  modref_ref_node <T> *search (T ref)
  {
    size_t i;
    modref_ref_node <T> *n;
    FOR_EACH_VEC_SAFE_ELT (refs, i, n)
      if (n->ref == ref)
	return n;
    return NULL;
  }

  /* Insert REF; collapse tree if there are more than MAX_REFS.
     Return inserted ref and if CHANGED is non-null set it to true if
     something changed.  */
  modref_ref_node <T> *insert_ref (T ref, size_t max_refs,
				   bool *changed = NULL)
  {
    modref_ref_node <T> *ref_node;

    /* If the node is collapsed, don't do anything.  */
    if (every_ref)
      return NULL;

    /* Otherwise, insert a node for the ref of the access under the base.  */
    ref_node = search (ref);
    if (ref_node)
      return ref_node;

    /* We always allow inserting ref 0.  For non-0 refs there is an upper
       limit on the number of entries and if it is exceeded,
       drop the ref conservatively to 0.  */
    if (ref && refs && refs->length () >= max_refs)
      {
	if (dump_file)
	  fprintf (dump_file, "--param param=modref-max-refs limit reached;"
		   " using 0\n");
	ref = 0;
	ref_node = search (ref);
	if (ref_node)
	  return ref_node;
      }

    if (changed)
      *changed = true;

    ref_node = new (ggc_alloc <modref_ref_node <T> > ()) modref_ref_node <T>
								 (ref);
    vec_safe_push (refs, ref_node);
    return ref_node;
  }

  void collapse ()
  {
    size_t i;
    modref_ref_node <T> *r;

    if (refs)
      {
	FOR_EACH_VEC_SAFE_ELT (refs, i, r)
	  {
	    r->collapse ();
	    ggc_free (r);
	  }
	vec_free (refs);
      }
    refs = NULL;
    every_ref = true;
  }
};

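/* Illustrative sketch (record_ref is a hypothetical helper; the limit of
   16 refs is made up):

     static modref_ref_node <alias_set_type> *
     record_ref (modref_base_node <alias_set_type> *b, alias_set_type ref)
     {
       bool changed = false;
       return b->insert_ref (ref, 16, &changed);
     }

   Ref 0 stands for "any ref under this base"; when the per-base table is
   full, insert_ref conservatively falls back to ref 0 instead of failing.
   A NULL result only means that the base is already collapsed.  */
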
/* Map translating parameters across function call.  */

struct modref_parm_map
{
  /* Default constructor.  */
  modref_parm_map ()
  : parm_index (MODREF_UNKNOWN_PARM), parm_offset_known (false), parm_offset ()
  {}

  /* Index of parameter we translate to.
     Values from the modref_special_parms enum are permitted too.  */
  int parm_index;
  bool parm_offset_known;
  poly_int64 parm_offset;
};

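/* Illustrative sketch of filling a parm_map when merging a callee summary
   into a caller (the call shape and indexes are made up): for a call
   foo (&local, arg0) where 'local' does not escape and 'arg0' is the
   caller's parameter 0 forwarded unchanged, one could build

     auto_vec <modref_parm_map, 32> parm_map;
     modref_parm_map m0, m1;
     m0.parm_index = MODREF_LOCAL_MEMORY_PARM;
     m1.parm_index = 0;
     m1.parm_offset_known = true;
     m1.parm_offset = 0;
     parm_map.safe_push (m0);
     parm_map.safe_push (m1);

   During merge, parm_offset is added to the parm_offset of every translated
   access and MODREF_LOCAL_MEMORY_PARM entries cause the access to be
   dropped.  */
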
/* Access tree for a single function.  */
template <typename T>
struct GTY((user)) modref_tree
{
  vec <modref_base_node <T> *, va_gc> *bases;
  bool every_base;

  modref_tree ():
    bases (NULL),
    every_base (false) {}

  /* Insert BASE; collapse tree if there are more than MAX_BASES.
     Return inserted base and if CHANGED is non-null set it to true if
     something changed.
     If table gets full, try to insert REF instead.  */

  modref_base_node <T> *insert_base (T base, T ref,
				     unsigned int max_bases,
				     bool *changed = NULL)
  {
    modref_base_node <T> *base_node;

    /* If the node is collapsed, don't do anything.  */
    if (every_base)
      return NULL;

    /* Otherwise, insert a node for the base of the access into the tree.  */
    base_node = search (base);
    if (base_node)
      return base_node;

    /* We always allow inserting base 0.  For a non-0 base there is an upper
       limit on the number of entries and if it is exceeded, drop the base
       conservatively to ref and if it still does not fit, to 0.  */
    if (base && bases && bases->length () >= max_bases)
      {
	base_node = search (ref);
	if (base_node)
	  {
	    if (dump_file)
	      fprintf (dump_file, "--param param=modref-max-bases"
		       " limit reached; using ref\n");
	    return base_node;
	  }
	if (dump_file)
	  fprintf (dump_file, "--param param=modref-max-bases"
		   " limit reached; using 0\n");
	base = 0;
	base_node = search (base);
	if (base_node)
	  return base_node;
      }

    if (changed)
      *changed = true;

    base_node = new (ggc_alloc <modref_base_node <T> > ())
		     modref_base_node <T> (base);
    vec_safe_push (bases, base_node);
    return base_node;
  }

  /* Insert memory access to the tree.
     Return true if something changed.  */
  bool insert (unsigned int max_bases,
	       unsigned int max_refs,
	       unsigned int max_accesses,
	       T base, T ref, modref_access_node a,
	       bool record_adjustments)
  {
    if (every_base)
      return false;

    bool changed = false;

    /* We may end up with max_size being less than size for accesses past the
       end of array.  Those are undefined and safe to ignore.  */
    if (a.range_info_useful_p ()
	&& known_size_p (a.size) && known_size_p (a.max_size)
	&& known_lt (a.max_size, a.size))
      {
	if (dump_file)
	  fprintf (dump_file,
		   "   - Paradoxical range. Ignoring\n");
	return false;
      }
    if (known_size_p (a.size)
	&& known_eq (a.size, 0))
      {
	if (dump_file)
	  fprintf (dump_file,
		   "   - Zero size. Ignoring\n");
	return false;
      }
    if (known_size_p (a.max_size)
	&& known_eq (a.max_size, 0))
      {
	if (dump_file)
	  fprintf (dump_file,
		   "   - Zero max_size. Ignoring\n");
	return false;
      }
    gcc_checking_assert (!known_size_p (a.max_size)
			 || !known_le (a.max_size, 0));

    /* No useful information tracked; collapse everything.  */
    if (!base && !ref && !a.useful_p ())
      {
	collapse ();
	return true;
      }

    modref_base_node <T> *base_node
      = insert_base (base, ref, max_bases, &changed);
    base = base_node->base;
    /* If table got full we may end up with useless base.  */
    if (!base && !ref && !a.useful_p ())
      {
	collapse ();
	return true;
      }
    if (base_node->every_ref)
      return changed;
    gcc_checking_assert (search (base) != NULL);

    /* No useful ref info tracked; collapse base.  */
    if (!ref && !a.useful_p ())
      {
	base_node->collapse ();
	return true;
      }

    modref_ref_node <T> *ref_node
      = base_node->insert_ref (ref, max_refs, &changed);
    ref = ref_node->ref;

    if (ref_node->every_access)
      return changed;
    changed |= ref_node->insert_access (a, max_accesses,
					record_adjustments);
    /* See if we failed to add useful access.  */
    if (ref_node->every_access)
      {
	/* Collapse everything if there is no useful base and ref.  */
	if (!base && !ref)
	  {
	    collapse ();
	    gcc_checking_assert (changed);
	  }
	/* Collapse base if there is no useful ref.  */
	else if (!ref)
	  {
	    base_node->collapse ();
	    gcc_checking_assert (changed);
	  }
      }
    return changed;
  }

  /* Insert memory access to the tree.
     Return true if something changed.  */
  bool insert (tree fndecl,
	       T base, T ref, const modref_access_node &a,
	       bool record_adjustments)
  {
    return insert (opt_for_fn (fndecl, param_modref_max_bases),
		   opt_for_fn (fndecl, param_modref_max_refs),
		   opt_for_fn (fndecl, param_modref_max_accesses),
		   base, ref, a, record_adjustments);
  }

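  /* Illustrative sketch of recording a load (the alias sets, offsets and
     the name 'loads' are made up):

       modref_tree <alias_set_type> *loads
	 = modref_tree <alias_set_type>::create_ggc ();
       modref_access_node a = {0, 32, 32, 4, 0, true, 0};
       loads->insert (current_function_decl, 1, 2, a, false);

     Base 1 and ref 2 stand for the base and ref alias sets of the access;
     passing 0 for either means "unknown" and makes the corresponding level
     of the tree collapse when no more precise information is available.  */
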
  /* Remove tree branches that are not useful (i.e. they will always pass).  */

  void cleanup ()
  {
    size_t i, j;
    modref_base_node <T> *base_node;
    modref_ref_node <T> *ref_node;

    if (!bases)
      return;

    for (i = 0; vec_safe_iterate (bases, i, &base_node);)
      {
	if (base_node->refs)
	  for (j = 0; vec_safe_iterate (base_node->refs, j, &ref_node);)
	    {
	      if (!ref_node->every_access
		  && (!ref_node->accesses
		      || !ref_node->accesses->length ()))
		{
		  base_node->refs->unordered_remove (j);
		  vec_free (ref_node->accesses);
		  ggc_delete (ref_node);
		}
	      else
		j++;
	    }
	if (!base_node->every_ref
	    && (!base_node->refs || !base_node->refs->length ()))
	  {
	    bases->unordered_remove (i);
	    vec_free (base_node->refs);
	    ggc_delete (base_node);
	  }
	else
	  i++;
      }
    if (bases && !bases->length ())
      {
	vec_free (bases);
	bases = NULL;
      }
  }

  /* Merge OTHER into the tree.
     PARM_MAP, if non-NULL, maps parm indexes of callee to caller.
     Similarly STATIC_CHAIN_MAP, if non-NULL, maps the static chain of callee
     to caller.
     Return true if something has changed.  */
  bool merge (unsigned int max_bases,
	      unsigned int max_refs,
	      unsigned int max_accesses,
	      modref_tree <T> *other, vec <modref_parm_map> *parm_map,
	      modref_parm_map *static_chain_map,
	      bool record_accesses,
	      bool promote_unknown_to_global = false)
  {
    if (!other || every_base)
      return false;
    if (other->every_base)
      {
	collapse ();
	return true;
      }

    bool changed = false;
    size_t i, j, k;
    modref_base_node <T> *base_node, *my_base_node;
    modref_ref_node <T> *ref_node;
    modref_access_node *access_node;
    bool release = false;

    /* For self-recursive functions we may end up merging summary into itself;
       produce copy first so we do not modify summary under our own hands.  */
    if (other == this)
      {
	release = true;
	other = modref_tree<T>::create_ggc ();
	other->copy_from (this);
      }

    FOR_EACH_VEC_SAFE_ELT (other->bases, i, base_node)
      {
	if (base_node->every_ref)
	  {
	    my_base_node = insert_base (base_node->base, 0,
					max_bases, &changed);
	    if (my_base_node && !my_base_node->every_ref)
	      {
		my_base_node->collapse ();
		cleanup ();
		changed = true;
	      }
	  }
	else
	  FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	    {
	      if (ref_node->every_access)
		{
		  changed |= insert (max_bases, max_refs, max_accesses,
				     base_node->base,
				     ref_node->ref,
				     unspecified_modref_access_node,
				     record_accesses);
		}
	      else
		FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
		  {
		    modref_access_node a = *access_node;

		    if (a.parm_index != MODREF_UNKNOWN_PARM
			&& a.parm_index != MODREF_GLOBAL_MEMORY_PARM
			&& parm_map)
		      {
			if (a.parm_index >= (int)parm_map->length ())
			  a.parm_index = MODREF_UNKNOWN_PARM;
			else
			  {
			    modref_parm_map &m
				 = a.parm_index == MODREF_STATIC_CHAIN_PARM
				   ? *static_chain_map
				   : (*parm_map) [a.parm_index];
			    if (m.parm_index == MODREF_LOCAL_MEMORY_PARM)
			      continue;
			    a.parm_offset += m.parm_offset;
			    a.parm_offset_known &= m.parm_offset_known;
			    a.parm_index = m.parm_index;
			  }
		      }
		    if (a.parm_index == MODREF_UNKNOWN_PARM
			&& promote_unknown_to_global)
		      a.parm_index = MODREF_GLOBAL_MEMORY_PARM;
		    changed |= insert (max_bases, max_refs, max_accesses,
				       base_node->base, ref_node->ref,
				       a, record_accesses);
		  }
	    }
      }
    if (release)
      ggc_delete (other);
    return changed;
  }

  /* Merge OTHER into the tree.
     PARM_MAP, if non-NULL, maps parm indexes of callee to caller.
     Similarly STATIC_CHAIN_MAP, if non-NULL, maps the static chain of callee
     to caller.
     Return true if something has changed.  */
  bool merge (tree fndecl,
	      modref_tree <T> *other, vec <modref_parm_map> *parm_map,
	      modref_parm_map *static_chain_map,
	      bool record_accesses,
	      bool promote_unknown_to_global = false)
  {
    return merge (opt_for_fn (fndecl, param_modref_max_bases),
		  opt_for_fn (fndecl, param_modref_max_refs),
		  opt_for_fn (fndecl, param_modref_max_accesses),
		  other, parm_map, static_chain_map, record_accesses,
		  promote_unknown_to_global);
  }

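  /* Illustrative sketch of propagating a callee summary into a caller
     (caller_loads, callee_loads, caller_decl and chain_map are made-up
     names; parm_map would be filled as in the modref_parm_map example
     above):

       modref_parm_map chain_map;
       bool changed
	 = caller_loads->merge (caller_decl, callee_loads, &parm_map,
				&chain_map, true);

     Accesses whose translated parm_index is MODREF_LOCAL_MEMORY_PARM are
     dropped; accesses that cannot be expressed in terms of the caller's
     parameters become MODREF_UNKNOWN_PARM, or MODREF_GLOBAL_MEMORY_PARM
     when promote_unknown_to_global is true.  */
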
  /* Copy OTHER to THIS.  */
  void copy_from (modref_tree <T> *other)
  {
    merge (INT_MAX, INT_MAX, INT_MAX, other, NULL, NULL, false);
  }

  /* Search BASE in tree; return NULL if failed.  */
  modref_base_node <T> *search (T base)
  {
    size_t i;
    modref_base_node <T> *n;
    FOR_EACH_VEC_SAFE_ELT (bases, i, n)
      if (n->base == base)
	return n;
    return NULL;
  }

  /* Return true if tree contains access to global memory.  */
  bool global_access_p ()
  {
    size_t i, j, k;
    modref_base_node <T> *base_node;
    modref_ref_node <T> *ref_node;
    modref_access_node *access_node;
    if (every_base)
      return true;
    FOR_EACH_VEC_SAFE_ELT (bases, i, base_node)
      {
	if (base_node->every_ref)
	  return true;
	FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	  {
	    if (ref_node->every_access)
	      return true;
	    FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
	      if (access_node->parm_index == MODREF_UNKNOWN_PARM
		  || access_node->parm_index == MODREF_GLOBAL_MEMORY_PARM)
		return true;
	  }
      }
    return false;
  }

  /* Return ggc allocated instance.  We explicitly call destructors via
     ggc_delete and do not want finalizers to be registered and
     called at the garbage collection time.  */
  static modref_tree<T> *create_ggc ()
  {
    return new (ggc_alloc_no_dtor<modref_tree<T>> ())
	   modref_tree<T> ();
  }

  /* Remove all records and mark tree to alias with everything.  */
  void collapse ()
  {
    size_t i;
    modref_base_node <T> *n;

    if (bases)
      {
	FOR_EACH_VEC_SAFE_ELT (bases, i, n)
	  {
	    n->collapse ();
	    ggc_free (n);
	  }
	vec_free (bases);
      }
    bases = NULL;
    every_base = true;
  }

  /* Release memory.  */
  ~modref_tree ()
  {
    collapse ();
  }

  /* Update parameter indexes in the tree according to MAP.  */
  void
  remap_params (vec <int> *map)
  {
    size_t i;
    modref_base_node <T> *base_node;
    FOR_EACH_VEC_SAFE_ELT (bases, i, base_node)
      {
	size_t j;
	modref_ref_node <T> *ref_node;
	FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	  {
	    size_t k;
	    modref_access_node *access_node;
	    FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
	      if (access_node->parm_index >= 0)
		{
		  if (access_node->parm_index < (int)map->length ())
		    access_node->parm_index = (*map)[access_node->parm_index];
		  else
		    access_node->parm_index = MODREF_UNKNOWN_PARM;
		}
	  }
      }
  }
};

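/* Illustrative sketch of remapping: if IPA transformations remove the first
   parameter of a three-parameter function, the surviving parameters shift
   down by one and a (made-up) map could be built as

     auto_vec <int, 8> map;
     map.safe_push (MODREF_UNKNOWN_PARM);
     map.safe_push (0);
     map.safe_push (1);
     summary_loads->remap_params (&map);

   where summary_loads is a hypothetical modref_tree.  Special (negative)
   parm indexes are left untouched by remap_params.  */
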
void modref_c_tests ();

void gt_ggc_mx (modref_tree <int>* const&);
void gt_ggc_mx (modref_tree <tree_node*>* const&);
void gt_pch_nx (modref_tree <int>* const&);
void gt_pch_nx (modref_tree <tree_node*>* const&);
void gt_pch_nx (modref_tree <int>* const&, gt_pointer_operator op, void *cookie);
void gt_pch_nx (modref_tree <tree_node*>* const&, gt_pointer_operator op,
		void *cookie);

void gt_ggc_mx (modref_base_node <int>*);
void gt_ggc_mx (modref_base_node <tree_node*>* &);
void gt_pch_nx (modref_base_node <int>* const&);
void gt_pch_nx (modref_base_node <tree_node*>* const&);
void gt_pch_nx (modref_base_node <int>* const&, gt_pointer_operator op,
		void *cookie);
void gt_pch_nx (modref_base_node <tree_node*>* const&, gt_pointer_operator op,
		void *cookie);

void gt_ggc_mx (modref_ref_node <int>*);
void gt_ggc_mx (modref_ref_node <tree_node*>* &);
void gt_pch_nx (modref_ref_node <int>* const&);
void gt_pch_nx (modref_ref_node <tree_node*>* const&);
void gt_pch_nx (modref_ref_node <int>* const&, gt_pointer_operator op,
		void *cookie);
void gt_pch_nx (modref_ref_node <tree_node*>* const&, gt_pointer_operator op,
		void *cookie);

#endif