/* gcc/ipa-modref-tree.h
   (listing extracted from official-gcc.git,
   blob 35190c212ca3ea2048c7549bb0dcb88e6075df93).  */
/* Data structure for the modref pass.
   Copyright (C) 2020-2021 Free Software Foundation, Inc.
   Contributed by David Cepelik and Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* modref_tree represents a decision tree that can be used by the alias
   analysis oracle to determine whether a given memory access can be affected
   by a function call.  For every function we collect two trees, one for loads
   and the other for stores.  A tree consists of the following levels:

     1) Base: this level represents the base alias set of the access and
	refers to sons (ref nodes).  The flag all_refs means that all possible
	references are aliasing.

	Because for LTO streaming we need to stream types rather than alias
	sets, modref_base_node is implemented as a template.
     2) Ref: this level represents the ref alias set and links to accesses
	unless the all_refs flag is set.
	Again ref is a template to allow LTO streaming.
     3) Access: this level represents info about individual accesses.
	Presently we record whether the access is through a dereference of a
	function parameter and if so we record the access range.  */
40 #ifndef GCC_MODREF_TREE_H
41 #define GCC_MODREF_TREE_H
43 struct ipa_modref_summary;
/* Parm indexes greater than or equal to 0 are normal parms.
   Some negative values have special meaning.  */
enum modref_special_parms {
  MODREF_UNKNOWN_PARM = -1,
  MODREF_STATIC_CHAIN_PARM = -2,
  MODREF_RETSLOT_PARM = -3,
  /* Used in modref_parm_map to take references which can be removed
     from the summary during summary update since they now point to local
     memory.  */
  MODREF_LOCAL_MEMORY_PARM = -4
};
57 /* Modref record accesses relative to function parameters.
58 This is entry for single access specifying its base and access range.
60 Accesses can be collected to boundedly sized arrays using
61 modref_access_node::insert. */
62 struct GTY(()) modref_access_node
64 /* Access range information (in bits). */
65 poly_int64 offset;
66 poly_int64 size;
67 poly_int64 max_size;
69 /* Offset from parameter pointer to the base of the access (in bytes). */
70 poly_int64 parm_offset;
72 /* Index of parameter which specifies the base of access. -1 if base is not
73 a function parameter. */
74 int parm_index;
75 bool parm_offset_known;
76 /* Number of times interval was extended during dataflow.
77 This has to be limited in order to keep dataflow finite. */
78 unsigned char adjustments;
80 /* Return true if access node holds some useful info. */
81 bool useful_p () const
83 return parm_index != MODREF_UNKNOWN_PARM;
85 /* Return true if access can be used to determine a kill. */
86 bool useful_for_kill_p () const
88 return parm_offset_known && parm_index != MODREF_UNKNOWN_PARM
89 && parm_index != MODREF_RETSLOT_PARM && known_size_p (size)
90 && known_eq (max_size, size);
92 /* Dump range to debug OUT. */
93 void dump (FILE *out);
94 /* Return true if both accesses are the same. */
95 bool operator == (modref_access_node &a) const;
96 /* Return true if range info is useful. */
97 bool range_info_useful_p () const;
98 /* Return tree corresponding to parameter of the range in STMT. */
99 tree get_call_arg (const gcall *stmt) const;
100 /* Build ao_ref corresponding to the access and return true if succesful. */
101 bool get_ao_ref (const gcall *stmt, class ao_ref *ref) const;
102 /* Stream access to OB. */
103 void stream_out (struct output_block *ob) const;
104 /* Stream access in from IB. */
105 static modref_access_node stream_in (struct lto_input_block *ib);
106 /* Insert A into vector ACCESSES. Limit size of vector to MAX_ACCESSES and
107 if RECORD_ADJUSTMENT is true keep track of adjustment counts.
108 Return 0 if nothing changed, 1 is insertion suceeded and -1 if failed. */
109 static int insert (vec <modref_access_node, va_gc> *&accesses,
110 modref_access_node a, size_t max_accesses,
111 bool record_adjustments);
112 /* Same as insert but for kills where we are conservative the other way
113 around: if information is lost, the kill is lost. */
114 static bool insert_kill (vec<modref_access_node> &kills,
115 modref_access_node &a, bool record_adjustments);
116 private:
117 bool contains (const modref_access_node &) const;
118 bool contains_for_kills (const modref_access_node &) const;
119 void update (poly_int64, poly_int64, poly_int64, poly_int64, bool);
120 bool update_for_kills (poly_int64, poly_int64, poly_int64,
121 poly_int64, poly_int64, bool);
122 bool merge (const modref_access_node &, bool);
123 bool merge_for_kills (const modref_access_node &, bool);
124 static bool closer_pair_p (const modref_access_node &,
125 const modref_access_node &,
126 const modref_access_node &,
127 const modref_access_node &);
128 void forced_merge (const modref_access_node &, bool);
129 void update2 (poly_int64, poly_int64, poly_int64, poly_int64,
130 poly_int64, poly_int64, poly_int64, bool);
131 bool combined_offsets (const modref_access_node &,
132 poly_int64 *, poly_int64 *, poly_int64 *) const;
133 static void try_merge_with (vec <modref_access_node, va_gc> *&, size_t);
136 /* Access node specifying no useful info. */
137 const modref_access_node unspecified_modref_access_node
138 = {0, -1, -1, 0, MODREF_UNKNOWN_PARM, false, 0};
140 template <typename T>
141 struct GTY((user)) modref_ref_node
143 T ref;
144 bool every_access;
145 vec <modref_access_node, va_gc> *accesses;
147 modref_ref_node (T ref):
148 ref (ref),
149 every_access (false),
150 accesses (NULL)
153 /* Collapse the tree. */
154 void collapse ()
156 vec_free (accesses);
157 accesses = NULL;
158 every_access = true;
161 /* Insert access with OFFSET and SIZE.
162 Collapse tree if it has more than MAX_ACCESSES entries.
163 If RECORD_ADJUSTMENTs is true avoid too many interval extensions.
164 Return true if record was changed. */
165 bool insert_access (modref_access_node a, size_t max_accesses,
166 bool record_adjustments)
168 /* If this base->ref pair has no access information, bail out. */
169 if (every_access)
170 return false;
172 /* Only the following kind of paramters needs to be tracked.
173 We do not track return slots because they are seen as a direct store
174 in the caller. */
175 gcc_checking_assert (a.parm_index >= 0
176 || a.parm_index == MODREF_STATIC_CHAIN_PARM
177 || a.parm_index == MODREF_UNKNOWN_PARM);
179 if (!a.useful_p ())
181 if (!every_access)
183 collapse ();
184 return true;
186 return false;
189 int ret = modref_access_node::insert (accesses, a, max_accesses,
190 record_adjustments);
191 if (ret == -1)
193 if (dump_file)
194 fprintf (dump_file,
195 "--param param=modref-max-accesses limit reached;"
196 " collapsing\n");
197 collapse ();
199 return ret != 0;
203 /* Base of an access. */
204 template <typename T>
205 struct GTY((user)) modref_base_node
207 T base;
208 vec <modref_ref_node <T> *, va_gc> *refs;
209 bool every_ref;
211 modref_base_node (T base):
212 base (base),
213 refs (NULL),
214 every_ref (false) {}
216 /* Search REF; return NULL if failed. */
217 modref_ref_node <T> *search (T ref)
219 size_t i;
220 modref_ref_node <T> *n;
221 FOR_EACH_VEC_SAFE_ELT (refs, i, n)
222 if (n->ref == ref)
223 return n;
224 return NULL;
227 /* Insert REF; collapse tree if there are more than MAX_REFS.
228 Return inserted ref and if CHANGED is non-null set it to true if
229 something changed. */
230 modref_ref_node <T> *insert_ref (T ref, size_t max_refs,
231 bool *changed = NULL)
233 modref_ref_node <T> *ref_node;
235 /* If the node is collapsed, don't do anything. */
236 if (every_ref)
237 return NULL;
239 /* Otherwise, insert a node for the ref of the access under the base. */
240 ref_node = search (ref);
241 if (ref_node)
242 return ref_node;
244 /* We always allow inserting ref 0. For non-0 refs there is upper
245 limit on number of entries and if exceeded,
246 drop ref conservatively to 0. */
247 if (ref && refs && refs->length () >= max_refs)
249 if (dump_file)
250 fprintf (dump_file, "--param param=modref-max-refs limit reached;"
251 " using 0\n");
252 ref = 0;
253 ref_node = search (ref);
254 if (ref_node)
255 return ref_node;
258 if (changed)
259 *changed = true;
261 ref_node = new (ggc_alloc <modref_ref_node <T> > ())modref_ref_node <T>
262 (ref);
263 vec_safe_push (refs, ref_node);
264 return ref_node;
267 void collapse ()
269 size_t i;
270 modref_ref_node <T> *r;
272 if (refs)
274 FOR_EACH_VEC_SAFE_ELT (refs, i, r)
276 r->collapse ();
277 ggc_free (r);
279 vec_free (refs);
281 refs = NULL;
282 every_ref = true;
286 /* Map translating parameters across function call. */
288 struct modref_parm_map
290 /* Default constructor. */
291 modref_parm_map ()
292 : parm_index (MODREF_UNKNOWN_PARM), parm_offset_known (false), parm_offset ()
295 /* Index of parameter we translate to.
296 Values from special_params enum are permitted too. */
297 int parm_index;
298 bool parm_offset_known;
299 poly_int64 parm_offset;
302 /* Access tree for a single function. */
303 template <typename T>
304 struct GTY((user)) modref_tree
306 vec <modref_base_node <T> *, va_gc> *bases;
307 bool every_base;
309 modref_tree ():
310 bases (NULL),
311 every_base (false) {}
313 /* Insert BASE; collapse tree if there are more than MAX_REFS.
314 Return inserted base and if CHANGED is non-null set it to true if
315 something changed.
316 If table gets full, try to insert REF instead. */
318 modref_base_node <T> *insert_base (T base, T ref,
319 unsigned int max_bases,
320 bool *changed = NULL)
322 modref_base_node <T> *base_node;
324 /* If the node is collapsed, don't do anything. */
325 if (every_base)
326 return NULL;
328 /* Otherwise, insert a node for the base of the access into the tree. */
329 base_node = search (base);
330 if (base_node)
331 return base_node;
333 /* We always allow inserting base 0. For non-0 base there is upper
334 limit on number of entries and if exceeded,
335 drop base conservatively to ref and if it still does not fit to 0. */
336 if (base && bases && bases->length () >= max_bases)
338 base_node = search (ref);
339 if (base_node)
341 if (dump_file)
342 fprintf (dump_file, "--param param=modref-max-bases"
343 " limit reached; using ref\n");
344 return base_node;
346 if (dump_file)
347 fprintf (dump_file, "--param param=modref-max-bases"
348 " limit reached; using 0\n");
349 base = 0;
350 base_node = search (base);
351 if (base_node)
352 return base_node;
355 if (changed)
356 *changed = true;
358 base_node = new (ggc_alloc <modref_base_node <T> > ())
359 modref_base_node <T> (base);
360 vec_safe_push (bases, base_node);
361 return base_node;
364 /* Insert memory access to the tree.
365 Return true if something changed. */
366 bool insert (unsigned int max_bases,
367 unsigned int max_refs,
368 unsigned int max_accesses,
369 T base, T ref, modref_access_node a,
370 bool record_adjustments)
372 if (every_base)
373 return false;
375 bool changed = false;
377 /* We may end up with max_size being less than size for accesses past the
378 end of array. Those are undefined and safe to ignore. */
379 if (a.range_info_useful_p ()
380 && known_size_p (a.size) && known_size_p (a.max_size)
381 && known_lt (a.max_size, a.size))
383 if (dump_file)
384 fprintf (dump_file,
385 " - Paradoxical range. Ignoring\n");
386 return false;
388 if (known_size_p (a.size)
389 && known_eq (a.size, 0))
391 if (dump_file)
392 fprintf (dump_file,
393 " - Zero size. Ignoring\n");
394 return false;
396 if (known_size_p (a.max_size)
397 && known_eq (a.max_size, 0))
399 if (dump_file)
400 fprintf (dump_file,
401 " - Zero max_size. Ignoring\n");
402 return false;
404 gcc_checking_assert (!known_size_p (a.max_size)
405 || !known_le (a.max_size, 0));
407 /* No useful information tracked; collapse everything. */
408 if (!base && !ref && !a.useful_p ())
410 collapse ();
411 return true;
414 modref_base_node <T> *base_node
415 = insert_base (base, ref, max_bases, &changed);
416 base = base_node->base;
417 /* If table got full we may end up with useless base. */
418 if (!base && !ref && !a.useful_p ())
420 collapse ();
421 return true;
423 if (base_node->every_ref)
424 return changed;
425 gcc_checking_assert (search (base) != NULL);
427 /* No useful ref info tracked; collapse base. */
428 if (!ref && !a.useful_p ())
430 base_node->collapse ();
431 return true;
434 modref_ref_node <T> *ref_node
435 = base_node->insert_ref (ref, max_refs, &changed);
436 ref = ref_node->ref;
438 if (ref_node->every_access)
439 return changed;
440 changed |= ref_node->insert_access (a, max_accesses,
441 record_adjustments);
442 /* See if we failed to add useful access. */
443 if (ref_node->every_access)
445 /* Collapse everything if there is no useful base and ref. */
446 if (!base && !ref)
448 collapse ();
449 gcc_checking_assert (changed);
451 /* Collapse base if there is no useful ref. */
452 else if (!ref)
454 base_node->collapse ();
455 gcc_checking_assert (changed);
458 return changed;
461 /* Insert memory access to the tree.
462 Return true if something changed. */
463 bool insert (tree fndecl,
464 T base, T ref, const modref_access_node &a,
465 bool record_adjustments)
467 return insert (opt_for_fn (fndecl, param_modref_max_bases),
468 opt_for_fn (fndecl, param_modref_max_refs),
469 opt_for_fn (fndecl, param_modref_max_accesses),
470 base, ref, a, record_adjustments);
473 /* Remove tree branches that are not useful (i.e. they will always pass). */
475 void cleanup ()
477 size_t i, j;
478 modref_base_node <T> *base_node;
479 modref_ref_node <T> *ref_node;
481 if (!bases)
482 return;
484 for (i = 0; vec_safe_iterate (bases, i, &base_node);)
486 if (base_node->refs)
487 for (j = 0; vec_safe_iterate (base_node->refs, j, &ref_node);)
489 if (!ref_node->every_access
490 && (!ref_node->accesses
491 || !ref_node->accesses->length ()))
493 base_node->refs->unordered_remove (j);
494 vec_free (ref_node->accesses);
495 ggc_delete (ref_node);
497 else
498 j++;
500 if (!base_node->every_ref
501 && (!base_node->refs || !base_node->refs->length ()))
503 bases->unordered_remove (i);
504 vec_free (base_node->refs);
505 ggc_delete (base_node);
507 else
508 i++;
510 if (bases && !bases->length ())
512 vec_free (bases);
513 bases = NULL;
517 /* Merge OTHER into the tree.
518 PARM_MAP, if non-NULL, maps parm indexes of callee to caller.
519 Similar CHAIN_MAP, if non-NULL, maps static chain of callee to caller.
520 Return true if something has changed. */
521 bool merge (unsigned int max_bases,
522 unsigned int max_refs,
523 unsigned int max_accesses,
524 modref_tree <T> *other, vec <modref_parm_map> *parm_map,
525 modref_parm_map *static_chain_map,
526 bool record_accesses)
528 if (!other || every_base)
529 return false;
530 if (other->every_base)
532 collapse ();
533 return true;
536 bool changed = false;
537 size_t i, j, k;
538 modref_base_node <T> *base_node, *my_base_node;
539 modref_ref_node <T> *ref_node;
540 modref_access_node *access_node;
541 bool release = false;
543 /* For self-recursive functions we may end up merging summary into itself;
544 produce copy first so we do not modify summary under our own hands. */
545 if (other == this)
547 release = true;
548 other = modref_tree<T>::create_ggc ();
549 other->copy_from (this);
552 FOR_EACH_VEC_SAFE_ELT (other->bases, i, base_node)
554 if (base_node->every_ref)
556 my_base_node = insert_base (base_node->base, 0,
557 max_bases, &changed);
558 if (my_base_node && !my_base_node->every_ref)
560 my_base_node->collapse ();
561 cleanup ();
562 changed = true;
565 else
566 FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
568 if (ref_node->every_access)
570 changed |= insert (max_bases, max_refs, max_accesses,
571 base_node->base,
572 ref_node->ref,
573 unspecified_modref_access_node,
574 record_accesses);
576 else
577 FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
579 modref_access_node a = *access_node;
581 if (a.parm_index != MODREF_UNKNOWN_PARM && parm_map)
583 if (a.parm_index >= (int)parm_map->length ())
584 a.parm_index = MODREF_UNKNOWN_PARM;
585 else
587 modref_parm_map &m
588 = a.parm_index == MODREF_STATIC_CHAIN_PARM
589 ? *static_chain_map
590 : (*parm_map) [a.parm_index];
591 if (m.parm_index == MODREF_LOCAL_MEMORY_PARM)
592 continue;
593 a.parm_offset += m.parm_offset;
594 a.parm_offset_known &= m.parm_offset_known;
595 a.parm_index = m.parm_index;
598 changed |= insert (max_bases, max_refs, max_accesses,
599 base_node->base, ref_node->ref,
600 a, record_accesses);
604 if (release)
605 ggc_delete (other);
606 return changed;
609 /* Merge OTHER into the tree.
610 PARM_MAP, if non-NULL, maps parm indexes of callee to caller.
611 Similar CHAIN_MAP, if non-NULL, maps static chain of callee to caller.
612 Return true if something has changed. */
613 bool merge (tree fndecl,
614 modref_tree <T> *other, vec <modref_parm_map> *parm_map,
615 modref_parm_map *static_chain_map,
616 bool record_accesses)
618 return merge (opt_for_fn (fndecl, param_modref_max_bases),
619 opt_for_fn (fndecl, param_modref_max_refs),
620 opt_for_fn (fndecl, param_modref_max_accesses),
621 other, parm_map, static_chain_map, record_accesses);
624 /* Copy OTHER to THIS. */
625 void copy_from (modref_tree <T> *other)
627 merge (INT_MAX, INT_MAX, INT_MAX, other, NULL, NULL, false);
630 /* Search BASE in tree; return NULL if failed. */
631 modref_base_node <T> *search (T base)
633 size_t i;
634 modref_base_node <T> *n;
635 FOR_EACH_VEC_SAFE_ELT (bases, i, n)
636 if (n->base == base)
637 return n;
638 return NULL;
641 /* Return true if tree contains access to global memory. */
642 bool global_access_p ()
644 size_t i, j, k;
645 modref_base_node <T> *base_node;
646 modref_ref_node <T> *ref_node;
647 modref_access_node *access_node;
648 if (every_base)
649 return true;
650 FOR_EACH_VEC_SAFE_ELT (bases, i, base_node)
652 if (base_node->every_ref)
653 return true;
654 FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
656 if (ref_node->every_access)
657 return true;
658 FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
659 if (access_node->parm_index == MODREF_UNKNOWN_PARM)
660 return true;
663 return false;
666 /* Return ggc allocated instance. We explicitly call destructors via
667 ggc_delete and do not want finalizers to be registered and
668 called at the garbage collection time. */
669 static modref_tree<T> *create_ggc ()
671 return new (ggc_alloc_no_dtor<modref_tree<T>> ())
672 modref_tree<T> ();
675 /* Remove all records and mark tree to alias with everything. */
676 void collapse ()
678 size_t i;
679 modref_base_node <T> *n;
681 if (bases)
683 FOR_EACH_VEC_SAFE_ELT (bases, i, n)
685 n->collapse ();
686 ggc_free (n);
688 vec_free (bases);
690 bases = NULL;
691 every_base = true;
694 /* Release memory. */
695 ~modref_tree ()
697 collapse ();
700 /* Update parameter indexes in TT according to MAP. */
701 void
702 remap_params (vec <int> *map)
704 size_t i;
705 modref_base_node <T> *base_node;
706 FOR_EACH_VEC_SAFE_ELT (bases, i, base_node)
708 size_t j;
709 modref_ref_node <T> *ref_node;
710 FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
712 size_t k;
713 modref_access_node *access_node;
714 FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
715 if (access_node->parm_index >= 0)
717 if (access_node->parm_index < (int)map->length ())
718 access_node->parm_index = (*map)[access_node->parm_index];
719 else
720 access_node->parm_index = MODREF_UNKNOWN_PARM;
727 void modref_c_tests ();
729 void gt_ggc_mx (modref_tree <int>* const&);
730 void gt_ggc_mx (modref_tree <tree_node*>* const&);
731 void gt_pch_nx (modref_tree <int>* const&);
732 void gt_pch_nx (modref_tree <tree_node*>* const&);
733 void gt_pch_nx (modref_tree <int>* const&, gt_pointer_operator op, void *cookie);
734 void gt_pch_nx (modref_tree <tree_node*>* const&, gt_pointer_operator op,
735 void *cookie);
737 void gt_ggc_mx (modref_base_node <int>*);
738 void gt_ggc_mx (modref_base_node <tree_node*>* &);
739 void gt_pch_nx (modref_base_node <int>* const&);
740 void gt_pch_nx (modref_base_node <tree_node*>* const&);
741 void gt_pch_nx (modref_base_node <int>* const&, gt_pointer_operator op,
742 void *cookie);
743 void gt_pch_nx (modref_base_node <tree_node*>* const&, gt_pointer_operator op,
744 void *cookie);
746 void gt_ggc_mx (modref_ref_node <int>*);
747 void gt_ggc_mx (modref_ref_node <tree_node*>* &);
748 void gt_pch_nx (modref_ref_node <int>* const&);
749 void gt_pch_nx (modref_ref_node <tree_node*>* const&);
750 void gt_pch_nx (modref_ref_node <int>* const&, gt_pointer_operator op,
751 void *cookie);
752 void gt_pch_nx (modref_ref_node <tree_node*>* const&, gt_pointer_operator op,
753 void *cookie);
755 #endif