testsuite: Correct requirements for vadsdu*, vslv and vsrv testcases.
[official-gcc.git] / gcc / ipa-modref.c
blobb903d772c3b04ef111fcd01fc39b97917256820b
/* Search for references that a function loads or stores.
2 Copyright (C) 2020 Free Software Foundation, Inc.
3 Contributed by David Cepelik and Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* Mod/ref pass records summary about loads and stores performed by the
22 function. This is later used by alias analysis to disambiguate memory
23 accesses across function calls. The summary has a form of decision tree
24 described in ipa-modref-tree.h.
   This file contains a tree pass and an IPA pass.  Both perform the same
   analysis; however, the tree pass is executed during early and late
   optimization passes to propagate info downwards in the compilation order.
   The IPA pass propagates across the callgraph and is able to handle
   recursion and works on the whole program during link-time analysis.
   LTO mode differs from the local mode by not recording alias sets but types
   that are translated to alias sets later.  This is necessary in order to
   stream the information because the alias sets are rebuilt at stream-in time
   and may not correspond to ones seen during analysis.  For this reason part
   of the analysis is duplicated.  */
38 #include "config.h"
39 #include "system.h"
40 #include "coretypes.h"
41 #include "backend.h"
42 #include "tree.h"
43 #include "gimple.h"
44 #include "alloc-pool.h"
45 #include "tree-pass.h"
46 #include "gimple-iterator.h"
47 #include "tree-dfa.h"
48 #include "cgraph.h"
49 #include "ipa-utils.h"
50 #include "symbol-summary.h"
51 #include "gimple-pretty-print.h"
52 #include "gimple-walk.h"
53 #include "print-tree.h"
54 #include "tree-streamer.h"
55 #include "alias.h"
56 #include "calls.h"
57 #include "ipa-modref-tree.h"
58 #include "ipa-modref.h"
59 #include "value-range.h"
60 #include "ipa-prop.h"
61 #include "ipa-fnsummary.h"
/* Class (from which there is one global instance) that holds modref summaries
   for all analyzed functions.  Used for the non-LTO (alias-set based)
   representation.  */

class GTY((user)) modref_summaries
  : public fast_function_summary <modref_summary *, va_gc>
{
public:
  modref_summaries (symbol_table *symtab)
      : fast_function_summary <modref_summary *, va_gc> (symtab) {}
  /* Callbacks invoked by the symbol table when functions are inserted or
     cloned; defined later in this file.  */
  virtual void insert (cgraph_node *, modref_summary *state);
  virtual void duplicate (cgraph_node *src_node,
			  cgraph_node *dst_node,
			  modref_summary *src_data,
			  modref_summary *dst_data);
  /* Allocate the summary table in GC memory.  No destructor is registered
     since the instance lives for the whole compilation.  */
  static modref_summaries *create_ggc (symbol_table *symtab)
  {
    return new (ggc_alloc_no_dtor<modref_summaries> ())
	     modref_summaries (symtab);
  }
};
83 class modref_summary_lto;
/* Class (from which there is one global instance) that holds modref summaries
   for all analyzed functions in the LTO streaming (type based)
   representation.  */

class GTY((user)) modref_summaries_lto
  : public fast_function_summary <modref_summary_lto *, va_gc>
{
public:
  modref_summaries_lto (symbol_table *symtab)
      : fast_function_summary <modref_summary_lto *, va_gc> (symtab),
	propagated (false) {}
  /* Callbacks invoked by the symbol table when functions are inserted or
     cloned; defined later in this file.  */
  virtual void insert (cgraph_node *, modref_summary_lto *state);
  virtual void duplicate (cgraph_node *src_node,
			  cgraph_node *dst_node,
			  modref_summary_lto *src_data,
			  modref_summary_lto *dst_data);
  /* Allocate the summary table in GC memory.  No destructor is registered
     since the instance lives for the whole compilation.  */
  static modref_summaries_lto *create_ggc (symbol_table *symtab)
  {
    return new (ggc_alloc_no_dtor<modref_summaries_lto> ())
	     modref_summaries_lto (symtab);
  }
  /* Set once IPA propagation has run; after that no new functions may be
     inserted (see modref_summaries_lto::insert).  */
  bool propagated;
};
/* Global variable holding all modref summaries
   (from analysis to IPA propagation time).  */

static GTY(()) fast_function_summary <modref_summary *, va_gc>
	 *summaries;

/* Global variable holding all modref optimization summaries
   (from IPA propagation time or used by local optimization pass).  */

static GTY(()) fast_function_summary <modref_summary *, va_gc>
	 *optimization_summaries;

/* LTO summaries hold info from analysis to LTO streaming or from LTO
   stream-in through propagation to LTO stream-out.  */

static GTY(()) fast_function_summary <modref_summary_lto *, va_gc>
	 *summaries_lto;
/* Summary for a single function which this pass produces.  */

/* Construct an empty summary; the load/store trees are allocated lazily
   by analyze_function.  */

modref_summary::modref_summary ()
  : loads (NULL), stores (NULL)
{
}
/* Release the GC-allocated load/store trees owned by the summary.  */

modref_summary::~modref_summary ()
{
  if (loads)
    ggc_delete (loads);
  if (stores)
    ggc_delete (stores);
}
137 /* Return true if summary is potentially useful for optimization. */
139 bool
140 modref_summary::useful_p (int ecf_flags)
142 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
143 return false;
144 if (loads && !loads->every_base)
145 return true;
146 if (ecf_flags & ECF_PURE)
147 return false;
148 return stores && !stores->every_base;
/* Single function summary used for LTO.  */

typedef modref_tree <tree> modref_records_lto;
struct GTY(()) modref_summary_lto
{
  /* Load and stores in functions using types rather than alias sets.

     This is necessary to make the information streamable for LTO but is also
     more verbose and thus more likely to hit the limits.  */
  modref_records_lto *loads;
  modref_records_lto *stores;

  modref_summary_lto ();
  ~modref_summary_lto ();
  void dump (FILE *);
  bool useful_p (int ecf_flags);
};
/* Summary for a single function which this pass produces.  */

/* Construct an empty LTO summary; load/store trees are allocated lazily
   by analyze_function.  */

modref_summary_lto::modref_summary_lto ()
  : loads (NULL), stores (NULL)
{
}
/* Release the GC-allocated load/store trees owned by the LTO summary.  */

modref_summary_lto::~modref_summary_lto ()
{
  if (loads)
    ggc_delete (loads);
  if (stores)
    ggc_delete (stores);
}
185 /* Return true if lto summary is potentially useful for optimization. */
187 bool
188 modref_summary_lto::useful_p (int ecf_flags)
190 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
191 return false;
192 if (loads && !loads->every_base)
193 return true;
194 if (ecf_flags & ECF_PURE)
195 return false;
196 return stores && !stores->every_base;
/* Dump access A to OUT.  Prints the parameter index/offset when the access
   is relative to a function parameter and the offset/size range when it is
   known.  */

static void
dump_access (modref_access_node *a, FILE *out)
{
  fprintf (out, "          access:");
  /* parm_index == -1 means the base is not (known to be) a parameter.  */
  if (a->parm_index != -1)
    {
      fprintf (out, " Parm %i", a->parm_index);
      if (a->parm_offset_known)
	{
	  fprintf (out, " param offset:");
	  print_dec ((poly_int64_pod)a->parm_offset, out, SIGNED);
	}
    }
  if (a->range_info_useful_p ())
    {
      fprintf (out, " offset:");
      print_dec ((poly_int64_pod)a->offset, out, SIGNED);
      fprintf (out, " size:");
      print_dec ((poly_int64_pod)a->size, out, SIGNED);
      fprintf (out, " max_size:");
      print_dec ((poly_int64_pod)a->max_size, out, SIGNED);
    }
  fprintf (out, "\n");
}
/* Dump records TT to OUT.  Walks the base -> ref -> access decision tree;
   "Every base/ref/access" marks a collapsed (catch-all) level.  */

static void
dump_records (modref_records *tt, FILE *out)
{
  fprintf (out, "    Limits: %i bases, %i refs\n",
	   (int)tt->max_bases, (int)tt->max_refs);
  if (tt->every_base)
    {
      fprintf (out, "    Every base\n");
      return;
    }
  size_t i;
  modref_base_node <alias_set_type> *n;
  FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
    {
      fprintf (out, "      Base %i: alias set %i\n", (int)i, n->base);
      if (n->every_ref)
	{
	  fprintf (out, "      Every ref\n");
	  continue;
	}
      size_t j;
      modref_ref_node <alias_set_type> *r;
      FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
	{
	  fprintf (out, "        Ref %i: alias set %i\n", (int)j, r->ref);
	  if (r->every_access)
	    {
	      fprintf (out, "          Every access\n");
	      continue;
	    }
	  size_t k;
	  modref_access_node *a;
	  FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
	    dump_access (a, out);
	}
    }
}
266 /* Dump records TT to OUT. */
268 static void
269 dump_lto_records (modref_records_lto *tt, FILE *out)
271 fprintf (out, " Limits: %i bases, %i refs\n",
272 (int)tt->max_bases, (int)tt->max_refs);
273 if (tt->every_base)
275 fprintf (out, " Every base\n");
276 return;
278 size_t i;
279 modref_base_node <tree> *n;
280 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
282 fprintf (out, " Base %i:", (int)i);
283 print_generic_expr (dump_file, n->base);
284 fprintf (out, " (alias set %i)\n",
285 n->base ? get_alias_set (n->base) : 0);
286 if (n->every_ref)
288 fprintf (out, " Every ref\n");
289 continue;
291 size_t j;
292 modref_ref_node <tree> *r;
293 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
295 fprintf (out, " Ref %i:", (int)j);
296 print_generic_expr (dump_file, r->ref);
297 fprintf (out, " (alias set %i)\n",
298 r->ref ? get_alias_set (r->ref) : 0);
299 if (r->every_access)
301 fprintf (out, " Every access\n");
302 continue;
304 size_t k;
305 modref_access_node *a;
306 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
307 dump_access (a, out);
/* Dump summary to OUT.  Assumes loads/stores trees are allocated
   (analyze_function creates both before the summary is kept).  */

void
modref_summary::dump (FILE *out)
{
  fprintf (out, "  loads:\n");
  dump_records (loads, out);
  fprintf (out, "  stores:\n");
  dump_records (stores, out);
}
/* Dump LTO summary to OUT.  Assumes loads/stores trees are allocated
   (analyze_function creates both before the summary is kept).  */

void
modref_summary_lto::dump (FILE *out)
{
  fprintf (out, "  loads:\n");
  dump_lto_records (loads, out);
  fprintf (out, "  stores:\n");
  dump_lto_records (stores, out);
}
/* Get function summary for FUNC if it exists, return NULL otherwise.
   This is the query entry point used by alias analysis.  */

modref_summary *
get_modref_function_summary (cgraph_node *func)
{
  /* Avoid creation of the summary too early (e.g. when front-end calls us).  */
  if (!optimization_summaries)
    return NULL;

  /* A single function body may be represented by multiple symbols with
     different visibility.  For example, if FUNC is an interposable alias,
     we don't want to return anything, even if we have summary for the target
     function.  */
  enum availability avail;
  func = func->function_or_virtual_thunk_symbol
	     (&avail, cgraph_node::get (current_function_decl));
  if (avail <= AVAIL_INTERPOSABLE)
    return NULL;

  modref_summary *r = optimization_summaries->get (func);
  return r;
}
/* Construct modref_access_node from REF.  If the base of REF is a
   dereference of a function parameter, record the parameter's index (its
   position in DECL_ARGUMENTS) and, for MEM_REF, the constant offset applied
   to the pointer; otherwise parm_index is left as -1.  */

static modref_access_node
get_access (ao_ref *ref)
{
  tree base;

  base = ao_ref_base (ref);
  modref_access_node a = {ref->offset, ref->size, ref->max_size,
			  0, -1, false};
  if (TREE_CODE (base) == MEM_REF || TREE_CODE (base) == TARGET_MEM_REF)
    {
      tree memref = base;
      base = TREE_OPERAND (base, 0);
      /* Only a default-def SSA name of a PARM_DECL identifies the incoming
	 parameter value (not a later redefinition).  */
      if (TREE_CODE (base) == SSA_NAME
	  && SSA_NAME_IS_DEFAULT_DEF (base)
	  && TREE_CODE (SSA_NAME_VAR (base)) == PARM_DECL)
	{
	  /* Count position of the PARM_DECL in the argument chain.  */
	  a.parm_index = 0;
	  for (tree t = DECL_ARGUMENTS (current_function_decl);
	       t != SSA_NAME_VAR (base); t = DECL_CHAIN (t))
	    {
	      if (!t)
		{
		  /* Not found in DECL_ARGUMENTS (e.g. static chain).  */
		  a.parm_index = -1;
		  break;
		}
	      a.parm_index++;
	    }
	  if (TREE_CODE (memref) == MEM_REF)
	    {
	      /* Offset is known only when it fits in a shwi.  */
	      a.parm_offset_known
		 = wi::to_poly_wide (TREE_OPERAND
					 (memref, 1)).to_shwi (&a.parm_offset);
	    }
	  else
	    a.parm_offset_known = false;
	}
      else
	a.parm_index = -1;
    }
  else
    a.parm_index = -1;
  return a;
}
/* Record access REF into the modref_records data structure TT.
   With -fno-strict-aliasing, alias set 0 (alias everything) is used.  */

static void
record_access (modref_records *tt, ao_ref *ref)
{
  alias_set_type base_set = !flag_strict_aliasing ? 0
			    : ao_ref_base_alias_set (ref);
  alias_set_type ref_set = !flag_strict_aliasing ? 0
			    : (ao_ref_alias_set (ref));
  modref_access_node a = get_access (ref);
  if (dump_file)
    {
      fprintf (dump_file, "   - Recording base_set=%i ref_set=%i parm=%i\n",
	       base_set, ref_set, a.parm_index);
    }
  tt->insert (base_set, ref_set, a);
}
/* IPA (LTO) version of record_access: record REF into TT keyed by types
   rather than alias sets, since alias sets are not stable across LTO
   streaming.  */

static void
record_access_lto (modref_records_lto *tt, ao_ref *ref)
{
  /* get_alias_set sometimes uses a different type to compute the alias set
     than TREE_TYPE (base).  Do the same adjustments here.  */
  tree base_type = NULL_TREE, ref_type = NULL_TREE;
  if (flag_strict_aliasing)
    {
      tree base;

      /* Strip handled components to reach the base object.  */
      base = ref->ref;
      while (handled_component_p (base))
	base = TREE_OPERAND (base, 0);

      base_type = reference_alias_ptr_type_1 (&base);

      if (!base_type)
	base_type = TREE_TYPE (base);
      else
	/* ref-all pointers alias everything; represent that as NULL type.  */
	base_type = TYPE_REF_CAN_ALIAS_ALL (base_type)
		    ? NULL_TREE : TREE_TYPE (base_type);

      tree ref_expr = ref->ref;
      ref_type = reference_alias_ptr_type_1 (&ref_expr);

      if (!ref_type)
	ref_type = TREE_TYPE (ref_expr);
      else
	ref_type = TYPE_REF_CAN_ALIAS_ALL (ref_type)
		   ? NULL_TREE : TREE_TYPE (ref_type);

      /* Sanity check that we are in sync with what get_alias_set does.  */
      gcc_checking_assert ((!base_type && !ao_ref_base_alias_set (ref))
			   || get_alias_set (base_type)
			      == ao_ref_base_alias_set (ref));
      gcc_checking_assert ((!ref_type && !ao_ref_alias_set (ref))
			   || get_alias_set (ref_type)
			      == ao_ref_alias_set (ref));

      /* Do not bother to record types that have no meaningful alias set.
	 Also skip variably modified types since these go to local streams.  */
      if (base_type && (!get_alias_set (base_type)
			|| variably_modified_type_p (base_type, NULL_TREE)))
	base_type = NULL_TREE;
      if (ref_type && (!get_alias_set (ref_type)
		       || variably_modified_type_p (ref_type, NULL_TREE)))
	ref_type = NULL_TREE;
    }
  modref_access_node a = get_access (ref);
  if (dump_file)
    {
      fprintf (dump_file, "   - Recording base type:");
      print_generic_expr (dump_file, base_type);
      fprintf (dump_file, " (alias set %i) ref type:",
	       base_type ? get_alias_set (base_type) : 0);
      print_generic_expr (dump_file, ref_type);
      fprintf (dump_file, " (alias set %i) parm:%i\n",
	       ref_type ? get_alias_set (ref_type) : 0,
	       a.parm_index);
    }

  tt->insert (base_type, ref_type, a);
}
486 /* Returns true if and only if we should store the access to EXPR.
487 Some accesses, e.g. loads from automatic variables, are not interesting. */
489 static bool
490 record_access_p (tree expr)
492 if (refs_local_or_readonly_memory_p (expr))
494 if (dump_file)
495 fprintf (dump_file, " - Read-only or local, ignoring.\n");
496 return false;
498 return true;
501 /* Return true if ECF flags says that stores can be ignored. */
503 static bool
504 ignore_stores_p (tree caller, int flags)
506 if (flags & ECF_PURE)
507 return true;
508 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
509 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
510 return true;
511 return false;
/* Merge side effects of call STMT to function with CALLEE_SUMMARY
   into CUR_SUMMARY.  Return true if something changed.
   If IGNORE_STORES is true, do not merge stores.
   CALLEE_NODE is used only for dumping.  */

bool
merge_call_side_effects (modref_summary *cur_summary,
			 gimple *stmt, modref_summary *callee_summary,
			 bool ignore_stores, cgraph_node *callee_node)
{
  auto_vec <modref_parm_map, 32> parm_map;
  bool changed = false;

  if (dump_file)
    fprintf (dump_file, " - Merging side effects of %s with parm map:",
	     callee_node->dump_name ());

  /* Build a map from callee parameter positions to caller parameter
     indices: -1 = unknown, -2 = points to local/readonly memory.  */
  parm_map.safe_grow_cleared (gimple_call_num_args (stmt));
  for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree op = gimple_call_arg (stmt, i);
      bool offset_known;
      poly_int64 offset;

      offset_known = unadjusted_ptr_and_unit_offset (op, &op, &offset);
      if (TREE_CODE (op) == SSA_NAME
	  && SSA_NAME_IS_DEFAULT_DEF (op)
	  && TREE_CODE (SSA_NAME_VAR (op)) == PARM_DECL)
	{
	  /* Find position of the PARM_DECL in DECL_ARGUMENTS.  */
	  int index = 0;
	  for (tree t = DECL_ARGUMENTS (current_function_decl);
	       t != SSA_NAME_VAR (op); t = DECL_CHAIN (t))
	    {
	      if (!t)
		{
		  index = -1;
		  break;
		}
	      index++;
	    }
	  parm_map[i].parm_index = index;
	  parm_map[i].parm_offset_known = offset_known;
	  parm_map[i].parm_offset = offset;
	}
      else if (points_to_local_or_readonly_memory_p (op))
	parm_map[i].parm_index = -2;
      else
	parm_map[i].parm_index = -1;
      if (dump_file)
	{
	  fprintf (dump_file, " %i", parm_map[i].parm_index);
	  if (parm_map[i].parm_offset_known)
	    {
	      fprintf (dump_file, " offset:");
	      print_dec ((poly_int64_pod)parm_map[i].parm_offset,
			 dump_file, SIGNED);
	    }
	}
    }
  if (dump_file)
    fprintf (dump_file, "\n");

  /* Merge with callee's summary.  */
  changed |= cur_summary->loads->merge (callee_summary->loads, &parm_map);
  if (!ignore_stores)
    changed |= cur_summary->stores->merge (callee_summary->stores,
					   &parm_map);
  return changed;
}
/* Analyze function call STMT and merge its effects into CUR_SUMMARY.
   Remember recursive calls in RECURSIVE_CALLS; they are iterated to a fixed
   point later by analyze_function.  Return false if the function can no
   longer be usefully analyzed (caller then drops the summary).  */

static bool
analyze_call (modref_summary *cur_summary,
	      gimple *stmt, vec <gimple *> *recursive_calls)
{
  /* Check flags on the function call.  In certain cases, analysis can be
     simplified.  */
  int flags = gimple_call_flags (stmt);
  if (flags & (ECF_CONST | ECF_NOVOPS))
    {
      if (dump_file)
	fprintf (dump_file,
		 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
		 "except for args.\n");
      return true;
    }

  /* Pure functions do not affect global memory.  Stores by functions which are
     noreturn and do not throw can safely be ignored.  */
  bool ignore_stores = ignore_stores_p (current_function_decl, flags);

  /* Next, we try to get the callee's function declaration.  The goal is to
     merge their summary with ours.  */
  tree callee = gimple_call_fndecl (stmt);

  /* Check if this is an indirect call.  */
  if (!callee)
    {
      /* If the indirect call does not write memory, our store summary is
	 unaffected, but we have to discard our loads summary (we don't know
	 anything about the loads that the called function performs).  */
      if (ignore_stores)
	{
	  if (dump_file)
	    fprintf (dump_file, " - Indirect call which does not write memory, "
		     "discarding loads.\n");
	  cur_summary->loads->collapse ();
	  return true;
	}
      if (dump_file)
	fprintf (dump_file, " - Indirect call.\n");
      return false;
    }

  struct cgraph_node *callee_node = cgraph_node::get_create (callee);

  /* We can not safely optimize based on summary of callee if it does
     not always bind to current def: it is possible that memory load
     was optimized out earlier which may not happen in the interposed
     variant.  */
  if (!callee_node->binds_to_current_def_p ())
    {
      if (dump_file)
	fprintf (dump_file, " - May be interposed: collapsing loads.\n");
      cur_summary->loads->collapse ();
    }

  /* If this is a recursive call, the target summary is the same as ours, so
     there's nothing to do.  */
  if (recursive_call_p (current_function_decl, callee))
    {
      recursive_calls->safe_push (stmt);
      if (dump_file)
	fprintf (dump_file, " - Skipping recursive call.\n");
      return true;
    }

  gcc_assert (callee_node != NULL);

  /* Get the function symbol and its availability.  */
  enum availability avail;
  callee_node = callee_node->function_symbol (&avail);
  if (avail <= AVAIL_INTERPOSABLE)
    {
      /* Keep stores summary, but discard all loads for interposable function
	 symbols.  */
      if (ignore_stores)
	{
	  cur_summary->loads->collapse ();
	  return true;
	}
      if (dump_file)
	fprintf (dump_file, " - Function availability <= AVAIL_INTERPOSABLE.\n");
      return false;
    }

  /* Get callee's modref summary.  As above, if there's no summary, we either
     have to give up or, if stores are ignored, we can just purge loads.  */
  modref_summary *callee_summary = optimization_summaries->get (callee_node);
  if (!callee_summary)
    {
      if (ignore_stores)
	{
	  cur_summary->loads->collapse ();
	  return true;
	}
      if (dump_file)
	fprintf (dump_file, " - No modref summary available for callee.\n");
      return false;
    }

  merge_call_side_effects (cur_summary, stmt, callee_summary, ignore_stores,
			   callee_node);

  return true;
}
/* Support analysis in non-LTO and LTO mode in parallel.
   Either pointer may be NULL when the corresponding mode is not active.  */

struct summary_ptrs
{
  struct modref_summary *nolto;
  struct modref_summary_lto *lto;
};
/* Helper for analyze_stmt: record load of OP into whichever summaries are
   active.  Always returns false so walk_stmt_load_store_ops continues.  */

static bool
analyze_load (gimple *, tree, tree op, void *data)
{
  modref_summary *summary = ((summary_ptrs *)data)->nolto;
  modref_summary_lto *summary_lto = ((summary_ptrs *)data)->lto;

  if (dump_file)
    {
      fprintf (dump_file, " - Analyzing load: ");
      print_generic_expr (dump_file, op);
      fprintf (dump_file, "\n");
    }

  if (!record_access_p (op))
    return false;

  ao_ref r;
  ao_ref_init (&r, op);

  if (summary)
    record_access (summary->loads, &r);
  if (summary_lto)
    record_access_lto (summary_lto->loads, &r);
  return false;
}
/* Helper for analyze_stmt: record store to OP into whichever summaries are
   active.  Always returns false so walk_stmt_load_store_ops continues.  */

static bool
analyze_store (gimple *, tree, tree op, void *data)
{
  modref_summary *summary = ((summary_ptrs *)data)->nolto;
  modref_summary_lto *summary_lto = ((summary_ptrs *)data)->lto;

  if (dump_file)
    {
      fprintf (dump_file, " - Analyzing store: ");
      print_generic_expr (dump_file, op);
      fprintf (dump_file, "\n");
    }

  if (!record_access_p (op))
    return false;

  ao_ref r;
  ao_ref_init (&r, op);

  if (summary)
    record_access (summary->stores, &r);
  if (summary_lto)
    record_access_lto (summary_lto->stores, &r);
  return false;
}
/* Analyze statement STMT of function F.
   If IPA is true do not merge in side effects of calls.
   Return false when the function can no longer be usefully analyzed.  */

static bool
analyze_stmt (modref_summary *summary, modref_summary_lto *summary_lto,
	      gimple *stmt, bool ipa, vec <gimple *> *recursive_calls)
{
  /* In general we can not ignore clobbers because they are barriers for code
     motion, however after inlining it is safe to do because local optimization
     passes do not consider clobbers from other functions.
     Similar logic is in ipa-pure-const.  */
  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return true;

  struct summary_ptrs sums = {summary, summary_lto};

  /* Analyze all loads and stores in STMT.  */
  walk_stmt_load_store_ops (stmt, &sums,
			    analyze_load, analyze_store);

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASM:
      /* If the ASM statement does not read nor write memory, there's nothing
	 to do.  Otherwise just give up.  */
      if (!gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	return true;
      if (dump_file)
	fprintf (dump_file, " - Function contains GIMPLE_ASM statement "
		 "which clobbers memory.\n");
      return false;
    case GIMPLE_CALL:
      if (!ipa)
	return analyze_call (summary, stmt, recursive_calls);
      return true;
    default:
      /* Nothing to do for other types of statements.  */
      return true;
    }
}
/* Remove summary of current function because during the function body
   scan we determined it is not useful.  LTO, NOLTO and IPA determine the
   mode of scan (which tables were populated).  */

static void
remove_summary (bool lto, bool nolto, bool ipa)
{
  cgraph_node *fnode = cgraph_node::get (current_function_decl);
  if (!ipa)
    optimization_summaries->remove (fnode);
  else
    {
      if (nolto)
	summaries->remove (fnode);
      if (lto)
	summaries_lto->remove (fnode);
    }
  if (dump_file)
    fprintf (dump_file,
	     " - modref done with result: not tracked.\n");
}
/* Analyze function F.  IPA indicates whether we're running in local mode
   (false) or the IPA mode (true).  Builds the load/store summaries for F,
   iterates recursive calls to a fixed point (local mode only) and drops
   the summary again when it turns out not to be useful.  */

static void
analyze_function (function *f, bool ipa)
{
  if (dump_file)
    fprintf (dump_file, "modref analyzing '%s' (ipa=%i)%s%s\n",
	     function_name (f), ipa,
	     TREE_READONLY (current_function_decl) ? " (const)" : "",
	     DECL_PURE_P (current_function_decl) ? " (pure)" : "");

  /* Don't analyze this function if it's compiled with -fno-ipa-modref.  */
  if (!flag_ipa_modref)
    return;

  /* Compute no-LTO summaries when local optimization is going to happen.  */
  bool nolto = (!ipa || ((!flag_lto || flag_fat_lto_objects) && !in_lto_p)
		|| (in_lto_p && !flag_wpa
		    && flag_incremental_link != INCREMENTAL_LINK_LTO));
  /* Compute LTO when LTO streaming is going to happen.  */
  bool lto = ipa && ((flag_lto && !in_lto_p)
		     || flag_wpa
		     || flag_incremental_link == INCREMENTAL_LINK_LTO);
  cgraph_node *fnode = cgraph_node::get (current_function_decl);

  modref_summary *summary = NULL;
  modref_summary_lto *summary_lto = NULL;

  /* Initialize the summary.
     If we run in local mode there is possibly pre-existing summary from
     IPA pass.  Dump it so it is easy to compare if mod-ref info has
     improved.  */
  if (!ipa)
    {
      if (!optimization_summaries)
	optimization_summaries = modref_summaries::create_ggc (symtab);
      else /* Remove existing summary if we are re-running the pass.  */
	{
	  if (dump_file
	      && (summary
		  = optimization_summaries->get (cgraph_node::get (f->decl)))
		 != NULL
	      && summary->loads)
	    {
	      fprintf (dump_file, "Past summary:\n");
	      optimization_summaries->get
		 (cgraph_node::get (f->decl))->dump (dump_file);
	    }
	  optimization_summaries->remove (cgraph_node::get (f->decl));
	}
      summary = optimization_summaries->get_create (cgraph_node::get (f->decl));
      gcc_checking_assert (nolto && !lto);
    }
  /* In IPA mode we analyze every function precisely once.  Assert that.  */
  else
    {
      if (nolto)
	{
	  if (!summaries)
	    summaries = modref_summaries::create_ggc (symtab);
	  else
	    summaries->remove (cgraph_node::get (f->decl));
	  summary = summaries->get_create (cgraph_node::get (f->decl));
	}
      if (lto)
	{
	  if (!summaries_lto)
	    summaries_lto = modref_summaries_lto::create_ggc (symtab);
	  else
	    summaries_lto->remove (cgraph_node::get (f->decl));
	  summary_lto = summaries_lto->get_create (cgraph_node::get (f->decl));
	}
    }

  /* Create and initialize summary for F.
     Note that summaries may be already allocated from previous
     run of the pass.  */
  if (nolto)
    {
      gcc_assert (!summary->loads);
      summary->loads = modref_records::create_ggc (param_modref_max_bases,
						   param_modref_max_refs,
						   param_modref_max_accesses);
      gcc_assert (!summary->stores);
      summary->stores = modref_records::create_ggc (param_modref_max_bases,
						    param_modref_max_refs,
						    param_modref_max_accesses);
    }
  if (lto)
    {
      gcc_assert (!summary_lto->loads);
      summary_lto->loads = modref_records_lto::create_ggc
				 (param_modref_max_bases,
				  param_modref_max_refs,
				  param_modref_max_accesses);
      gcc_assert (!summary_lto->stores);
      summary_lto->stores = modref_records_lto::create_ggc
				 (param_modref_max_bases,
				  param_modref_max_refs,
				  param_modref_max_accesses);
    }
  int ecf_flags = flags_from_decl_or_type (current_function_decl);
  auto_vec <gimple *, 32> recursive_calls;

  /* Analyze each statement in each basic block of the function.  If the
     statement cannot be analyzed (for any reason), the entire function cannot
     be analyzed by modref.  */
  basic_block bb;
  FOR_EACH_BB_FN (bb, f)
    {
      gimple_stmt_iterator si;
      for (si = gsi_after_labels (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  if (!analyze_stmt (summary, summary_lto,
			     gsi_stmt (si), ipa, &recursive_calls)
	      || ((!summary || !summary->useful_p (ecf_flags))
		  && (!summary_lto || !summary_lto->useful_p (ecf_flags))))
	    {
	      remove_summary (lto, nolto, ipa);
	      return;
	    }
	}
    }

  /* In non-IPA mode we need to perform iterative dataflow on recursive calls.
     This needs to be done after all other side effects are computed.  */
  if (!ipa)
    {
      bool changed = true;
      while (changed)
	{
	  changed = false;
	  for (unsigned i = 0; i < recursive_calls.length (); i++)
	    {
	      changed |= merge_call_side_effects
			  (summary, recursive_calls[i], summary,
			   ignore_stores_p (current_function_decl,
					    gimple_call_flags
						 (recursive_calls[i])),
			   fnode);
	      if (!summary->useful_p (ecf_flags))
		{
		  remove_summary (lto, nolto, ipa);
		  return;
		}
	    }
	}
    }
  /* Drop summaries that turned out not to be useful after all.  */
  if (summary && !summary->useful_p (ecf_flags))
    {
      if (!ipa)
	optimization_summaries->remove (fnode);
      else
	summaries->remove (fnode);
      summary = NULL;
    }
  if (summary_lto && !summary_lto->useful_p (ecf_flags))
    {
      summaries_lto->remove (fnode);
      summary_lto = NULL;
    }

  if (dump_file)
    {
      fprintf (dump_file, " - modref done with result: tracked.\n");
      if (summary)
	summary->dump (dump_file);
      if (summary_lto)
	summary_lto->dump (dump_file);
    }
}
/* Callback for generate_summary: analyze every function with a gimple body
   in IPA mode.  */

static void
modref_generate (void)
{
  struct cgraph_node *node;
  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    {
      function *f = DECL_STRUCT_FUNCTION (node->decl);
      if (!f)
	continue;
      push_cfun (f);
      analyze_function (f, true);
      pop_cfun ();
    }
}
/* Called when a new function is inserted to callgraph late.
   Analyze it (IPA tables) or drop the entry (optimization tables, where the
   pass manager will re-run the local pass anyway).  */

void
modref_summaries::insert (struct cgraph_node *node, modref_summary *)
{
  /* Local passes ought to be executed by the pass manager.  */
  if (this == optimization_summaries)
    {
      optimization_summaries->remove (node);
      return;
    }
  if (!DECL_STRUCT_FUNCTION (node->decl))
    {
      summaries->remove (node);
      return;
    }
  push_cfun (DECL_STRUCT_FUNCTION (node->decl));
  analyze_function (DECL_STRUCT_FUNCTION (node->decl), true);
  pop_cfun ();
}
/* Called when a new function is inserted to callgraph late (LTO tables).  */

void
modref_summaries_lto::insert (struct cgraph_node *node, modref_summary_lto *)
{
  /* We do not support adding new function when IPA information is already
     propagated.  This is done only by SIMD cloning that is not very
     critical.  */
  if (!DECL_STRUCT_FUNCTION (node->decl)
      || propagated)
    {
      summaries_lto->remove (node);
      return;
    }
  push_cfun (DECL_STRUCT_FUNCTION (node->decl));
  analyze_function (DECL_STRUCT_FUNCTION (node->decl), true);
  pop_cfun ();
}
/* Called when new clone is inserted to callgraph late.  Deep-copy the
   load/store trees from SRC_DATA into DST_DATA.  */

void
modref_summaries::duplicate (cgraph_node *, cgraph_node *dst,
			     modref_summary *src_data,
			     modref_summary *dst_data)
{
  /* Do not duplicate optimization summaries; we do not handle parameter
     transforms on them.  */
  if (this == optimization_summaries)
    {
      optimization_summaries->remove (dst);
      return;
    }
  dst_data->stores = modref_records::create_ggc
			(src_data->stores->max_bases,
			 src_data->stores->max_refs,
			 src_data->stores->max_accesses);
  dst_data->stores->copy_from (src_data->stores);
  dst_data->loads = modref_records::create_ggc
			(src_data->loads->max_bases,
			 src_data->loads->max_refs,
			 src_data->loads->max_accesses);
  dst_data->loads->copy_from (src_data->loads);
}
/* Called when new clone is inserted to callgraph late (LTO tables).
   Deep-copy the load/store trees from SRC_DATA into DST_DATA.  */

void
modref_summaries_lto::duplicate (cgraph_node *, cgraph_node *,
				 modref_summary_lto *src_data,
				 modref_summary_lto *dst_data)
{
  /* Be sure that no further cloning happens after ipa-modref.  If it does
     we will need to update signatures for possible param changes.  */
  gcc_checking_assert (!((modref_summaries_lto *)summaries_lto)->propagated);
  dst_data->stores = modref_records_lto::create_ggc
			(src_data->stores->max_bases,
			 src_data->stores->max_refs,
			 src_data->stores->max_accesses);
  dst_data->stores->copy_from (src_data->stores);
  dst_data->loads = modref_records_lto::create_ggc
			(src_data->loads->max_bases,
			 src_data->loads->max_refs,
			 src_data->loads->max_accesses);
  dst_data->loads->copy_from (src_data->loads);
}
1098 namespace
1100 /* Definition of the modref pass on GIMPLE. */
1101 const pass_data pass_data_modref = {
1102 GIMPLE_PASS,
1103 "modref",
1104 OPTGROUP_IPA,
1105 TV_TREE_MODREF,
1106 (PROP_cfg | PROP_ssa),
/* The local (GIMPLE) modref pass; gated on -fipa-modref and cloneable so it
   can run both early and late in the pipeline.  */

class pass_modref : public gimple_opt_pass
{
public:
  pass_modref (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_modref, ctxt) {}

  /* opt_pass methods: */
  opt_pass *clone ()
  {
    return new pass_modref (m_ctxt);
  }
  virtual bool gate (function *)
  {
    return flag_ipa_modref;
  }
  virtual unsigned int execute (function *);
};
/* Encode TT to the output block OB using the summary streaming API.
   Layout: limits, every_base flag, then for each base node its tree,
   every_ref flag and ref nodes, each with its accesses.  Must be kept in
   sync with read_modref_records.  */

static void
write_modref_records (modref_records_lto *tt, struct output_block *ob)
{
  streamer_write_uhwi (ob, tt->max_bases);
  streamer_write_uhwi (ob, tt->max_refs);
  streamer_write_uhwi (ob, tt->max_accesses);

  streamer_write_uhwi (ob, tt->every_base);
  streamer_write_uhwi (ob, vec_safe_length (tt->bases));
  size_t i;
  modref_base_node <tree> *base_node;
  FOR_EACH_VEC_SAFE_ELT (tt->bases, i, base_node)
    {
      stream_write_tree (ob, base_node->base, true);

      streamer_write_uhwi (ob, base_node->every_ref);
      streamer_write_uhwi (ob, vec_safe_length (base_node->refs));

      size_t j;
      modref_ref_node <tree> *ref_node;
      FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	{
	  stream_write_tree (ob, ref_node->ref, true);
	  streamer_write_uhwi (ob, ref_node->every_access);
	  streamer_write_uhwi (ob, vec_safe_length (ref_node->accesses));

	  size_t k;
	  modref_access_node *access_node;
	  FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
	    {
	      streamer_write_hwi (ob, access_node->parm_index);
	      /* Offsets/sizes are streamed only for parameter-relative
		 accesses with a known parameter offset.  */
	      if (access_node->parm_index != -1)
		{
		  streamer_write_uhwi (ob, access_node->parm_offset_known);
		  if (access_node->parm_offset_known)
		    {
		      streamer_write_poly_int64 (ob, access_node->parm_offset);
		      streamer_write_poly_int64 (ob, access_node->offset);
		      streamer_write_poly_int64 (ob, access_node->size);
		      streamer_write_poly_int64 (ob, access_node->max_size);
		    }
		}
	    }
	}
    }
}
1180 /* Read a modref_tree from the input block IB using the data from DATA_IN.
1181 This assumes that the tree was encoded using write_modref_tree.
1182 Either nolto_ret or lto_ret is initialized by the tree depending whether
1183 LTO streaming is expected or not. */
1185 void
1186 read_modref_records (lto_input_block *ib, struct data_in *data_in,
1187 modref_records **nolto_ret,
1188 modref_records_lto **lto_ret)
/* Read back the limits streamed first by write_modref_records.  */
1190 size_t max_bases = streamer_read_uhwi (ib);
1191 size_t max_refs = streamer_read_uhwi (ib);
1192 size_t max_accesses = streamer_read_uhwi (ib);
1194 if (lto_ret)
1195 *lto_ret = modref_records_lto::create_ggc (max_bases, max_refs,
1196 max_accesses);
1197 if (nolto_ret)
1198 *nolto_ret = modref_records::create_ggc (max_bases, max_refs,
1199 max_accesses);
/* At least one of the two output trees must be requested.  */
1200 gcc_checking_assert (lto_ret || nolto_ret);
1202 size_t every_base = streamer_read_uhwi (ib);
1203 size_t nbase = streamer_read_uhwi (ib);
/* A collapsed tree streams no base nodes.  */
1205 gcc_assert (!every_base || nbase == 0);
1206 if (every_base)
1208 if (nolto_ret)
1209 (*nolto_ret)->collapse ();
1210 if (lto_ret)
1211 (*lto_ret)->collapse ();
1213 for (size_t i = 0; i < nbase; i++)
1215 tree base_tree = stream_read_tree (ib, data_in);
1216 modref_base_node <alias_set_type> *nolto_base_node = NULL;
1217 modref_base_node <tree> *lto_base_node = NULL;
1219 /* At stream in time we have LTO alias info. Check if we streamed in
1220 something obviously unnecessary. Do not glob types by alias sets;
1221 it is not 100% clear that ltrans types will get merged same way.
1222 Types may get refined based on ODR type conflicts. */
1223 if (base_tree && !get_alias_set (base_tree))
1225 if (dump_file)
1227 fprintf (dump_file, "Streamed in alias set 0 type ");
1228 print_generic_expr (dump_file, base_tree);
1229 fprintf (dump_file, "\n");
/* Alias set 0 conflicts with everything; drop the type and fall back to
   the "unknown base" (alias set 0 / NULL) node.  */
1231 base_tree = NULL;
1234 if (nolto_ret)
1235 nolto_base_node = (*nolto_ret)->insert_base (base_tree
1236 ? get_alias_set (base_tree)
1237 : 0);
1238 if (lto_ret)
1239 lto_base_node = (*lto_ret)->insert_base (base_tree);
1240 size_t every_ref = streamer_read_uhwi (ib);
1241 size_t nref = streamer_read_uhwi (ib);
1243 gcc_assert (!every_ref || nref == 0);
1244 if (every_ref)
1246 if (nolto_base_node)
1247 nolto_base_node->collapse ();
1248 if (lto_base_node)
1249 lto_base_node->collapse ();
1251 for (size_t j = 0; j < nref; j++)
1253 tree ref_tree = stream_read_tree (ib, data_in);
/* Same alias-set-0 sanity filtering as for base types above.  */
1255 if (ref_tree && !get_alias_set (ref_tree))
1257 if (dump_file)
1259 fprintf (dump_file, "Streamed in alias set 0 type ");
1260 print_generic_expr (dump_file, ref_tree);
1261 fprintf (dump_file, "\n");
1263 ref_tree = NULL;
1266 modref_ref_node <alias_set_type> *nolto_ref_node = NULL;
1267 modref_ref_node <tree> *lto_ref_node = NULL;
1269 if (nolto_base_node)
1270 nolto_ref_node
1271 = nolto_base_node->insert_ref (ref_tree
1272 ? get_alias_set (ref_tree) : 0,
1273 max_refs);
1274 if (lto_base_node)
1275 lto_ref_node = lto_base_node->insert_ref (ref_tree, max_refs);
1277 size_t every_access = streamer_read_uhwi (ib);
1278 size_t naccesses = streamer_read_uhwi (ib);
1280 if (nolto_ref_node)
1281 nolto_ref_node->every_access = every_access;
1282 if (lto_ref_node)
1283 lto_ref_node->every_access = every_access;
1285 for (size_t k = 0; k < naccesses; k++)
1287 int parm_index = streamer_read_hwi (ib);
1288 bool parm_offset_known = false;
/* Defaults mirror an "unknown" access; overwritten below when the
   writer streamed the optional fields.  */
1289 poly_int64 parm_offset = 0;
1290 poly_int64 offset = 0;
1291 poly_int64 size = -1;
1292 poly_int64 max_size = -1;
1294 if (parm_index != -1)
1296 parm_offset_known = streamer_read_uhwi (ib);
1297 if (parm_offset_known)
1299 parm_offset = streamer_read_poly_int64 (ib);
1300 offset = streamer_read_poly_int64 (ib);
1301 size = streamer_read_poly_int64 (ib);
1302 max_size = streamer_read_poly_int64 (ib);
1305 modref_access_node a = {offset, size, max_size, parm_offset,
1306 parm_index, parm_offset_known};
1307 if (nolto_ref_node)
1308 nolto_ref_node->insert_access (a, max_accesses);
1309 if (lto_ref_node)
1310 lto_ref_node->insert_access (a, max_accesses);
/* Canonicalize the freshly built trees (drop redundant nodes).  */
1314 if (lto_ret)
1315 (*lto_ret)->cleanup ();
1316 if (nolto_ret)
1317 (*nolto_ret)->cleanup ();
1320 /* Callback for write_summary. */
/* Stream out LTO summaries for all analyzed functions in the symbol table
   encoder.  The format is: count, then per function its encoder index
   followed by the loads and stores trees.  */
1322 static void
1323 modref_write ()
1325 struct output_block *ob = create_output_block (LTO_section_ipa_modref);
1326 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
1327 unsigned int count = 0;
1328 int i;
/* Nothing analyzed: emit an empty section so the reader still finds one.  */
1330 if (!summaries_lto)
1332 streamer_write_uhwi (ob, 0);
1333 streamer_write_char_stream (ob->main_stream, 0);
1334 produce_asm (ob, NULL);
1335 destroy_output_block (ob);
1336 return;
/* First pass: count functions with a useful summary so the reader knows
   how many records follow.  */
1339 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
1341 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
1342 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
1343 modref_summary_lto *r;
1345 if (cnode && cnode->definition && !cnode->alias
1346 && (r = summaries_lto->get (cnode))
1347 && r->useful_p (flags_from_decl_or_type (cnode->decl)))
1348 count++;
1350 streamer_write_uhwi (ob, count);
/* Second pass: stream each record; the filter must match the count pass
   above exactly.  */
1352 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
1354 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
1355 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
1357 if (cnode && cnode->definition && !cnode->alias)
1360 modref_summary_lto *r = summaries_lto->get (cnode);
1362 if (!r || !r->useful_p (flags_from_decl_or_type (cnode->decl)))
1363 continue;
1365 streamer_write_uhwi (ob, lto_symtab_encoder_encode (encoder, cnode));
1367 write_modref_records (r->loads, ob);
1368 write_modref_records (r->stores, ob);
1371 streamer_write_char_stream (ob->main_stream, 0);
1372 produce_asm (ob, NULL);
1373 destroy_output_block (ob);
/* Parse one LTO_section_ipa_modref section of LEN bytes at DATA belonging
   to FILE_DATA and populate the global summary tables for each function
   record found.  */
1376 static void
1377 read_section (struct lto_file_decl_data *file_data, const char *data,
1378 size_t len)
1380 const struct lto_function_header *header
1381 = (const struct lto_function_header *) data;
1382 const int cfg_offset = sizeof (struct lto_function_header);
1383 const int main_offset = cfg_offset + header->cfg_size;
1384 const int string_offset = main_offset + header->main_size;
1385 struct data_in *data_in;
1386 unsigned int i;
1387 unsigned int f_count;
1389 lto_input_block ib ((const char *) data + main_offset, header->main_size,
1390 file_data->mode_table);
1392 data_in
1393 = lto_data_in_create (file_data, (const char *) data + string_offset,
1394 header->string_size, vNULL);
/* Record count streamed first by modref_write.  */
1395 f_count = streamer_read_uhwi (&ib);
1396 for (i = 0; i < f_count; i++)
1398 struct cgraph_node *node;
1399 lto_symtab_encoder_t encoder;
1401 unsigned int index = streamer_read_uhwi (&ib);
1402 encoder = file_data->symtab_node_encoder;
1403 node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder,
1404 index));
/* Depending on the compilation stage, fill the non-LTO and/or the LTO
   summary table (see modref_read for which tables exist when).  */
1406 modref_summary *modref_sum = summaries
1407 ? summaries->get_create (node) : NULL;
1408 modref_summary_lto *modref_sum_lto = summaries_lto
1409 ? summaries_lto->get_create (node)
1410 : NULL;
1412 if (optimization_summaries)
1413 modref_sum = optimization_summaries->get_create (node);
/* Each node must be read at most once per section.  */
1415 gcc_assert (!modref_sum || (!modref_sum->loads
1416 && !modref_sum->stores));
1417 gcc_assert (!modref_sum_lto || (!modref_sum_lto->loads
1418 && !modref_sum_lto->stores));
/* Loads were streamed before stores; mirror that order.  */
1419 read_modref_records (&ib, data_in,
1420 modref_sum ? &modref_sum->loads : NULL,
1421 modref_sum_lto ? &modref_sum_lto->loads : NULL);
1422 read_modref_records (&ib, data_in,
1423 modref_sum ? &modref_sum->stores : NULL,
1424 modref_sum_lto ? &modref_sum_lto->stores : NULL);
1425 if (dump_file)
1427 fprintf (dump_file, "Read modref for %s\n",
1428 node->dump_name ());
1429 if (modref_sum)
1430 modref_sum->dump (dump_file);
1431 if (modref_sum_lto)
1432 modref_sum_lto->dump (dump_file);
1436 lto_free_section_data (file_data, LTO_section_ipa_modref, NULL, data,
1437 len);
1438 lto_data_in_delete (data_in);
1441 /* Callback for read_summary. */
/* Create the summary tables appropriate for the current LTO stage and
   read the modref section of every input file.  */
1443 static void
1444 modref_read (void)
1446 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1447 struct lto_file_decl_data *file_data;
1448 unsigned int j = 0;
1450 gcc_checking_assert (!optimization_summaries && !summaries && !summaries_lto);
/* ltrans: summaries are final, go straight to optimization_summaries.  */
1451 if (flag_ltrans)
1452 optimization_summaries = modref_summaries::create_ggc (symtab);
1453 else
/* WPA / incremental LTO link: keep the LTO (type-based) summaries for
   re-streaming; fat objects additionally need the local variant.  */
1455 if (flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
1456 summaries_lto = modref_summaries_lto::create_ggc (symtab);
1457 if (!flag_wpa
1458 || (flag_incremental_link == INCREMENTAL_LINK_LTO
1459 && flag_fat_lto_objects))
1460 summaries = modref_summaries::create_ggc (symtab);
1463 while ((file_data = file_data_vec[j++]))
1465 size_t len;
1466 const char *data = lto_get_summary_section_data (file_data,
1467 LTO_section_ipa_modref,
1468 &len);
1469 if (data)
1470 read_section (file_data, data, len);
1471 else
1472 /* Fatal error here. We do not want to support compiling ltrans units
1473 with different version of compiler or different flags than the WPA
1474 unit, so this should never happen. */
1475 fatal_error (input_location,
1476 "IPA modref summary is missing in input file");
1480 /* If signature changed, update the summary. */
1482 static void
1483 update_signature (struct cgraph_node *node)
1485 if (!node->clone.param_adjustments)
1486 return;
1488 modref_summary *r = optimization_summaries
1489 ? optimization_summaries->get (node) : NULL;
1490 modref_summary_lto *r_lto = summaries_lto
1491 ? summaries_lto->get (node) : NULL;
1492 if (!r && !r_lto)
1493 return;
1494 if (dump_file)
1496 fprintf (dump_file, "Updating summary for %s from:\n",
1497 node->dump_name ());
1498 r->dump (dump_file);
1501 size_t i, max = 0;
1502 ipa_adjusted_param *p;
1504 FOR_EACH_VEC_SAFE_ELT (node->clone.param_adjustments->m_adj_params, i, p)
1506 int idx = node->clone.param_adjustments->get_original_index (i);
1507 if (idx > (int)max)
1508 max = idx;
1511 auto_vec <int, 32> map;
1513 map.reserve (max + 1);
1514 for (i = 0; i <= max; i++)
1515 map.quick_push (-1);
1516 FOR_EACH_VEC_SAFE_ELT (node->clone.param_adjustments->m_adj_params, i, p)
1518 int idx = node->clone.param_adjustments->get_original_index (i);
1519 if (idx >= 0)
1520 map[idx] = i;
1522 if (r)
1524 r->loads->remap_params (&map);
1525 r->stores->remap_params (&map);
1527 if (r_lto)
1529 r_lto->loads->remap_params (&map);
1530 r_lto->stores->remap_params (&map);
1532 if (dump_file)
1534 fprintf (dump_file, "to:\n");
1535 if (r)
1536 r->dump (dump_file);
1537 if (r_lto)
1538 r_lto->dump (dump_file);
1540 return;
1543 /* Definition of the modref IPA pass. */
/* IPA pass metadata: shares the "modref" name with the GIMPLE pass but is
   timed separately (TV_IPA_MODREF) and dumps the symtab when done.  */
1544 const pass_data pass_data_ipa_modref =
1546 IPA_PASS, /* type */
1547 "modref", /* name */
1548 OPTGROUP_IPA, /* optinfo_flags */
1549 TV_IPA_MODREF, /* tv_id */
1550 0, /* properties_required */
1551 0, /* properties_provided */
1552 0, /* properties_destroyed */
1553 0, /* todo_flags_start */
1554 ( TODO_dump_symtab ), /* todo_flags_finish */
/* Whole-program modref pass: streams summaries in/out of LTO sections and
   propagates them across the callgraph (see execute below).  */
1557 class pass_ipa_modref : public ipa_opt_pass_d
1559 public:
1560 pass_ipa_modref (gcc::context *ctxt)
1561 : ipa_opt_pass_d (pass_data_ipa_modref, ctxt,
1562 modref_generate, /* generate_summary */
1563 modref_write, /* write_summary */
1564 modref_read, /* read_summary */
/* The same streamers serve both plain and optimization summaries.  */
1565 modref_write, /* write_optimization_summary */
1566 modref_read, /* read_optimization_summary */
1567 NULL, /* stmt_fixup */
1568 0, /* function_transform_todo_flags_start */
1569 NULL, /* function_transform */
1570 NULL) /* variable_transform */
1573 /* opt_pass methods: */
1574 opt_pass *clone () { return new pass_ipa_modref (m_ctxt); }
/* Always runs; execute itself bails out when there are no summaries.  */
1575 virtual bool gate (function *)
1577 return true;
1579 virtual unsigned int execute (function *);
1585 unsigned int pass_modref::execute (function *f)
1587 analyze_function (f, false);
1588 return 0;
1591 gimple_opt_pass *
1592 make_pass_modref (gcc::context *ctxt)
1594 return new pass_modref (ctxt);
1597 ipa_opt_pass_d *
1598 make_pass_ipa_modref (gcc::context *ctxt)
1600 return new pass_ipa_modref (ctxt);
1603 /* Skip edges from and to nodes without ipa_pure_const enabled.
1604 Ignore not available symbols. */
1606 static bool
1607 ignore_edge (struct cgraph_edge *e)
1609 /* We merge summaries of inline clones into summaries of functions they
1610 are inlined to. For that reason the complete function bodies must
1611 act as unit. */
1612 if (!e->inline_failed)
1613 return false;
1614 enum availability avail;
1615 cgraph_node *callee = e->callee->function_or_virtual_thunk_symbol
1616 (&avail, e->caller);
1618 return (avail <= AVAIL_INTERPOSABLE
1619 || ((!optimization_summaries || !optimization_summaries->get (callee))
1620 && (!summaries_lto || !summaries_lto->get (callee)))
1621 || flags_from_decl_or_type (e->callee->decl)
1622 & (ECF_CONST | ECF_NOVOPS));
1625 /* Compute parm_map for CALLEE_EDGE: for each argument of the call, record
   which caller parameter (if any) it is based on and at what byte offset.
   parm_index -1 means unknown, -2 means local or readonly memory.  */
1627 static void
1628 compute_parm_map (cgraph_edge *callee_edge, vec<modref_parm_map> *parm_map)
1630 class ipa_edge_args *args;
/* Jump functions are only meaningful when IPA analysis ran and the call
   statement matches the callee's declaration.  */
1631 if (ipa_node_params_sum
1632 && !callee_edge->call_stmt_cannot_inline_p
1633 && (args = IPA_EDGE_REF (callee_edge)) != NULL)
1635 int i, count = ipa_get_cs_argument_count (args);
1636 class ipa_node_params *caller_parms_info, *callee_pi;
1637 class ipa_call_summary *es
1638 = ipa_call_summaries->get (callee_edge);
1639 cgraph_node *callee
1640 = callee_edge->callee->function_or_virtual_thunk_symbol
1641 (NULL, callee_edge->caller);
/* For inline clones, parameter info lives on the function the caller
   was inlined into.  */
1643 caller_parms_info = IPA_NODE_REF (callee_edge->caller->inlined_to
1644 ? callee_edge->caller->inlined_to
1645 : callee_edge->caller);
1646 callee_pi = IPA_NODE_REF (callee);
1648 (*parm_map).safe_grow_cleared (count);
1650 for (i = 0; i < count; i++)
/* -2: argument points to caller-local or readonly memory; such accesses
   can never interfere with the caller's other memory.  */
1652 if (es && es->param[i].points_to_local_or_readonly_memory)
1654 (*parm_map)[i].parm_index = -2;
1655 continue;
1658 struct ipa_jump_func *jf
1659 = ipa_get_ith_jump_func (args, i);
1660 if (jf && callee_pi)
/* NOTE(review): the extraction appears to have dropped a line of this
   call (the jump-function argument between lines 1662 and 1664);
   confirm the full argument list against the upstream file.  */
1662 tree cst = ipa_value_from_jfunc (caller_parms_info,
1664 ipa_get_type
1665 (callee_pi, i));
1666 if (cst && points_to_local_or_readonly_memory_p (cst))
1668 (*parm_map)[i].parm_index = -2;
1669 continue;
/* Pass-through: argument is a caller parameter, possibly offset.  */
1672 if (jf && jf->type == IPA_JF_PASS_THROUGH)
1674 (*parm_map)[i].parm_index
1675 = ipa_get_jf_pass_through_formal_id (jf);
1676 if (ipa_get_jf_pass_through_operation (jf) == NOP_EXPR)
1678 (*parm_map)[i].parm_offset_known = true;
1679 (*parm_map)[i].parm_offset = 0;
1681 else if (ipa_get_jf_pass_through_operation (jf)
1682 == POINTER_PLUS_EXPR
1683 && ptrdiff_tree_p (ipa_get_jf_pass_through_operand (jf),
1684 &(*parm_map)[i].parm_offset))
1685 (*parm_map)[i].parm_offset_known = true;
1686 else
1687 (*parm_map)[i].parm_offset_known = false;
1688 continue;
/* Ancestor: argument points into an object reachable from a caller
   parameter at a known bit offset (converted to bytes here).  */
1690 if (jf && jf->type == IPA_JF_ANCESTOR)
1692 (*parm_map)[i].parm_index = ipa_get_jf_ancestor_formal_id (jf);
1693 (*parm_map)[i].parm_offset_known = true;
1694 gcc_checking_assert
1695 (!(ipa_get_jf_ancestor_offset (jf) & (BITS_PER_UNIT - 1)));
1696 (*parm_map)[i].parm_offset
1697 = ipa_get_jf_ancestor_offset (jf) >> LOG2_BITS_PER_UNIT;
1699 else
1700 (*parm_map)[i].parm_index = -1;
1702 if (dump_file)
1704 fprintf (dump_file, " Parm map: ");
1705 for (i = 0; i < count; i++)
1706 fprintf (dump_file, " %i", (*parm_map)[i].parm_index);
1707 fprintf (dump_file, "\n");
1712 /* Call EDGE was inlined; merge summary from callee to the caller. */
1714 void
1715 ipa_merge_modref_summary_after_inlining (cgraph_edge *edge)
1717 if (!summaries && !summaries_lto)
1718 return;
/* The merge target is the outermost function the caller is inlined into.  */
1720 struct cgraph_node *to = (edge->caller->inlined_to
1721 ? edge->caller->inlined_to : edge->caller);
1722 class modref_summary *to_info = summaries ? summaries->get (to) : NULL;
1723 class modref_summary_lto *to_info_lto = summaries_lto
1724 ? summaries_lto->get (to) : NULL;
/* Caller not tracked: just drop the callee's summaries.  */
1726 if (!to_info && !to_info_lto)
1728 if (summaries)
1729 summaries->remove (edge->callee);
1730 if (summaries_lto)
1731 summaries_lto->remove (edge->callee);
1732 return;
1735 class modref_summary *callee_info = summaries ? summaries->get (edge->callee)
1736 : NULL;
1737 class modref_summary_lto *callee_info_lto
1738 = summaries_lto ? summaries_lto->get (edge->callee) : NULL;
1739 int flags = flags_from_decl_or_type (edge->callee->decl);
/* Callee has no summary: give up on the caller's summary too, except that
   when callee stores can be ignored, only the loads must be collapsed.  */
1741 if (!callee_info && to_info)
1743 if (ignore_stores_p (edge->caller->decl, flags))
1744 to_info->loads->collapse ();
1745 else
1747 summaries->remove (to);
1748 to_info = NULL;
1751 if (!callee_info_lto && to_info_lto)
1753 if (ignore_stores_p (edge->caller->decl, flags))
1754 to_info_lto->loads->collapse ();
1755 else
1757 summaries_lto->remove (to);
1758 to_info_lto = NULL;
/* Merge the callee trees into the caller, translating callee parameter
   indexes through the jump functions of EDGE.  */
1761 if (callee_info || callee_info_lto)
1763 auto_vec <modref_parm_map, 32> parm_map;
1765 compute_parm_map (edge, &parm_map);
1767 if (!ignore_stores_p (edge->caller->decl, flags))
1769 if (to_info && callee_info)
1770 to_info->stores->merge (callee_info->stores, &parm_map);
1771 if (to_info_lto && callee_info_lto)
1772 to_info_lto->stores->merge (callee_info_lto->stores, &parm_map);
1774 if (to_info && callee_info)
1775 to_info->loads->merge (callee_info->loads, &parm_map);
1776 if (to_info_lto && callee_info_lto)
1777 to_info_lto->loads->merge (callee_info_lto->loads, &parm_map);
/* Summaries that became trivial are dropped; the rest is dumped.  */
1779 if (summaries)
1781 if (to_info && !to_info->useful_p (flags))
1783 if (dump_file)
1784 fprintf (dump_file, "Removed mod-ref summary for %s\n",
1785 to->dump_name ());
1786 summaries->remove (to);
/* The inner if (dump_file) below is redundant: this branch is only
   reached when dump_file is non-NULL.  Harmless.  */
1788 else if (to_info && dump_file)
1790 if (dump_file)
1791 fprintf (dump_file, "Updated mod-ref summary for %s\n",
1792 to->dump_name ());
1793 to_info->dump (dump_file);
1795 if (callee_info)
1796 summaries->remove (edge->callee);
1798 if (summaries_lto)
1800 if (to_info_lto && !to_info_lto->useful_p (flags))
1802 if (dump_file)
1803 fprintf (dump_file, "Removed mod-ref summary for %s\n",
1804 to->dump_name ());
1805 summaries_lto->remove (to);
1807 else if (to_info_lto && dump_file)
1809 if (dump_file)
1810 fprintf (dump_file, "Updated mod-ref summary for %s\n",
1811 to->dump_name ());
1812 to_info_lto->dump (dump_file);
1814 if (callee_info_lto)
1815 summaries_lto->remove (edge->callee);
1817 return;
1820 /* Collapse loads and return true if something changed. */
1822 bool
1823 collapse_loads (modref_summary *cur_summary,
1824 modref_summary_lto *cur_summary_lto)
1826 bool changed = false;
1828 if (cur_summary && !cur_summary->loads->every_base)
1830 cur_summary->loads->collapse ();
1831 changed = true;
1833 if (cur_summary_lto
1834 && !cur_summary_lto->loads->every_base)
1836 cur_summary_lto->loads->collapse ();
1837 changed = true;
1839 return changed;
1842 /* Perform iterative dataflow on SCC component starting in COMPONENT_NODE. */
/* Repeatedly merges callee summaries into callers within the strongly
   connected component until a fixed point is reached.  */
1844 static void
1845 modref_propagate_in_scc (cgraph_node *component_node)
1847 bool changed = true;
1848 int iteration = 0;
1850 while (changed)
1852 changed = false;
/* Walk the SCC members linked through ipa_dfs_info::next_cycle.  */
1853 for (struct cgraph_node *cur = component_node; cur;
1854 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
/* Inline clones accumulate into the function they are inlined to.  */
1856 cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
1857 modref_summary *cur_summary = optimization_summaries
1858 ? optimization_summaries->get (node)
1859 : NULL;
1860 modref_summary_lto *cur_summary_lto = summaries_lto
1861 ? summaries_lto->get (node)
1862 : NULL;
1864 if (!cur_summary && !cur_summary_lto)
1865 continue;
1867 if (dump_file)
1868 fprintf (dump_file, " Processing %s%s%s\n",
1869 cur->dump_name (),
1870 TREE_READONLY (cur->decl) ? " (const)" : "",
1871 DECL_PURE_P (cur->decl) ? " (pure)" : "");
/* Indirect calls: with unknown targets we must either collapse loads
   (when stores are ignorable) or give up on this node entirely.  */
1873 for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
1875 if (e->indirect_info->ecf_flags & (ECF_CONST | ECF_NOVOPS))
1876 continue;
1877 if (ignore_stores_p (cur->decl, e->indirect_info->ecf_flags))
1879 if (dump_file)
1880 fprintf (dump_file, " Indirect call: "
1881 "collapsing loads\n");
1882 changed |= collapse_loads (cur_summary, cur_summary_lto);
1884 else
1886 if (dump_file)
1887 fprintf (dump_file, " Indirect call: giving up\n");
1888 if (optimization_summaries)
1889 optimization_summaries->remove (node);
1890 if (summaries_lto)
1891 summaries_lto->remove (node);
1892 changed = true;
1893 cur_summary = NULL;
1894 cur_summary_lto = NULL;
1895 break;
1899 if (!cur_summary && !cur_summary_lto)
1900 continue;
/* Direct calls: merge each callee's summary through its parm map.  */
1902 for (cgraph_edge *callee_edge = cur->callees; callee_edge;
1903 callee_edge = callee_edge->next_callee)
1905 int flags = flags_from_decl_or_type (callee_edge->callee->decl);
1906 modref_summary *callee_summary = NULL;
1907 modref_summary_lto *callee_summary_lto = NULL;
1908 struct cgraph_node *callee;
1910 if (flags & (ECF_CONST | ECF_NOVOPS)
1911 || !callee_edge->inline_failed)
1912 continue;
1914 /* Get the callee and its summary. */
1915 enum availability avail;
1916 callee = callee_edge->callee->function_or_virtual_thunk_symbol
1917 (&avail, cur);
1919 /* It is not necessary to re-process calls outside of the
1920 SCC component. */
1921 if (iteration > 0
1922 && (!callee->aux
1923 || ((struct ipa_dfs_info *)cur->aux)->scc_no
1924 != ((struct ipa_dfs_info *)callee->aux)->scc_no))
1925 continue;
1927 if (dump_file)
1928 fprintf (dump_file, " Call to %s\n",
1929 callee_edge->callee->dump_name ());
1931 bool ignore_stores = ignore_stores_p (cur->decl, flags);
/* Interposable targets: same collapse-or-give-up choice as for
   indirect calls above.  */
1933 if (avail <= AVAIL_INTERPOSABLE)
1935 if (!ignore_stores)
1937 if (dump_file)
1938 fprintf (dump_file, " Call target interposable"
1939 " or not available\n");
1941 if (optimization_summaries)
1942 optimization_summaries->remove (node);
1943 if (summaries_lto)
1944 summaries_lto->remove (node);
1945 cur_summary = NULL;
1946 cur_summary_lto = NULL;
1947 changed = true;
1948 break;
1950 else
1952 if (dump_file)
1953 fprintf (dump_file, " Call target interposable"
1954 " or not available; collapsing loads\n");
1956 changed |= collapse_loads (cur_summary, cur_summary_lto);
1957 continue;
1961 /* We don't know anything about CALLEE, hence we cannot tell
1962 anything about the entire component. */
1964 if (cur_summary
1965 && !(callee_summary = optimization_summaries->get (callee)))
1967 if (!ignore_stores)
1969 if (dump_file)
1970 fprintf (dump_file, " No call target summary\n");
1972 optimization_summaries->remove (node);
1973 cur_summary = NULL;
1974 changed = true;
1976 else
1978 if (dump_file)
1979 fprintf (dump_file, " No call target summary;"
1980 " collapsing loads\n");
1982 if (!cur_summary->loads->every_base)
1984 cur_summary->loads->collapse ();
1985 changed = true;
/* Same logic for the LTO summary table.  */
1989 if (cur_summary_lto
1990 && !(callee_summary_lto = summaries_lto->get (callee)))
1992 if (!ignore_stores)
1994 if (dump_file)
1995 fprintf (dump_file, " No call target summary\n");
1997 summaries_lto->remove (node);
1998 cur_summary_lto = NULL;
1999 changed = true;
2001 else
2003 if (dump_file)
2004 fprintf (dump_file, " No call target summary;"
2005 " collapsing loads\n");
2007 if (!cur_summary_lto->loads->every_base)
2009 cur_summary_lto->loads->collapse ();
2010 changed = true;
2015 /* We can not safely optimize based on summary of callee if it
2016 does not always bind to current def: it is possible that
2017 memory load was optimized out earlier which may not happen in
2018 the interposed variant. */
2019 if (!callee_edge->binds_to_current_def_p ())
2021 changed |= collapse_loads (cur_summary, cur_summary_lto);
2022 if (dump_file)
2023 fprintf (dump_file, " May not bind local;"
2024 " collapsing loads\n");
2028 auto_vec <modref_parm_map, 32> parm_map;
2030 compute_parm_map (callee_edge, &parm_map);
2032 /* Merge in callee's information. */
2033 if (callee_summary)
2035 changed |= cur_summary->loads->merge
2036 (callee_summary->loads, &parm_map);
2037 if (!ignore_stores)
2038 changed |= cur_summary->stores->merge
2039 (callee_summary->stores, &parm_map);
2041 if (callee_summary_lto)
2043 changed |= cur_summary_lto->loads->merge
2044 (callee_summary_lto->loads, &parm_map);
2045 if (!ignore_stores)
2046 changed |= cur_summary_lto->stores->merge
2047 (callee_summary_lto->stores, &parm_map);
2049 if (dump_file && changed)
2051 if (cur_summary)
2052 cur_summary->dump (dump_file)
2053 if (cur_summary_lto)
2054 cur_summary_lto->dump (dump_file);
2058 iteration++;
/* Final dump of propagated results for every SCC member.  */
2060 if (dump_file)
2062 fprintf (dump_file,
2063 "Propagation finished in %i iterations\n", iteration);
2064 for (struct cgraph_node *cur = component_node; cur;
2065 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
2066 if (!cur->inlined_to)
2068 modref_summary *cur_summary = optimization_summaries
2069 ? optimization_summaries->get (cur)
2070 : NULL;
2071 modref_summary_lto *cur_summary_lto = summaries_lto
2072 ? summaries_lto->get (cur)
2073 : NULL;
2075 fprintf (dump_file, "Propagated modref for %s%s%s\n",
2076 cur->dump_name (),
2077 TREE_READONLY (cur->decl) ? " (const)" : "",
2078 DECL_PURE_P (cur->decl) ? " (pure)" : "");
2079 if (optimization_summaries)
2081 if (cur_summary)
2082 cur_summary->dump (dump_file);
2083 else
2084 fprintf (dump_file, " Not tracked\n");
2086 if (summaries_lto)
2088 if (cur_summary_lto)
2089 cur_summary_lto->dump (dump_file);
2090 else
2091 fprintf (dump_file, " Not tracked (lto)\n");
2097 /* Run the IPA pass. This will take a function's summaries and calls and
2098 construct new summaries which represent a transitive closure. So that
2099 summary of an analyzed function contains information about the loads and
2100 stores that the function or any function that it calls does. */
2102 unsigned int
2103 pass_ipa_modref::execute (function *)
2105 if (!summaries && !summaries_lto)
2106 return 0;
/* Local summaries become the optimization summaries used by alias
   analysis from here on.  */
2108 if (optimization_summaries)
2109 ggc_delete (optimization_summaries);
2110 optimization_summaries = summaries;
2111 summaries = NULL;
2113 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
2114 symtab->cgraph_count);
2115 int order_pos;
/* Reduced postorder: each SCC is visited after all its callees outside
   the SCC; edges rejected by ignore_edge are not followed.  */
2116 order_pos = ipa_reduced_postorder (order, true, ignore_edge);
2117 int i;
2119 /* Iterate over all strongly connected components in post-order. */
2120 for (i = 0; i < order_pos; i++)
2122 /* Get the component's representative. That's just any node in the
2123 component from which we can traverse the entire component. */
2124 struct cgraph_node *component_node = order[i];
2126 if (dump_file)
2127 fprintf (dump_file, "\n\nStart of SCC component\n");
2129 modref_propagate_in_scc (component_node);
/* Remap parameter indexes of clones whose signature changed.  */
2131 cgraph_node *node;
2132 FOR_EACH_FUNCTION (node)
2133 update_signature (node);
/* After this point further cloning is not allowed (see the assert in
   modref_summaries_lto::duplicate).  */
2134 if (summaries_lto)
2135 ((modref_summaries_lto *)summaries_lto)->propagated = true;
2136 ipa_free_postorder_info ();
2137 free (order);
2138 return 0;
2141 /* Summaries must stay alive until end of compilation. */
2143 void
2144 ipa_modref_c_finalize ()
2146 if (optimization_summaries)
2147 ggc_delete (optimization_summaries);
2148 optimization_summaries = NULL;
2149 gcc_checking_assert (!summaries);
2150 if (summaries_lto)
2152 ggc_delete (summaries_lto);
2153 summaries_lto = NULL;
2157 #include "gt-ipa-modref.h"