1 /* Search for references that a function loads or stores.
2 Copyright (C) 2020-2021 Free Software Foundation, Inc.
3 Contributed by David Cepelik and Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* Mod/ref pass records summary about loads and stores performed by the
22 function. This is later used by alias analysis to disambiguate memory
23 accesses across function calls.
25 This file contains a tree pass and an IPA pass. Both perform the same
26 analysis; however, the tree pass is executed during early and late
27 optimization passes to propagate info downwards in the compilation order.
28 The IPA pass propagates across the callgraph, is able to handle recursion,
29 and works on the whole program during link-time analysis.
31 LTO mode differs from the local mode by not recording alias sets but types
32 that are translated to alias sets later. This is necessary in order to
33 stream the information because the alias sets are rebuilt at stream-in time
34 and may not correspond to the ones seen during analysis. For this reason
35 part of the analysis is duplicated.
37 The following information is computed
38 1) load/store access tree described in ipa-modref-tree.h
39 This is used by tree-ssa-alias to disambiguate load/stores
40 2) EAF flags used by points-to analysis (in tree-ssa-structalias)
41 and defined in tree-core.h.
42 Both are stored to optimization_summaries.
44 There are multiple summaries computed and used during the propagation:
45 - summaries holds summaries from analysis to IPA propagation
46 time.
47 - summaries_lto is the same as summaries but holds them in a format
48 that can be streamed (as described above).
49 - fnspec_summary holds fnspec strings for calls. This is
50 necessary because gimple_call_fnspec performs additional
51 analysis besides looking at the callee fndecl.
52 - escape_summary holds escape points for a given call edge.
53 That is a vector recording what function parameters
54 may escape to a function call (and with what parameter index). */
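/* As an illustration (a made-up example, not part of the sources): for

     void set (int *p)
     {
       *p = 1;
     }

     int test (int *a, int *b)
     {
       *b = 2;
       set (a);
       return *b;
     }

   the summary of SET records a single store with the alias set of int
   through parameter 0.  When disambiguating the call set (a) against the
   load *b in TEST, tree-ssa-alias can use the summary to conclude that
   the call only clobbers memory reachable from A, so the value 2 stored
   to *b can be forwarded to the return statement.  */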
56 #include "config.h"
57 #include "system.h"
58 #include "coretypes.h"
59 #include "backend.h"
60 #include "tree.h"
61 #include "gimple.h"
62 #include "alloc-pool.h"
63 #include "tree-pass.h"
64 #include "gimple-iterator.h"
65 #include "tree-dfa.h"
66 #include "cgraph.h"
67 #include "ipa-utils.h"
68 #include "symbol-summary.h"
69 #include "gimple-pretty-print.h"
70 #include "gimple-walk.h"
71 #include "print-tree.h"
72 #include "tree-streamer.h"
73 #include "alias.h"
74 #include "calls.h"
75 #include "ipa-modref-tree.h"
76 #include "ipa-modref.h"
77 #include "value-range.h"
78 #include "ipa-prop.h"
79 #include "ipa-fnsummary.h"
80 #include "attr-fnspec.h"
81 #include "symtab-clones.h"
82 #include "gimple-ssa.h"
83 #include "tree-phinodes.h"
84 #include "tree-ssa-operands.h"
85 #include "ssa-iterators.h"
86 #include "stringpool.h"
87 #include "tree-ssanames.h"
88 #include "attribs.h"
89 #include "tree-cfg.h"
90 #include "tree-eh.h"
93 namespace {
95 /* We record fnspec specifiers for call edges since they depend on actual
96 gimple statements. */
98 class fnspec_summary
100 public:
101 char *fnspec;
103 fnspec_summary ()
104 : fnspec (NULL)
108 ~fnspec_summary ()
110 free (fnspec);
114 /* Summary holding fnspec string for a given call. */
116 class fnspec_summaries_t : public call_summary <fnspec_summary *>
118 public:
119 fnspec_summaries_t (symbol_table *symtab)
120 : call_summary <fnspec_summary *> (symtab) {}
121 /* Hook that is called by summary when an edge is duplicated. */
122 virtual void duplicate (cgraph_edge *,
123 cgraph_edge *,
124 fnspec_summary *src,
125 fnspec_summary *dst)
127 dst->fnspec = xstrdup (src->fnspec);
131 static fnspec_summaries_t *fnspec_summaries = NULL;
133 /* Escape summary holds a vector of param indexes that escape to
134 a given call. */
135 struct escape_entry
137 /* Parameter that escapes at a given call. */
138 int parm_index;
139 /* Argument it escapes to. */
140 unsigned int arg;
141 /* Minimal flags known about the argument. */
142 eaf_flags_t min_flags;
143 /* Does it escape directly or indirectly? */
144 bool direct;
147 /* Dump EAF flags. */
149 static void
150 dump_eaf_flags (FILE *out, int flags, bool newline = true)
152 if (flags & EAF_UNUSED)
153 fprintf (out, " unused");
154 if (flags & EAF_NO_DIRECT_CLOBBER)
155 fprintf (out, " no_direct_clobber");
156 if (flags & EAF_NO_INDIRECT_CLOBBER)
157 fprintf (out, " no_indirect_clobber");
158 if (flags & EAF_NO_DIRECT_ESCAPE)
159 fprintf (out, " no_direct_escape");
160 if (flags & EAF_NO_INDIRECT_ESCAPE)
161 fprintf (out, " no_indirect_escape");
162 if (flags & EAF_NOT_RETURNED_DIRECTLY)
163 fprintf (out, " not_returned_directly");
164 if (flags & EAF_NOT_RETURNED_INDIRECTLY)
165 fprintf (out, " not_returned_indirectly");
166 if (flags & EAF_NO_DIRECT_READ)
167 fprintf (out, " no_direct_read");
168 if (flags & EAF_NO_INDIRECT_READ)
169 fprintf (out, " no_indirect_read");
170 if (newline)
171 fprintf (out, "\n");
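/* Example of the output: dump_eaf_flags (out, EAF_UNUSED | EAF_NO_DIRECT_READ)
   prints " unused no_direct_read" followed by a newline.  */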
174 struct escape_summary
176 auto_vec <escape_entry> esc;
177 void dump (FILE *out)
179 for (unsigned int i = 0; i < esc.length (); i++)
181 fprintf (out, " parm %i arg %i %s min:",
182 esc[i].parm_index,
183 esc[i].arg,
184 esc[i].direct ? "(direct)" : "(indirect)");
185 dump_eaf_flags (out, esc[i].min_flags, false);
187 fprintf (out, "\n");
191 class escape_summaries_t : public call_summary <escape_summary *>
193 public:
194 escape_summaries_t (symbol_table *symtab)
195 : call_summary <escape_summary *> (symtab) {}
196 /* Hook that is called by summary when an edge is duplicated. */
197 virtual void duplicate (cgraph_edge *,
198 cgraph_edge *,
199 escape_summary *src,
200 escape_summary *dst)
202 dst->esc = src->esc.copy ();
206 static escape_summaries_t *escape_summaries = NULL;
208 } /* ANON namespace: GTY annotated summaries can not be anonymous. */
211 /* Class (from which there is one global instance) that holds modref summaries
212 for all analyzed functions. */
214 class GTY((user)) modref_summaries
215 : public fast_function_summary <modref_summary *, va_gc>
217 public:
218 modref_summaries (symbol_table *symtab)
219 : fast_function_summary <modref_summary *, va_gc> (symtab) {}
220 virtual void insert (cgraph_node *, modref_summary *state);
221 virtual void duplicate (cgraph_node *src_node,
222 cgraph_node *dst_node,
223 modref_summary *src_data,
224 modref_summary *dst_data);
225 static modref_summaries *create_ggc (symbol_table *symtab)
227 return new (ggc_alloc_no_dtor<modref_summaries> ())
228 modref_summaries (symtab);
232 class modref_summary_lto;
234 /* Class (from which there is one global instance) that holds modref summaries
235 for all analyzed functions. */
237 class GTY((user)) modref_summaries_lto
238 : public fast_function_summary <modref_summary_lto *, va_gc>
240 public:
241 modref_summaries_lto (symbol_table *symtab)
242 : fast_function_summary <modref_summary_lto *, va_gc> (symtab),
243 propagated (false) {}
244 virtual void insert (cgraph_node *, modref_summary_lto *state);
245 virtual void duplicate (cgraph_node *src_node,
246 cgraph_node *dst_node,
247 modref_summary_lto *src_data,
248 modref_summary_lto *dst_data);
249 static modref_summaries_lto *create_ggc (symbol_table *symtab)
251 return new (ggc_alloc_no_dtor<modref_summaries_lto> ())
252 modref_summaries_lto (symtab);
254 bool propagated;
257 /* Global variable holding all modref summaries
258 (from analysis to IPA propagation time). */
260 static GTY(()) fast_function_summary <modref_summary *, va_gc>
261 *summaries;
263 /* Global variable holding all modref optimization summaries
264 (from IPA propagation time or used by local optimization pass). */
266 static GTY(()) fast_function_summary <modref_summary *, va_gc>
267 *optimization_summaries;
269 /* LTO summaries hold info from analysis to LTO streaming or from LTO
270 stream-in through propagation to LTO stream-out. */
272 static GTY(()) fast_function_summary <modref_summary_lto *, va_gc>
273 *summaries_lto;
275 /* Summary for a single function which this pass produces. */
277 modref_summary::modref_summary ()
278 : loads (NULL), stores (NULL), retslot_flags (0), static_chain_flags (0),
279 writes_errno (false), side_effects (false), nondeterministic (false),
280 calls_interposable (false), global_memory_read (false),
281 global_memory_written (false), try_dse (false)
285 modref_summary::~modref_summary ()
287 if (loads)
288 ggc_delete (loads);
289 if (stores)
290 ggc_delete (stores);
293 /* Remove all flags from EAF_FLAGS that are implied by ECF_FLAGS and not
294 useful to track. If RETURNS_VOID is true, moreover clear
295 EAF_NOT_RETURNED_DIRECTLY and EAF_NOT_RETURNED_INDIRECTLY. */
296 static int
297 remove_useless_eaf_flags (int eaf_flags, int ecf_flags, bool returns_void)
299 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
300 eaf_flags &= ~implicit_const_eaf_flags;
301 else if (ecf_flags & ECF_PURE)
302 eaf_flags &= ~implicit_pure_eaf_flags;
303 else if ((ecf_flags & ECF_NORETURN) || returns_void)
304 eaf_flags &= ~(EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY);
305 return eaf_flags;
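/* Worked example: for a const function the caller already knows arguments
   can not be clobbered or escape, so the bits in implicit_const_eaf_flags
   carry no extra information and are dropped; for a call whose result is
   unused (RETURNS_VOID) or that never returns (ECF_NORETURN), the
   EAF_NOT_RETURNED_* bits are likewise meaningless and cleared.  */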
308 /* Return true if FLAGS holds some useful information. */
310 static bool
311 eaf_flags_useful_p (vec <eaf_flags_t> &flags, int ecf_flags)
313 for (unsigned i = 0; i < flags.length (); i++)
314 if (remove_useless_eaf_flags (flags[i], ecf_flags, false))
315 return true;
316 return false;
319 /* Return true if summary is potentially useful for optimization.
320 If CHECK_FLAGS is false assume that arg_flags are useful. */
322 bool
323 modref_summary::useful_p (int ecf_flags, bool check_flags)
325 if (arg_flags.length () && !check_flags)
326 return true;
327 if (check_flags && eaf_flags_useful_p (arg_flags, ecf_flags))
328 return true;
329 arg_flags.release ();
330 if (check_flags && remove_useless_eaf_flags (retslot_flags, ecf_flags, false))
331 return true;
332 if (check_flags
333 && remove_useless_eaf_flags (static_chain_flags, ecf_flags, false))
334 return true;
335 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
336 return ((!side_effects || !nondeterministic)
337 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
338 if (loads && !loads->every_base)
339 return true;
340 else
341 kills.release ();
342 if (ecf_flags & ECF_PURE)
343 return ((!side_effects || !nondeterministic)
344 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
345 return stores && !stores->every_base;
348 /* Single function summary used for LTO. */
350 typedef modref_tree <tree> modref_records_lto;
351 struct GTY(()) modref_summary_lto
353 /* Loads and stores in functions using types rather than alias sets.
355 This is necessary to make the information streamable for LTO but is also
356 more verbose and thus more likely to hit the limits. */
357 modref_records_lto *loads;
358 modref_records_lto *stores;
359 auto_vec<modref_access_node> GTY((skip)) kills;
360 auto_vec<eaf_flags_t> GTY((skip)) arg_flags;
361 eaf_flags_t retslot_flags;
362 eaf_flags_t static_chain_flags;
363 unsigned writes_errno : 1;
364 unsigned side_effects : 1;
365 unsigned nondeterministic : 1;
366 unsigned calls_interposable : 1;
368 modref_summary_lto ();
369 ~modref_summary_lto ();
370 void dump (FILE *);
371 bool useful_p (int ecf_flags, bool check_flags = true);
374 /* Summary for a single function which this pass produces. */
376 modref_summary_lto::modref_summary_lto ()
377 : loads (NULL), stores (NULL), retslot_flags (0), static_chain_flags (0),
378 writes_errno (false), side_effects (false), nondeterministic (false),
379 calls_interposable (false)
383 modref_summary_lto::~modref_summary_lto ()
385 if (loads)
386 ggc_delete (loads);
387 if (stores)
388 ggc_delete (stores);
392 /* Return true if lto summary is potentially useful for optimization.
393 If CHECK_FLAGS is false assume that arg_flags are useful. */
395 bool
396 modref_summary_lto::useful_p (int ecf_flags, bool check_flags)
398 if (arg_flags.length () && !check_flags)
399 return true;
400 if (check_flags && eaf_flags_useful_p (arg_flags, ecf_flags))
401 return true;
402 arg_flags.release ();
403 if (check_flags && remove_useless_eaf_flags (retslot_flags, ecf_flags, false))
404 return true;
405 if (check_flags
406 && remove_useless_eaf_flags (static_chain_flags, ecf_flags, false))
407 return true;
408 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
409 return ((!side_effects || !nondeterministic)
410 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
411 if (loads && !loads->every_base)
412 return true;
413 else
414 kills.release ();
415 if (ecf_flags & ECF_PURE)
416 return ((!side_effects || !nondeterministic)
417 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE));
418 return stores && !stores->every_base;
421 /* Dump records TT to OUT. */
423 static void
424 dump_records (modref_records *tt, FILE *out)
426 if (tt->every_base)
428 fprintf (out, " Every base\n");
429 return;
431 size_t i;
432 modref_base_node <alias_set_type> *n;
433 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
435 fprintf (out, " Base %i: alias set %i\n", (int)i, n->base);
436 if (n->every_ref)
438 fprintf (out, " Every ref\n");
439 continue;
441 size_t j;
442 modref_ref_node <alias_set_type> *r;
443 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
445 fprintf (out, " Ref %i: alias set %i\n", (int)j, r->ref);
446 if (r->every_access)
448 fprintf (out, " Every access\n");
449 continue;
451 size_t k;
452 modref_access_node *a;
453 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
455 fprintf (out, " access:");
456 a->dump (out);
462 /* Dump LTO records TT to OUT. */
464 static void
465 dump_lto_records (modref_records_lto *tt, FILE *out)
467 if (tt->every_base)
469 fprintf (out, " Every base\n");
470 return;
472 size_t i;
473 modref_base_node <tree> *n;
474 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)
476 fprintf (out, " Base %i:", (int)i);
477 print_generic_expr (out, n->base);
478 fprintf (out, " (alias set %i)\n",
479 n->base ? get_alias_set (n->base) : 0);
480 if (n->every_ref)
482 fprintf (out, " Every ref\n");
483 continue;
485 size_t j;
486 modref_ref_node <tree> *r;
487 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)
489 fprintf (out, " Ref %i:", (int)j);
490 print_generic_expr (out, r->ref);
491 fprintf (out, " (alias set %i)\n",
492 r->ref ? get_alias_set (r->ref) : 0);
493 if (r->every_access)
495 fprintf (out, " Every access\n");
496 continue;
498 size_t k;
499 modref_access_node *a;
500 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)
502 fprintf (out, " access:");
503 a->dump (out);
509 /* Dump all escape points of NODE to OUT. */
511 static void
512 dump_modref_edge_summaries (FILE *out, cgraph_node *node, int depth)
514 int i = 0;
515 if (!escape_summaries)
516 return;
517 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
519 class escape_summary *sum = escape_summaries->get (e);
520 if (sum)
522 fprintf (out, "%*sIndirect call %i in %s escapes:",
523 depth, "", i, node->dump_name ());
524 sum->dump (out);
526 i++;
528 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
530 if (!e->inline_failed)
531 dump_modref_edge_summaries (out, e->callee, depth + 1);
532 class escape_summary *sum = escape_summaries->get (e);
533 if (sum)
535 fprintf (out, "%*sCall %s->%s escapes:", depth, "",
536 node->dump_name (), e->callee->dump_name ());
537 sum->dump (out);
539 class fnspec_summary *fsum = fnspec_summaries->get (e);
540 if (fsum)
542 fprintf (out, "%*sCall %s->%s fnspec: %s\n", depth, "",
543 node->dump_name (), e->callee->dump_name (),
544 fsum->fnspec);
549 /* Remove all call edge summaries associated with NODE. */
551 static void
552 remove_modref_edge_summaries (cgraph_node *node)
554 if (!escape_summaries)
555 return;
556 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
557 escape_summaries->remove (e);
558 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
560 if (!e->inline_failed)
561 remove_modref_edge_summaries (e->callee);
562 escape_summaries->remove (e);
563 fnspec_summaries->remove (e);
567 /* Dump summary. */
569 void
570 modref_summary::dump (FILE *out)
572 if (loads)
574 fprintf (out, " loads:\n");
575 dump_records (loads, out);
577 if (stores)
579 fprintf (out, " stores:\n");
580 dump_records (stores, out);
582 if (kills.length ())
584 fprintf (out, " kills:\n");
585 for (auto kill : kills)
587 fprintf (out, " ");
588 kill.dump (out);
591 if (writes_errno)
592 fprintf (out, " Writes errno\n");
593 if (side_effects)
594 fprintf (out, " Side effects\n");
595 if (nondeterministic)
596 fprintf (out, " Nondeterministic\n");
597 if (calls_interposable)
598 fprintf (out, " Calls interposable\n");
599 if (global_memory_read)
600 fprintf (out, " Global memory read\n");
601 if (global_memory_written)
602 fprintf (out, " Global memory written\n");
603 if (try_dse)
604 fprintf (out, " Try dse\n");
605 if (arg_flags.length ())
607 for (unsigned int i = 0; i < arg_flags.length (); i++)
608 if (arg_flags[i])
610 fprintf (out, " parm %i flags:", i);
611 dump_eaf_flags (out, arg_flags[i]);
614 if (retslot_flags)
616 fprintf (out, " Retslot flags:");
617 dump_eaf_flags (out, retslot_flags);
619 if (static_chain_flags)
621 fprintf (out, " Static chain flags:");
622 dump_eaf_flags (out, static_chain_flags);
626 /* Dump summary. */
628 void
629 modref_summary_lto::dump (FILE *out)
631 fprintf (out, " loads:\n");
632 dump_lto_records (loads, out);
633 fprintf (out, " stores:\n");
634 dump_lto_records (stores, out);
635 if (kills.length ())
637 fprintf (out, " kills:\n");
638 for (auto kill : kills)
640 fprintf (out, " ");
641 kill.dump (out);
644 if (writes_errno)
645 fprintf (out, " Writes errno\n");
646 if (side_effects)
647 fprintf (out, " Side effects\n");
648 if (nondeterministic)
649 fprintf (out, " Nondeterministic\n");
650 if (calls_interposable)
651 fprintf (out, " Calls interposable\n");
652 if (arg_flags.length ())
654 for (unsigned int i = 0; i < arg_flags.length (); i++)
655 if (arg_flags[i])
657 fprintf (out, " parm %i flags:", i);
658 dump_eaf_flags (out, arg_flags[i]);
661 if (retslot_flags)
663 fprintf (out, " Retslot flags:");
664 dump_eaf_flags (out, retslot_flags);
666 if (static_chain_flags)
668 fprintf (out, " Static chain flags:");
669 dump_eaf_flags (out, static_chain_flags);
673 /* Called after summary is produced and before it is used by local analysis.
674 Can be called multiple times in case the summary needs to update its
675 signature. FUN is the decl of the function the summary is attached to. */
676 void
677 modref_summary::finalize (tree fun)
679 global_memory_read = !loads || loads->global_access_p ();
680 global_memory_written = !stores || stores->global_access_p ();
682 /* We can do DSE if we know the function has no side effects and
683 we can analyse all its stores. Disable DSE if there are too many
684 stores to try. */
685 if (side_effects || global_memory_written || writes_errno)
686 try_dse = false;
687 else
689 try_dse = true;
690 size_t i, j, k;
691 int num_tests = 0, max_tests
692 = opt_for_fn (fun, param_modref_max_tests);
693 modref_base_node <alias_set_type> *base_node;
694 modref_ref_node <alias_set_type> *ref_node;
695 modref_access_node *access_node;
696 FOR_EACH_VEC_SAFE_ELT (stores->bases, i, base_node)
698 if (base_node->every_ref)
700 try_dse = false;
701 break;
703 FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
705 if (ref_node->every_access)
707 try_dse = false;
708 break;
710 FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
711 if (num_tests++ > max_tests
712 || !access_node->parm_offset_known)
714 try_dse = false;
715 break;
717 if (!try_dse)
718 break;
720 if (!try_dse)
721 break;
724 if (loads->every_base)
725 load_accesses = 1;
726 else
728 load_accesses = 0;
729 for (auto base_node : loads->bases)
731 if (base_node->every_ref)
732 load_accesses++;
733 else
734 for (auto ref_node : base_node->refs)
735 if (ref_node->every_access)
736 load_accesses++;
737 else
738 load_accesses += ref_node->accesses->length ();
743 /* Get function summary for FUNC if it exists, return NULL otherwise. */
745 modref_summary *
746 get_modref_function_summary (cgraph_node *func)
748 /* Avoid creation of the summary too early (e.g. when front-end calls us). */
749 if (!optimization_summaries)
750 return NULL;
752 /* A single function body may be represented by multiple symbols with
753 different visibility. For example, if FUNC is an interposable alias,
754 we don't want to return anything, even if we have summary for the target
755 function. */
756 enum availability avail;
757 func = func->function_or_virtual_thunk_symbol
758 (&avail, current_function_decl ?
759 cgraph_node::get (current_function_decl) : NULL);
760 if (avail <= AVAIL_INTERPOSABLE)
761 return NULL;
763 modref_summary *r = optimization_summaries->get (func);
764 return r;
767 /* Get function summary for CALL if it exists, return NULL otherwise.
768 If INTERPOSED is non-NULL, set it to indicate whether the function may
769 not bind to the current def. In this case loads from the function
770 sometimes need to be ignored. */
772 modref_summary *
773 get_modref_function_summary (gcall *call, bool *interposed)
775 tree callee = gimple_call_fndecl (call);
776 if (!callee)
777 return NULL;
778 struct cgraph_node *node = cgraph_node::get (callee);
779 if (!node)
780 return NULL;
781 modref_summary *r = get_modref_function_summary (node);
782 if (interposed && r)
783 *interposed = r->calls_interposable
784 || !node->binds_to_current_def_p ();
785 return r;
789 namespace {
791 /* Return true if ECF flags say that nondeterminism can be ignored. */
793 static bool
794 ignore_nondeterminism_p (tree caller, int flags)
796 if (flags & (ECF_CONST | ECF_PURE))
797 return true;
798 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
799 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
800 return true;
801 return false;
806 /* Return true if ECF flags say that the return value can be ignored. */
806 static bool
807 ignore_retval_p (tree caller, int flags)
809 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
810 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
811 return true;
812 return false;
815 /* Return true if ECF flags say that stores can be ignored. */
817 static bool
818 ignore_stores_p (tree caller, int flags)
820 if (flags & (ECF_PURE | ECF_CONST | ECF_NOVOPS))
821 return true;
822 if ((flags & (ECF_NORETURN | ECF_NOTHROW)) == (ECF_NORETURN | ECF_NOTHROW)
823 || (!opt_for_fn (caller, flag_exceptions) && (flags & ECF_NORETURN)))
824 return true;
825 return false;
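/* Illustrative example (the declaration is made up): given

     __attribute__ ((noreturn, nothrow)) void fail (int *p);

   a call fail (&x) can never pass control back to the caller, so stores
   it performs into *P, its return value and its nondeterminism are all
   unobservable from the caller; the three predicates above therefore
   return true for such ECF flag combinations.  */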
828 /* Determine parm_map for PTR which is supposed to be a pointer. */
830 modref_parm_map
831 parm_map_for_ptr (tree op)
833 bool offset_known;
834 poly_int64 offset;
835 struct modref_parm_map parm_map;
836 gcall *call;
838 parm_map.parm_offset_known = false;
839 parm_map.parm_offset = 0;
841 offset_known = unadjusted_ptr_and_unit_offset (op, &op, &offset);
842 if (TREE_CODE (op) == SSA_NAME
843 && SSA_NAME_IS_DEFAULT_DEF (op)
844 && TREE_CODE (SSA_NAME_VAR (op)) == PARM_DECL)
846 int index = 0;
848 if (cfun->static_chain_decl
849 && op == ssa_default_def (cfun, cfun->static_chain_decl))
850 index = MODREF_STATIC_CHAIN_PARM;
851 else
852 for (tree t = DECL_ARGUMENTS (current_function_decl);
853 t != SSA_NAME_VAR (op); t = DECL_CHAIN (t))
854 index++;
855 parm_map.parm_index = index;
856 parm_map.parm_offset_known = offset_known;
857 parm_map.parm_offset = offset;
859 else if (points_to_local_or_readonly_memory_p (op))
860 parm_map.parm_index = MODREF_LOCAL_MEMORY_PARM;
861 /* Memory allocated in the function is not visible to caller before the
862 call and thus we do not need to record it as load/stores/kills. */
863 else if (TREE_CODE (op) == SSA_NAME
864 && (call = dyn_cast<gcall *>(SSA_NAME_DEF_STMT (op))) != NULL
865 && gimple_call_flags (call) & ECF_MALLOC)
866 parm_map.parm_index = MODREF_LOCAL_MEMORY_PARM;
867 else
868 parm_map.parm_index = MODREF_UNKNOWN_PARM;
869 return parm_map;
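/* Illustrative sketch (F and USE are made-up names): in

     void f (char *p)
     {
       use (p + 4);
     }

   parm_map_for_ptr on the argument of USE walks back to the default
   definition of P and returns parm_index 0 with parm_offset_known true
   and parm_offset 4.  A pointer to local or readonly memory, or one
   returned by an ECF_MALLOC call, yields MODREF_LOCAL_MEMORY_PARM;
   anything else yields MODREF_UNKNOWN_PARM.  */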
872 /* Analyze memory accesses (loads, stores and kills) performed
873 by the function. Set also side_effects, calls_interposable
874 and nondeterminism flags. */
876 class modref_access_analysis
878 public:
879 modref_access_analysis (bool ipa, modref_summary *summary,
880 modref_summary_lto *summary_lto)
881 : m_summary (summary), m_summary_lto (summary_lto), m_ipa (ipa)
884 void analyze ();
885 private:
886 bool set_side_effects ();
887 bool set_nondeterministic ();
888 static modref_access_node get_access (ao_ref *ref);
889 static void record_access (modref_records *, ao_ref *, modref_access_node &);
890 static void record_access_lto (modref_records_lto *, ao_ref *,
891 modref_access_node &a);
892 bool record_access_p (tree);
893 bool record_unknown_load ();
894 bool record_unknown_store ();
895 bool merge_call_side_effects (gimple *, modref_summary *,
896 cgraph_node *, bool);
897 modref_access_node get_access_for_fnspec (gcall *, attr_fnspec &,
898 unsigned int, modref_parm_map &);
899 void process_fnspec (gcall *);
900 void analyze_call (gcall *);
901 static bool analyze_load (gimple *, tree, tree, void *);
902 static bool analyze_store (gimple *, tree, tree, void *);
903 void analyze_stmt (gimple *, bool);
904 void propagate ();
906 /* Summary being computed.
907 We work either with m_summary or m_summary_lto, never on both. */
908 modref_summary *m_summary;
909 modref_summary_lto *m_summary_lto;
910 /* Recursive calls need simplistic dataflow after the analysis finishes.
911 Collect all calls into this vector during analysis and later process
912 them in propagate. */
913 auto_vec <gimple *, 32> m_recursive_calls;
914 /* ECF flags of function being analysed. */
915 int m_ecf_flags;
916 /* True if IPA propagation will be done later. */
917 bool m_ipa;
918 /* Set true if statement currently analysed is known to be
919 executed each time function is called. */
920 bool m_always_executed;
923 /* Set side_effects flag and return true if something changed. */
925 bool
926 modref_access_analysis::set_side_effects ()
928 bool changed = false;
930 if (m_summary && !m_summary->side_effects)
932 m_summary->side_effects = true;
933 changed = true;
935 if (m_summary_lto && !m_summary_lto->side_effects)
937 m_summary_lto->side_effects = true;
938 changed = true;
940 return changed;
943 /* Set nondeterministic flag and return true if something changed. */
945 bool
946 modref_access_analysis::set_nondeterministic ()
948 bool changed = false;
950 if (m_summary && !m_summary->nondeterministic)
952 m_summary->side_effects = m_summary->nondeterministic = true;
953 changed = true;
955 if (m_summary_lto && !m_summary_lto->nondeterministic)
957 m_summary_lto->side_effects = m_summary_lto->nondeterministic = true;
958 changed = true;
960 return changed;
963 /* Construct modref_access_node from REF. */
965 modref_access_node
966 modref_access_analysis::get_access (ao_ref *ref)
968 tree base;
970 base = ao_ref_base (ref);
971 modref_access_node a = {ref->offset, ref->size, ref->max_size,
972 0, MODREF_UNKNOWN_PARM, false, 0};
973 if (TREE_CODE (base) == MEM_REF || TREE_CODE (base) == TARGET_MEM_REF)
975 tree memref = base;
976 modref_parm_map m = parm_map_for_ptr (TREE_OPERAND (base, 0));
978 a.parm_index = m.parm_index;
979 if (a.parm_index != MODREF_UNKNOWN_PARM && TREE_CODE (memref) == MEM_REF)
981 a.parm_offset_known
982 = wi::to_poly_wide (TREE_OPERAND
983 (memref, 1)).to_shwi (&a.parm_offset);
984 if (a.parm_offset_known && m.parm_offset_known)
985 a.parm_offset += m.parm_offset;
986 else
987 a.parm_offset_known = false;
990 else
991 a.parm_index = MODREF_UNKNOWN_PARM;
992 return a;
995 /* Record access into the modref_records data structure. */
997 void
998 modref_access_analysis::record_access (modref_records *tt,
999 ao_ref *ref,
1000 modref_access_node &a)
1002 alias_set_type base_set = !flag_strict_aliasing ? 0
1003 : ao_ref_base_alias_set (ref);
1004 alias_set_type ref_set = !flag_strict_aliasing ? 0
1005 : (ao_ref_alias_set (ref));
1006 if (dump_file)
1008 fprintf (dump_file, " - Recording base_set=%i ref_set=%i ",
1009 base_set, ref_set);
1010 a.dump (dump_file);
1012 tt->insert (current_function_decl, base_set, ref_set, a, false);
1015 /* IPA (LTO) version of record_access. */
1017 void
1018 modref_access_analysis::record_access_lto (modref_records_lto *tt, ao_ref *ref,
1019 modref_access_node &a)
1021 /* get_alias_set sometimes uses a different type to compute the alias set
1022 than TREE_TYPE (base). Do the same adjustments. */
1023 tree base_type = NULL_TREE, ref_type = NULL_TREE;
1024 if (flag_strict_aliasing)
1026 tree base;
1028 base = ref->ref;
1029 while (handled_component_p (base))
1030 base = TREE_OPERAND (base, 0);
1032 base_type = reference_alias_ptr_type_1 (&base);
1034 if (!base_type)
1035 base_type = TREE_TYPE (base);
1036 else
1037 base_type = TYPE_REF_CAN_ALIAS_ALL (base_type)
1038 ? NULL_TREE : TREE_TYPE (base_type);
1040 tree ref_expr = ref->ref;
1041 ref_type = reference_alias_ptr_type_1 (&ref_expr);
1043 if (!ref_type)
1044 ref_type = TREE_TYPE (ref_expr);
1045 else
1046 ref_type = TYPE_REF_CAN_ALIAS_ALL (ref_type)
1047 ? NULL_TREE : TREE_TYPE (ref_type);
1049 /* Sanity check that we are in sync with what get_alias_set does. */
1050 gcc_checking_assert ((!base_type && !ao_ref_base_alias_set (ref))
1051 || get_alias_set (base_type)
1052 == ao_ref_base_alias_set (ref));
1053 gcc_checking_assert ((!ref_type && !ao_ref_alias_set (ref))
1054 || get_alias_set (ref_type)
1055 == ao_ref_alias_set (ref));
1057 /* Do not bother to record types that have no meaningful alias set.
1058 Also skip variably modified types since these go to local streams. */
1059 if (base_type && (!get_alias_set (base_type)
1060 || variably_modified_type_p (base_type, NULL_TREE)))
1061 base_type = NULL_TREE;
1062 if (ref_type && (!get_alias_set (ref_type)
1063 || variably_modified_type_p (ref_type, NULL_TREE)))
1064 ref_type = NULL_TREE;
1066 if (dump_file)
1068 fprintf (dump_file, " - Recording base type:");
1069 print_generic_expr (dump_file, base_type);
1070 fprintf (dump_file, " (alias set %i) ref type:",
1071 base_type ? get_alias_set (base_type) : 0);
1072 print_generic_expr (dump_file, ref_type);
1073 fprintf (dump_file, " (alias set %i) ",
1074 ref_type ? get_alias_set (ref_type) : 0);
1075 a.dump (dump_file);
1078 tt->insert (current_function_decl, base_type, ref_type, a, false);
1081 /* Returns true if and only if we should store the access to EXPR.
1082 Some accesses, e.g. loads from automatic variables, are not interesting. */
1084 bool
1085 modref_access_analysis::record_access_p (tree expr)
1087 if (TREE_THIS_VOLATILE (expr))
1089 if (dump_file)
1090 fprintf (dump_file, " (volatile; marking nondeterministic) ");
1091 set_nondeterministic ();
1093 if (cfun->can_throw_non_call_exceptions
1094 && tree_could_throw_p (expr))
1096 if (dump_file)
1097 fprintf (dump_file, " (can throw; marking side effects) ");
1098 set_side_effects ();
1101 if (refs_local_or_readonly_memory_p (expr))
1103 if (dump_file)
1104 fprintf (dump_file, " - Read-only or local, ignoring.\n");
1105 return false;
1107 return true;
1110 /* Collapse loads and return true if something changed. */
1112 bool
1113 modref_access_analysis::record_unknown_load ()
1115 bool changed = false;
1117 if (m_summary && !m_summary->loads->every_base)
1119 m_summary->loads->collapse ();
1120 changed = true;
1122 if (m_summary_lto && !m_summary_lto->loads->every_base)
1124 m_summary_lto->loads->collapse ();
1125 changed = true;
1127 return changed;
1130 /* Collapse stores and return true if something changed. */
1132 bool
1133 modref_access_analysis::record_unknown_store ()
1135 bool changed = false;
1137 if (m_summary && !m_summary->stores->every_base)
1139 m_summary->stores->collapse ();
1140 changed = true;
1142 if (m_summary_lto && !m_summary_lto->stores->every_base)
1144 m_summary_lto->stores->collapse ();
1145 changed = true;
1147 return changed;
1150 /* Merge side effects of call STMT to function with CALLEE_SUMMARY.
1151 Return true if something changed.
1152 Stores are not merged when ignore_stores_p holds for the call.
1153 If RECORD_ADJUSTMENTS is true cap the number of adjustments to
1154 a given access to make the dataflow finite. */
1156 bool
1157 modref_access_analysis::merge_call_side_effects
1158 (gimple *stmt, modref_summary *callee_summary,
1159 cgraph_node *callee_node, bool record_adjustments)
1161 int flags = gimple_call_flags (stmt);
1163 /* Nothing to do for non-looping const functions. */
1164 if ((flags & (ECF_CONST | ECF_NOVOPS))
1165 && !(flags & ECF_LOOPING_CONST_OR_PURE))
1166 return false;
1168 bool changed = false;
1170 if (dump_file)
1171 fprintf (dump_file, " - Merging side effects of %s\n",
1172 callee_node->dump_name ());
1174 /* Merge side effects and non-determinism.
1175 PURE/CONST flags make functions deterministic and if there is
1176 no LOOPING_CONST_OR_PURE they also have no side effects. */
1177 if (!(flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
1178 || (flags & ECF_LOOPING_CONST_OR_PURE))
1180 if (!m_summary->side_effects && callee_summary->side_effects)
1182 if (dump_file)
1183 fprintf (dump_file, " - merging side effects.\n");
1184 m_summary->side_effects = true;
1185 changed = true;
1187 if (!m_summary->nondeterministic && callee_summary->nondeterministic
1188 && !ignore_nondeterminism_p (current_function_decl, flags))
1190 if (dump_file)
1191 fprintf (dump_file, " - merging nondeterministic.\n");
1192 m_summary->nondeterministic = true;
1193 changed = true;
1197 /* For const functions we are done. */
1198 if (flags & (ECF_CONST | ECF_NOVOPS))
1199 return changed;
1201 /* Merge calls_interposable flags. */
1202 if (!m_summary->calls_interposable && callee_summary->calls_interposable)
1204 if (dump_file)
1205 fprintf (dump_file, " - merging calls interposable.\n");
1206 m_summary->calls_interposable = true;
1207 changed = true;
1210 if (!callee_node->binds_to_current_def_p () && !m_summary->calls_interposable)
1212 if (dump_file)
1213 fprintf (dump_file, " - May be interposed.\n");
1214 m_summary->calls_interposable = true;
1215 changed = true;
1218 /* Now merge the actual load, store and kill vectors. For this we need
1219 to compute map translating new parameters to old. */
1220 if (dump_file)
1221 fprintf (dump_file, " Parm map:");
1223 auto_vec <modref_parm_map, 32> parm_map;
1224 parm_map.safe_grow_cleared (gimple_call_num_args (stmt), true);
1225 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
1227 parm_map[i] = parm_map_for_ptr (gimple_call_arg (stmt, i));
1228 if (dump_file)
1230 fprintf (dump_file, " %i", parm_map[i].parm_index);
1231 if (parm_map[i].parm_offset_known)
1233 fprintf (dump_file, " offset:");
1234 print_dec ((poly_int64_pod)parm_map[i].parm_offset,
1235 dump_file, SIGNED);
1240 modref_parm_map chain_map;
1241 if (gimple_call_chain (stmt))
1243 chain_map = parm_map_for_ptr (gimple_call_chain (stmt));
1244 if (dump_file)
1246 fprintf (dump_file, "static chain %i", chain_map.parm_index);
1247 if (chain_map.parm_offset_known)
1249 fprintf (dump_file, " offset:");
1250 print_dec ((poly_int64_pod)chain_map.parm_offset,
1251 dump_file, SIGNED);
1255 if (dump_file)
1256 fprintf (dump_file, "\n");
1258 /* Kills can be merged in only if we know the function is going to be
1259 always executed. */
1260 if (m_always_executed
1261 && callee_summary->kills.length ()
1262 && (!cfun->can_throw_non_call_exceptions
1263 || !stmt_could_throw_p (cfun, stmt)))
1265 /* Watch for self recursive updates. */
1266 auto_vec<modref_access_node, 32> saved_kills;
1268 saved_kills.reserve_exact (callee_summary->kills.length ());
1269 saved_kills.splice (callee_summary->kills);
1270 for (auto kill : saved_kills)
1272 if (kill.parm_index >= (int)parm_map.length ())
1273 continue;
1274 modref_parm_map &m
1275 = kill.parm_index == MODREF_STATIC_CHAIN_PARM
1276 ? chain_map
1277 : parm_map[kill.parm_index];
1278 if (m.parm_index == MODREF_LOCAL_MEMORY_PARM
1279 || m.parm_index == MODREF_UNKNOWN_PARM
1280 || m.parm_index == MODREF_RETSLOT_PARM
1281 || !m.parm_offset_known)
1282 continue;
1283 modref_access_node n = kill;
1284 n.parm_index = m.parm_index;
1285 n.parm_offset += m.parm_offset;
1286 if (modref_access_node::insert_kill (m_summary->kills, n,
1287 record_adjustments))
1288 changed = true;
1292 /* Merge in loads. */
1293 changed |= m_summary->loads->merge (current_function_decl,
1294 callee_summary->loads,
1295 &parm_map, &chain_map,
1296 record_adjustments);
1297 /* Merge in stores. */
1298 if (!ignore_stores_p (current_function_decl, flags))
1300 changed |= m_summary->stores->merge (current_function_decl,
1301 callee_summary->stores,
1302 &parm_map, &chain_map,
1303 record_adjustments);
1304 if (!m_summary->writes_errno
1305 && callee_summary->writes_errno)
1307 m_summary->writes_errno = true;
1308 changed = true;
1311 return changed;
1314 /* Return access mode for argument I of call CALL with FNSPEC. */
1316 modref_access_node
1317 modref_access_analysis::get_access_for_fnspec (gcall *call, attr_fnspec &fnspec,
1318 unsigned int i,
1319 modref_parm_map &map)
1321 tree size = NULL_TREE;
1322 unsigned int size_arg;
1324 if (!fnspec.arg_specified_p (i))
1326 else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
1327 size = gimple_call_arg (call, size_arg);
1328 else if (fnspec.arg_access_size_given_by_type_p (i))
1330 tree callee = gimple_call_fndecl (call);
1331 tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));
1333 for (unsigned int p = 0; p < i; p++)
1334 t = TREE_CHAIN (t);
1335 size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
1337 modref_access_node a = {0, -1, -1,
1338 map.parm_offset, map.parm_index,
1339 map.parm_offset_known, 0};
1340 poly_int64 size_hwi;
1341 if (size
1342 && poly_int_tree_p (size, &size_hwi)
1343 && coeffs_in_range_p (size_hwi, 0,
1344 HOST_WIDE_INT_MAX / BITS_PER_UNIT))
1346 a.size = -1;
1347 a.max_size = size_hwi << LOG2_BITS_PER_UNIT;
1349 return a;
1352 /* Apply side effects of call CALL to the summary being computed using its
1353 fnspec. Stores are ignored when the call's ECF flags allow it. */
1356 void
1357 modref_access_analysis::process_fnspec (gcall *call)
1359 int flags = gimple_call_flags (call);
1361 /* PURE/CONST flags make functions deterministic and if there is
1362 no LOOPING_CONST_OR_PURE they also have no side effects. */
1363 if (!(flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
1364 || (flags & ECF_LOOPING_CONST_OR_PURE)
1365 || (cfun->can_throw_non_call_exceptions
1366 && stmt_could_throw_p (cfun, call)))
1368 set_side_effects ();
1369 if (!ignore_nondeterminism_p (current_function_decl, flags))
1370 set_nondeterministic ();
1373 /* For const functions we are done. */
1374 if (flags & (ECF_CONST | ECF_NOVOPS))
1375 return;
1377 attr_fnspec fnspec = gimple_call_fnspec (call);
1378 /* If there is no fnspec we know nothing about loads & stores. */
1379 if (!fnspec.known_p ())
1381 if (dump_file && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1382 fprintf (dump_file, " Builtin with no fnspec: %s\n",
1383 IDENTIFIER_POINTER (DECL_NAME (gimple_call_fndecl (call))));
1384 record_unknown_load ();
1385 if (!ignore_stores_p (current_function_decl, flags))
1386 record_unknown_store ();
1387 return;
1389 /* Process fnspec. */
1390 if (fnspec.global_memory_read_p ())
1391 record_unknown_load ();
1392 else
1394 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
1395 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i))))
1397 else if (!fnspec.arg_specified_p (i)
1398 || fnspec.arg_maybe_read_p (i))
1400 modref_parm_map map = parm_map_for_ptr
1401 (gimple_call_arg (call, i));
1403 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
1404 continue;
1405 if (map.parm_index == MODREF_UNKNOWN_PARM)
1407 record_unknown_load ();
1408 break;
1410 modref_access_node a = get_access_for_fnspec (call, fnspec, i, map);
1411 if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
1412 continue;
1413 if (m_summary)
1414 m_summary->loads->insert (current_function_decl, 0, 0, a, false);
1415 if (m_summary_lto)
1416 m_summary_lto->loads->insert (current_function_decl, 0, 0, a,
1417 false);
1420 if (ignore_stores_p (current_function_decl, flags))
1421 return;
1422 if (fnspec.global_memory_written_p ())
1423 record_unknown_store ();
1424 else
1426 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
1427 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i))))
1429 else if (!fnspec.arg_specified_p (i)
1430 || fnspec.arg_maybe_written_p (i))
1432 modref_parm_map map = parm_map_for_ptr
1433 (gimple_call_arg (call, i));
1435 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
1436 continue;
1437 if (map.parm_index == MODREF_UNKNOWN_PARM)
1439 record_unknown_store ();
1440 break;
1442 modref_access_node a = get_access_for_fnspec (call, fnspec, i, map);
1443 if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
1444 continue;
1445 if (m_summary)
1446 m_summary->stores->insert (current_function_decl, 0, 0, a, false);
1447 if (m_summary_lto)
1448 m_summary_lto->stores->insert (current_function_decl,
1449 0, 0, a, false);
1451 if (fnspec.errno_maybe_written_p () && flag_errno_math)
1453 if (m_summary)
1454 m_summary->writes_errno = true;
1455 if (m_summary_lto)
1456 m_summary_lto->writes_errno = true;
1461 /* Analyze function call STMT. Recursive calls are remembered in
1462 m_recursive_calls and later processed by propagate. */
1464 void
1465 modref_access_analysis::analyze_call (gcall *stmt)
1467 /* Check flags on the function call. In certain cases, analysis can be
1468 simplified. */
1469 int flags = gimple_call_flags (stmt);
1471 if ((flags & (ECF_CONST | ECF_NOVOPS))
1472 && !(flags & ECF_LOOPING_CONST_OR_PURE))
1474 if (dump_file)
1475 fprintf (dump_file,
1476 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
1477 "except for args.\n");
1478 return;
1481 /* Next, we try to get the callee's function declaration. The goal is to
1482 merge their summary with ours. */
1483 tree callee = gimple_call_fndecl (stmt);
1485 /* Check if this is an indirect call. */
1486 if (!callee)
1488 if (dump_file)
1489 fprintf (dump_file, gimple_call_internal_p (stmt)
1490 ? " - Internal call" : " - Indirect call.\n");
1491 process_fnspec (stmt);
1492 return;
1494 /* We only need to handle internal calls in IPA mode. */
1495 gcc_checking_assert (!m_summary_lto && !m_ipa);
1497 struct cgraph_node *callee_node = cgraph_node::get_create (callee);
1499 /* If this is a recursive call, the target summary is the same as ours, so
1500 there's nothing to do. */
1501 if (recursive_call_p (current_function_decl, callee))
1503 m_recursive_calls.safe_push (stmt);
1504 set_side_effects ();
1505 if (dump_file)
1506 fprintf (dump_file, " - Skipping recursive call.\n");
1507 return;
1510 gcc_assert (callee_node != NULL);
1512 /* Get the function symbol and its availability. */
1513 enum availability avail;
1514 callee_node = callee_node->function_symbol (&avail);
1515 bool looping;
1516 if (builtin_safe_for_const_function_p (&looping, callee))
1518 if (looping)
1519 set_side_effects ();
1520 if (dump_file)
1521 fprintf (dump_file, " - Builtin is safe for const.\n");
1522 return;
1524 if (avail <= AVAIL_INTERPOSABLE)
1526 if (dump_file)
1527 fprintf (dump_file,
1528 " - Function availability <= AVAIL_INTERPOSABLE.\n");
1529 process_fnspec (stmt);
1530 return;
1533 /* Get callee's modref summary. As above, if there's no summary, we either
1534 have to give up or, if stores are ignored, we can just purge loads. */
1535 modref_summary *callee_summary = optimization_summaries->get (callee_node);
1536 if (!callee_summary)
1538 if (dump_file)
1539 fprintf (dump_file, " - No modref summary available for callee.\n");
1540 process_fnspec (stmt);
1541 return;
1544 merge_call_side_effects (stmt, callee_summary, callee_node, false);
1546 return;
1549 /* Helper for analyze_stmt. */
1551 bool
1552 modref_access_analysis::analyze_load (gimple *, tree, tree op, void *data)
1554 modref_access_analysis *t = (modref_access_analysis *)data;
1556 if (dump_file)
1558 fprintf (dump_file, " - Analyzing load: ");
1559 print_generic_expr (dump_file, op);
1560 fprintf (dump_file, "\n");
1563 if (!t->record_access_p (op))
1564 return false;
1566 ao_ref r;
1567 ao_ref_init (&r, op);
1568 modref_access_node a = get_access (&r);
1569 if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
1570 return false;
1572 if (t->m_summary)
1573 t->record_access (t->m_summary->loads, &r, a);
1574 if (t->m_summary_lto)
1575 t->record_access_lto (t->m_summary_lto->loads, &r, a);
1576 return false;
1579 /* Helper for analyze_stmt. */
1581 bool
1582 modref_access_analysis::analyze_store (gimple *stmt, tree, tree op, void *data)
1584 modref_access_analysis *t = (modref_access_analysis *)data;
1586 if (dump_file)
1588 fprintf (dump_file, " - Analyzing store: ");
1589 print_generic_expr (dump_file, op);
1590 fprintf (dump_file, "\n");
1593 if (!t->record_access_p (op))
1594 return false;
1596 ao_ref r;
1597 ao_ref_init (&r, op);
1598 modref_access_node a = get_access (&r);
1599 if (a.parm_index == MODREF_LOCAL_MEMORY_PARM)
1600 return false;
1602 if (t->m_summary)
1603 t->record_access (t->m_summary->stores, &r, a);
1604 if (t->m_summary_lto)
1605 t->record_access_lto (t->m_summary_lto->stores, &r, a);
1606 if (t->m_always_executed
1607 && a.useful_for_kill_p ()
1608 && (!cfun->can_throw_non_call_exceptions
1609 || !stmt_could_throw_p (cfun, stmt)))
1611 if (dump_file)
1612 fprintf (dump_file, " - Recording kill\n");
1613 if (t->m_summary)
1614 modref_access_node::insert_kill (t->m_summary->kills, a, false);
1615 if (t->m_summary_lto)
1616 modref_access_node::insert_kill (t->m_summary_lto->kills, a, false);
1618 return false;
1621 /* Analyze statement STMT. ALWAYS_EXECUTED is true if STMT is known
1622 to be executed every time the function is invoked. */
1624 void
1625 modref_access_analysis::analyze_stmt (gimple *stmt, bool always_executed)
1627 m_always_executed = always_executed;
1628 /* In general we can not ignore clobbers because they are barriers for code
1629 motion, however after inlining it is safe to do so because local
1630 optimization passes do not consider clobbers from other functions.
1631 Similar logic is in ipa-pure-const.c. */
1632 if ((m_ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
1634 if (always_executed && record_access_p (gimple_assign_lhs (stmt)))
1636 ao_ref r;
1637 ao_ref_init (&r, gimple_assign_lhs (stmt));
1638 modref_access_node a = get_access (&r);
1639 if (a.useful_for_kill_p ())
1641 if (dump_file)
1642 fprintf (dump_file, " - Recording kill\n");
1643 if (m_summary)
1644 modref_access_node::insert_kill (m_summary->kills, a, false);
1645 if (m_summary_lto)
1646 modref_access_node::insert_kill (m_summary_lto->kills,
1647 a, false);
1650 return;
1653 /* Analyze all loads and stores in STMT. */
1654 walk_stmt_load_store_ops (stmt, this,
1655 analyze_load, analyze_store);
1657 switch (gimple_code (stmt))
1659 case GIMPLE_ASM:
1660 if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
1661 set_nondeterministic ();
1662 if (cfun->can_throw_non_call_exceptions
1663 && stmt_could_throw_p (cfun, stmt))
1664 set_side_effects ();
1665 /* If the ASM statement neither reads nor writes memory, there's nothing
1666 to do. Otherwise just give up. */
1667 if (!gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
1668 return;
1669 if (dump_file)
1670 fprintf (dump_file, " - Function contains GIMPLE_ASM statement "
1671 "which clobbers memory.\n");
1672 record_unknown_load ();
1673 record_unknown_store ();
1674 return;
1675 case GIMPLE_CALL:
1676 if (!m_ipa || gimple_call_internal_p (stmt))
1677 analyze_call (as_a <gcall *> (stmt));
1678 else
1680 attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
1682 if (fnspec.known_p ()
1683 && (!fnspec.global_memory_read_p ()
1684 || !fnspec.global_memory_written_p ()))
1686 cgraph_edge *e = cgraph_node::get
1687 (current_function_decl)->get_edge (stmt);
1688 if (e->callee)
1690 fnspec_summaries->get_create (e)->fnspec
1691 = xstrdup (fnspec.get_str ());
1692 if (dump_file)
1693 fprintf (dump_file, " Recorded fnspec %s\n",
1694 fnspec.get_str ());
1698 return;
1699 default:
1700 if (cfun->can_throw_non_call_exceptions
1701 && stmt_could_throw_p (cfun, stmt))
1702 set_side_effects ();
1703 return;
1707 /* Propagate loads/stores across recursive calls. */
1709 void
1710 modref_access_analysis::propagate ()
1712 if (m_ipa && m_summary)
1713 return;
1715 bool changed = true;
1716 bool first = true;
1717 cgraph_node *fnode = cgraph_node::get (current_function_decl);
1719 m_always_executed = false;
1720 while (changed && m_summary->useful_p (m_ecf_flags, false))
1722 changed = false;
1723 for (unsigned i = 0; i < m_recursive_calls.length (); i++)
1725 changed |= merge_call_side_effects (m_recursive_calls[i], m_summary,
1726 fnode, !first);
1728 first = false;
1732 /* Analyze function. */
1734 void
1735 modref_access_analysis::analyze ()
1737 m_ecf_flags = flags_from_decl_or_type (current_function_decl);
1738 bool summary_useful = true;
1740 /* Analyze each statement in each basic block of the function. If the
1741 statement cannot be analyzed (for any reason), the entire function cannot
1742 be analyzed by modref. */
1743 basic_block bb;
1744 FOR_EACH_BB_FN (bb, cfun)
1746 gimple_stmt_iterator si;
1747 bool always_executed
1748 = bb == single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1750 for (si = gsi_start_nondebug_after_labels_bb (bb);
1751 !gsi_end_p (si); gsi_next_nondebug (&si))
1753 analyze_stmt (gsi_stmt (si), always_executed);
1755 /* Avoid doing useless work. */
1756 if ((!m_summary || !m_summary->useful_p (m_ecf_flags, false))
1757 && (!m_summary_lto
1758 || !m_summary_lto->useful_p (m_ecf_flags, false)))
1760 summary_useful = false;
1761 break;
1763 if (always_executed
1764 && stmt_can_throw_external (cfun, gsi_stmt (si)))
1765 always_executed = false;
1767 if (!summary_useful)
1768 break;
1770 /* In non-IPA mode we need to perform iterative dataflow on recursive calls.
1771 This needs to be done after all other side effects are computed. */
1772 if (summary_useful)
1774 if (!m_ipa)
1775 propagate ();
1776 if (m_summary && !m_summary->side_effects && !finite_function_p ())
1777 m_summary->side_effects = true;
1778 if (m_summary_lto && !m_summary_lto->side_effects
1779 && !finite_function_p ())
1780 m_summary_lto->side_effects = true;
1784 /* Return true if OP accesses memory pointed to by SSA_NAME. */
1786 bool
1787 memory_access_to (tree op, tree ssa_name)
1789 tree base = get_base_address (op);
1790 if (!base)
1791 return false;
1792 if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
1793 return false;
1794 return TREE_OPERAND (base, 0) == ssa_name;
1797 /* Consider statement val = *arg.
1798 Return EAF flags of ARG that can be determined from EAF flags of VAL
1799 (which are known to be FLAGS). If IGNORE_STORES is true we can ignore
1800 all stores to VAL, e.g. when handling a noreturn function. */
1802 static int
1803 deref_flags (int flags, bool ignore_stores)
1805 /* Dereference is also a direct read but dereferenced value does not
1806 yield any other direct use. */
1807 int ret = EAF_NO_DIRECT_CLOBBER | EAF_NO_DIRECT_ESCAPE
1808 | EAF_NOT_RETURNED_DIRECTLY;
1809 /* If argument is unused just account for
1810 the read involved in dereference. */
1811 if (flags & EAF_UNUSED)
1812 ret |= EAF_NO_INDIRECT_READ | EAF_NO_INDIRECT_CLOBBER
1813 | EAF_NO_INDIRECT_ESCAPE;
1814 else
1816 /* Direct or indirect accesses lead to indirect accesses. */
1817 if (((flags & EAF_NO_DIRECT_CLOBBER)
1818 && (flags & EAF_NO_INDIRECT_CLOBBER))
1819 || ignore_stores)
1820 ret |= EAF_NO_INDIRECT_CLOBBER;
1821 if (((flags & EAF_NO_DIRECT_ESCAPE)
1822 && (flags & EAF_NO_INDIRECT_ESCAPE))
1823 || ignore_stores)
1824 ret |= EAF_NO_INDIRECT_ESCAPE;
1825 if ((flags & EAF_NO_DIRECT_READ)
1826 && (flags & EAF_NO_INDIRECT_READ))
1827 ret |= EAF_NO_INDIRECT_READ;
1828 if ((flags & EAF_NOT_RETURNED_DIRECTLY)
1829 && (flags & EAF_NOT_RETURNED_INDIRECTLY))
1830 ret |= EAF_NOT_RETURNED_INDIRECTLY;
1832 return ret;
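/* Worked example: for val = *arg, if VAL satisfies EAF_UNUSED, then ARG
   is only read directly by the dereference itself and deref_flags
   returns EAF_NO_DIRECT_CLOBBER | EAF_NO_DIRECT_ESCAPE
   | EAF_NOT_RETURNED_DIRECTLY | EAF_NO_INDIRECT_READ
   | EAF_NO_INDIRECT_CLOBBER | EAF_NO_INDIRECT_ESCAPE.  If VAL may
   instead be clobbered directly, memory pointed to by ARG may be
   clobbered indirectly, so EAF_NO_INDIRECT_CLOBBER is not added.  */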
1836 /* Description of an escape point: a call which affects flags of a given
1837 SSA name. */
1839 struct escape_point
1841 /* Value escapes to this call. */
1842 gcall *call;
1843 /* Argument it escapes to. */
1844 int arg;
1845 /* Flags already known about the argument (this can save us from recording
1846 escape points if local analysis already did a good job). */
1847 eaf_flags_t min_flags;
1848 /* Does value escape directly or indirectly? */
1849 bool direct;
1852 /* Lattice used during the EAF flags analysis dataflow. For a given SSA name
1853 we aim to compute its flags and escape points. We also use the lattice
1854 to dynamically build a dataflow graph to propagate on. */
1856 class modref_lattice
1858 public:
1859 /* EAF flags of the SSA name. */
1860 eaf_flags_t flags;
1861 /* Used during DFS walk to mark names where final value was determined
1862 without need for dataflow. */
1863 bool known;
1864 /* Used during DFS walk to mark open vertices (for cycle detection). */
1865 bool open;
1866 /* Set during DFS walk for names that need dataflow propagation. */
1867 bool do_dataflow;
1868 /* Used during the iterative dataflow. */
1869 bool changed;
1871 /* When doing IPA analysis we can not merge in callee escape points;
1872 we only remember them and do the merging at IPA propagation time. */
1873 vec <escape_point, va_heap, vl_ptr> escape_points;
1875 /* Representation of a graph for dataflow. This graph is built on-demand
1876 using modref_eaf_analysis::analyze_ssa and later solved by
1877 modref_eaf_analysis::propagate.
1878 Each edge represents the fact that flags of current lattice should be
1879 propagated to lattice of SSA_NAME. */
1880 struct propagate_edge
1882 int ssa_name;
1883 bool deref;
1885 vec <propagate_edge, va_heap, vl_ptr> propagate_to;
1887 void init ();
1888 void release ();
1889 bool merge (const modref_lattice &with);
1890 bool merge (int flags);
1891 bool merge_deref (const modref_lattice &with, bool ignore_stores);
1892 bool merge_direct_load ();
1893 bool merge_direct_store ();
1894 bool add_escape_point (gcall *call, int arg, int min_flags, bool direct);
1895 void dump (FILE *out, int indent = 0) const;
1898 /* Lattices are saved to vectors, so keep them PODs. */
1899 void
1900 modref_lattice::init ()
1902 /* All flags we track. */
1903 int f = EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER
1904 | EAF_NO_DIRECT_ESCAPE | EAF_NO_INDIRECT_ESCAPE
1905 | EAF_NO_DIRECT_READ | EAF_NO_INDIRECT_READ
1906 | EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY
1907 | EAF_UNUSED;
1908 flags = f;
1909 /* Check that eaf_flags_t is wide enough to hold all flags. */
1910 gcc_checking_assert (f == flags);
1911 open = true;
1912 known = false;
1915 /* Release memory. */
1916 void
1917 modref_lattice::release ()
1919 escape_points.release ();
1920 propagate_to.release ();
1923 /* Dump lattice to OUT; indent with INDENT spaces. */
1925 void
1926 modref_lattice::dump (FILE *out, int indent) const
1928 dump_eaf_flags (out, flags);
1929 if (escape_points.length ())
1931 fprintf (out, "%*sEscapes:\n", indent, "");
1932 for (unsigned int i = 0; i < escape_points.length (); i++)
1934 fprintf (out, "%*s Arg %i (%s) min flags", indent, "",
1935 escape_points[i].arg,
1936 escape_points[i].direct ? "direct" : "indirect");
1937 dump_eaf_flags (out, escape_points[i].min_flags, false);
1938 fprintf (out, " in call ");
1939 print_gimple_stmt (out, escape_points[i].call, 0);
1944 /* Add escape point CALL, ARG, MIN_FLAGS, DIRECT. Return false if such an
1945 escape point already exists. */
1947 bool
1948 modref_lattice::add_escape_point (gcall *call, int arg, int min_flags,
1949 bool direct)
1951 escape_point *ep;
1952 unsigned int i;
1954 /* If we already determined flags to be bad enough,
1955 we do not need to record. */
1956 if ((flags & min_flags) == flags || (min_flags & EAF_UNUSED))
1957 return false;
1959 FOR_EACH_VEC_ELT (escape_points, i, ep)
1960 if (ep->call == call && ep->arg == arg && ep->direct == direct)
1962 if ((ep->min_flags & min_flags) == min_flags)
1963 return false;
1964 ep->min_flags &= min_flags;
1965 return true;
1967 /* Give up if the limit on escape points is reached. */
1968 if ((int)escape_points.length () > param_modref_max_escape_points)
1970 if (dump_file)
1971 fprintf (dump_file, "--param modref-max-escape-points limit reached\n");
1972 merge (0);
1973 return true;
1975 escape_point new_ep = {call, arg, min_flags, direct};
1976 escape_points.safe_push (new_ep);
1977 return true;
1980 /* Merge in flags from F. */
1981 bool
1982 modref_lattice::merge (int f)
1984 if (f & EAF_UNUSED)
1985 return false;
1986 /* Check that the flags seem sane: if the function does not read the
1987 parameter it cannot access it indirectly. */
1988 gcc_checking_assert (!(f & EAF_NO_DIRECT_READ)
1989 || ((f & EAF_NO_INDIRECT_READ)
1990 && (f & EAF_NO_INDIRECT_CLOBBER)
1991 && (f & EAF_NO_INDIRECT_ESCAPE)
1992 && (f & EAF_NOT_RETURNED_INDIRECTLY)));
1993 if ((flags & f) != flags)
1995 flags &= f;
1996 /* Prune obviously useless flags;
1997 we do not have the ECF flags handy, which is not a big problem since
1998 we will do a final flags cleanup before producing the summary.
1999 Merging should be fast so it can work well with dataflow. */
2000 flags = remove_useless_eaf_flags (flags, 0, false);
2001 if (!flags)
2002 escape_points.release ();
2003 return true;
2005 return false;
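/* Illustration (a minimal self-contained sketch, not GCC API; names are
   hypothetical): merge above is a monotone step on a bitmask lattice.
   The flag set can only shrink, which is what guarantees termination of
   the iterative dataflow later.  */
#include <cstdio>

struct bitmask_lattice
{
  unsigned flags;

  /* Keep only flags also present in F; report whether anything changed.  */
  bool
  merge (unsigned f)
  {
    if ((flags & f) == flags)
      return false;
    flags &= f;
    return true;
  }
};

int
main ()
{
  bitmask_lattice l = { 0xff };
  bool ch = l.merge (0x0f);	/* flag set shrinks */
  printf ("changed=%d flags=%#x\n", ch, l.flags);
  ch = l.merge (0x0f);		/* repeating the merge is a no-op */
  printf ("changed=%d flags=%#x\n", ch, l.flags);
}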
2008 /* Merge in WITH. Return true if anything changed. */
2010 bool
2011 modref_lattice::merge (const modref_lattice &with)
2013 if (!with.known)
2014 do_dataflow = true;
2016 bool changed = merge (with.flags);
2018 if (!flags)
2019 return changed;
2020 for (unsigned int i = 0; i < with.escape_points.length (); i++)
2021 changed |= add_escape_point (with.escape_points[i].call,
2022 with.escape_points[i].arg,
2023 with.escape_points[i].min_flags,
2024 with.escape_points[i].direct);
2025 return changed;
2028 /* Merge in deref of WITH. If IGNORE_STORES is true do not consider
2029 stores. Return true if anything changed. */
2031 bool
2032 modref_lattice::merge_deref (const modref_lattice &with, bool ignore_stores)
2034 if (!with.known)
2035 do_dataflow = true;
2037 bool changed = merge (deref_flags (with.flags, ignore_stores));
2039 if (!flags)
2040 return changed;
2041 for (unsigned int i = 0; i < with.escape_points.length (); i++)
2043 int min_flags = with.escape_points[i].min_flags;
2045 if (with.escape_points[i].direct)
2046 min_flags = deref_flags (min_flags, ignore_stores);
2047 else if (ignore_stores)
2048 min_flags |= ignore_stores_eaf_flags;
2049 changed |= add_escape_point (with.escape_points[i].call,
2050 with.escape_points[i].arg,
2051 min_flags,
2052 false);
2054 return changed;
2057 /* Merge in flags for direct load. */
2059 bool
2060 modref_lattice::merge_direct_load ()
2062 return merge (~(EAF_UNUSED | EAF_NO_DIRECT_READ));
2065 /* Merge in flags for direct store. */
2067 bool
2068 modref_lattice::merge_direct_store ()
2070 return merge (~(EAF_UNUSED | EAF_NO_DIRECT_CLOBBER));
2073 /* Analyzer of EAF flags.
2074 This is generally a dataflow problem over the SSA graph; however, we only
2075 care about flags of a few selected SSA names (arguments, return slot and
2076 static chain). So we first call analyze_ssa_name on all relevant names
2077 and perform a DFS walk to discover SSA names where flags need to be
2078 determined. For acyclic graphs we try to determine final flags during
2079 this walk. Once a cycle or the recursion depth limit is met, we enlist
2080 SSA names for dataflow, which is done by the propagate call.
2082 After propagation the flags can be obtained using get_ssa_name_flags. */
2084 class modref_eaf_analysis
2086 public:
2087 /* Mark NAME as relevant for analysis. */
2088 void analyze_ssa_name (tree name);
2089 /* Dataflow solver. */
2090 void propagate ();
2091 /* Return flags computed earlier for NAME. */
2092 int get_ssa_name_flags (tree name)
2094 int version = SSA_NAME_VERSION (name);
2095 gcc_checking_assert (m_lattice[version].known);
2096 return m_lattice[version].flags;
2098 /* In IPA mode this will record all escape points
2099 determined for NAME to PARM_INDEX. FLAGS are the minimal
2100 flags known. */
2101 void record_escape_points (tree name, int parm_index, int flags);
2102 modref_eaf_analysis (bool ipa)
2104 m_ipa = ipa;
2105 m_depth = 0;
2106 m_lattice.safe_grow_cleared (num_ssa_names, true);
2108 ~modref_eaf_analysis ()
2110 gcc_checking_assert (!m_depth);
2111 if (m_ipa || m_names_to_propagate.length ())
2112 for (unsigned int i = 0; i < num_ssa_names; i++)
2113 m_lattice[i].release ();
2115 private:
2116 /* If true, we produce analysis for IPA mode. In this case escape points
2117 are collected. */
2118 bool m_ipa;
2119 /* Depth of recursion of analyze_ssa_name. */
2120 int m_depth;
2121 /* Propagation lattice for individual ssa names. */
2122 auto_vec<modref_lattice> m_lattice;
2123 auto_vec<tree> m_deferred_names;
2124 auto_vec<int> m_names_to_propagate;
2126 void merge_with_ssa_name (tree dest, tree src, bool deref);
2127 void merge_call_lhs_flags (gcall *call, int arg, tree name, bool direct,
2128 bool indirect);
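/* Illustration (a compact self-contained sketch of the open/known/
   do_dataflow protocol described above, on a toy dependency graph; the
   toy_* names are hypothetical, not GCC API).  "open" marks vertices on
   the DFS stack, so a revisit signals a cycle; acyclic parts are
   finalized ("known") on the way out, and anything involved in a cycle
   is deferred to the iterative solver.  */
#include <vector>

struct toy_vertex
{
  std::vector<int> deps;
  bool open = false, known = false, do_dataflow = false;
};

static void
toy_dfs (std::vector<toy_vertex> &g, int i)
{
  if (g[i].known || g[i].do_dataflow)
    return;
  if (g[i].open)
    return;			/* back edge: cycle detected */
  g[i].open = true;
  for (int d : g[i].deps)
    {
      toy_dfs (g, d);
      if (!g[d].known)
	g[i].do_dataflow = true;	/* dependency unresolved: defer */
    }
  g[i].open = false;
  if (!g[i].do_dataflow)
    g[i].known = true;		/* acyclic case: final on the way out */
}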
2132 /* Call statements may return their parameters. Consider argument number
2133 ARG of CALL and determine the flags that need to be cleared in case the
2134 pointer possibly indirectly referenced by ARG is returned.
2135 If DIRECT is true consider direct returns and if INDIRECT consider
2136 indirect returns.
2138 ARG is set to -1 for the static chain. */
2140 void
2141 modref_eaf_analysis::merge_call_lhs_flags (gcall *call, int arg,
2142 tree name, bool direct,
2143 bool indirect)
2145 int index = SSA_NAME_VERSION (name);
2146 bool returned_directly = false;
2148 /* If there is no return value, no flags are affected. */
2149 if (!gimple_call_lhs (call))
2150 return;
2152 /* If we know that function returns given argument and it is not ARG
2153 we can still be happy. */
2154 if (arg >= 0)
2156 int flags = gimple_call_return_flags (call);
2157 if (flags & ERF_RETURNS_ARG)
2159 if ((flags & ERF_RETURN_ARG_MASK) == arg)
2160 returned_directly = true;
2161 else
2162 return;
2165 /* Make ERF_RETURNS_ARG overwrite EAF_UNUSED. */
2166 if (returned_directly)
2168 direct = true;
2169 indirect = false;
2171 /* If value is not returned at all, do nothing. */
2172 else if (!direct && !indirect)
2173 return;
2175 /* If return value is SSA name determine its flags. */
2176 if (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME)
2178 tree lhs = gimple_call_lhs (call);
2179 if (direct)
2180 merge_with_ssa_name (name, lhs, false);
2181 if (indirect)
2182 merge_with_ssa_name (name, lhs, true);
2184 /* In the case of memory store we can do nothing. */
2185 else if (!direct)
2186 m_lattice[index].merge (deref_flags (0, false));
2187 else
2188 m_lattice[index].merge (0);
2191 /* CALL_FLAGS are EAF_FLAGS of the argument. Turn them
2192 into flags for caller, update LATTICE of corresponding
2193 argument if needed. */
2195 static int
2196 callee_to_caller_flags (int call_flags, bool ignore_stores,
2197 modref_lattice &lattice)
2199 /* call_flags is about the callee returning a value, which
2200 is not the same as the caller returning it. */
2201 call_flags |= EAF_NOT_RETURNED_DIRECTLY
2202 | EAF_NOT_RETURNED_INDIRECTLY;
2203 if (!ignore_stores && !(call_flags & EAF_UNUSED))
2205 /* If value escapes we are no longer able to track what happens
2206 with it because we can read it from the escaped location
2207 anytime. */
2208 if (!(call_flags & EAF_NO_DIRECT_ESCAPE))
2209 lattice.merge (0);
2210 else if (!(call_flags & EAF_NO_INDIRECT_ESCAPE))
2211 lattice.merge (~(EAF_NOT_RETURNED_INDIRECTLY
2212 | EAF_NO_DIRECT_READ
2213 | EAF_NO_INDIRECT_READ
2214 | EAF_NO_INDIRECT_CLOBBER
2215 | EAF_UNUSED));
2217 else
2218 call_flags |= ignore_stores_eaf_flags;
2219 return call_flags;
2222 /* Analyze EAF flags for SSA name NAME and store the result to m_lattice,
2223 which is an array of modref_lattices indexed by SSA name version.
2224 m_depth is the recursion depth used to make debug output prettier.
2225 If m_ipa is true we analyze for IPA propagation (and thus call escape
2226 points are processed later). */
2228 void
2229 modref_eaf_analysis::analyze_ssa_name (tree name)
2231 imm_use_iterator ui;
2232 gimple *use_stmt;
2233 int index = SSA_NAME_VERSION (name);
2235 /* See if value is already computed. */
2236 if (m_lattice[index].known || m_lattice[index].do_dataflow)
2237 return;
2238 if (m_lattice[index].open)
2240 if (dump_file)
2241 fprintf (dump_file,
2242 "%*sCycle in SSA graph\n",
2243 m_depth * 4, "");
2244 return;
2246 /* Recursion guard. */
2247 m_lattice[index].init ();
2248 if (m_depth == param_modref_max_depth)
2250 if (dump_file)
2251 fprintf (dump_file,
2252 "%*sMax recursion depth reached; postponing\n",
2253 m_depth * 4, "");
2254 m_deferred_names.safe_push (name);
2255 return;
2258 if (dump_file)
2260 fprintf (dump_file,
2261 "%*sAnalyzing flags of ssa name: ", m_depth * 4, "");
2262 print_generic_expr (dump_file, name);
2263 fprintf (dump_file, "\n");
2266 FOR_EACH_IMM_USE_STMT (use_stmt, ui, name)
2268 if (m_lattice[index].flags == 0)
2269 break;
2270 if (is_gimple_debug (use_stmt))
2271 continue;
2272 if (dump_file)
2274 fprintf (dump_file, "%*s Analyzing stmt: ", m_depth * 4, "");
2275 print_gimple_stmt (dump_file, use_stmt, 0);
2277 /* If we see a direct non-debug use, clear the unused bit.
2278 All dereferences should be accounted for below using deref_flags. */
2279 m_lattice[index].merge (~EAF_UNUSED);
2281 /* A gimple return may load the return value.
2282 Returning a name counts as a use by tree-ssa-structalias.c. */
2283 if (greturn *ret = dyn_cast <greturn *> (use_stmt))
2285 /* Returning through return slot is seen as memory write earlier. */
2286 if (DECL_RESULT (current_function_decl)
2287 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
2289 else if (gimple_return_retval (ret) == name)
2290 m_lattice[index].merge (~(EAF_UNUSED | EAF_NOT_RETURNED_DIRECTLY
2291 | EAF_NOT_RETURNED_INDIRECTLY));
2292 else if (memory_access_to (gimple_return_retval (ret), name))
2294 m_lattice[index].merge_direct_load ();
2295 m_lattice[index].merge (~(EAF_UNUSED
2296 | EAF_NOT_RETURNED_INDIRECTLY));
2299 /* Account for LHS store, arg loads and flags from callee function. */
2300 else if (gcall *call = dyn_cast <gcall *> (use_stmt))
2302 tree callee = gimple_call_fndecl (call);
2304 /* IPA PTA internally treats calling a function as "writing" to
2305 the argument space of all functions the function pointer points to
2306 (PR101949). We cannot drop EAF_NO_DIRECT_CLOBBER only when ipa-pta
2307 is on since that would allow propagation of this from -fno-ipa-pta
2308 to -fipa-pta functions. */
2309 if (gimple_call_fn (use_stmt) == name)
2310 m_lattice[index].merge (~(EAF_NO_DIRECT_CLOBBER | EAF_UNUSED));
2312 /* Recursion would require a bit of propagation; give up for now. */
2313 if (callee && !m_ipa && recursive_call_p (current_function_decl,
2314 callee))
2315 m_lattice[index].merge (0);
2316 else
2318 int ecf_flags = gimple_call_flags (call);
2319 bool ignore_stores = ignore_stores_p (current_function_decl,
2320 ecf_flags);
2321 bool ignore_retval = ignore_retval_p (current_function_decl,
2322 ecf_flags);
2324 /* Handle *name = func (...). */
2325 if (gimple_call_lhs (call)
2326 && memory_access_to (gimple_call_lhs (call), name))
2328 m_lattice[index].merge_direct_store ();
2329 /* Return slot optimization passes address of
2330 LHS to callee via hidden parameter and this
2331 may make LHS escape. See PR 98499. */
2332 if (gimple_call_return_slot_opt_p (call)
2333 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call))))
2335 int call_flags = gimple_call_retslot_flags (call);
2336 bool isretslot = false;
2338 if (DECL_RESULT (current_function_decl)
2339 && DECL_BY_REFERENCE
2340 (DECL_RESULT (current_function_decl)))
2341 isretslot = ssa_default_def
2342 (cfun,
2343 DECL_RESULT (current_function_decl))
2344 == name;
2346 /* Passing returnslot to return slot is special because
2347 not_returned and escape have the same meaning.
2348 However passing arg to return slot is different. If
2349 the callee's return slot is returned it means that
2350 arg is written to itself which is an escape.
2351 Since we do not track the memory it is written to we
2352 need to give up on analyzing it. */
2353 if (!isretslot)
2355 if (!(call_flags & (EAF_NOT_RETURNED_DIRECTLY
2356 | EAF_UNUSED)))
2357 m_lattice[index].merge (0);
2358 else gcc_checking_assert
2359 (call_flags & (EAF_NOT_RETURNED_INDIRECTLY
2360 | EAF_UNUSED));
2361 call_flags = callee_to_caller_flags
2362 (call_flags, false,
2363 m_lattice[index]);
2365 m_lattice[index].merge (call_flags);
2369 if (gimple_call_chain (call)
2370 && (gimple_call_chain (call) == name))
2372 int call_flags = gimple_call_static_chain_flags (call);
2373 if (!ignore_retval && !(call_flags & EAF_UNUSED))
2374 merge_call_lhs_flags
2375 (call, -1, name,
2376 !(call_flags & EAF_NOT_RETURNED_DIRECTLY),
2377 !(call_flags & EAF_NOT_RETURNED_INDIRECTLY));
2378 call_flags = callee_to_caller_flags
2379 (call_flags, ignore_stores,
2380 m_lattice[index]);
2381 if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS)))
2382 m_lattice[index].merge (call_flags);
2385 /* Process internal functions right away. */
2386 bool record_ipa = m_ipa && !gimple_call_internal_p (call);
2388 /* Handle all function parameters. */
2389 for (unsigned i = 0;
2390 i < gimple_call_num_args (call)
2391 && m_lattice[index].flags; i++)
2392 /* Name is directly passed to the callee. */
2393 if (gimple_call_arg (call, i) == name)
2395 int call_flags = gimple_call_arg_flags (call, i);
2396 if (!ignore_retval)
2397 merge_call_lhs_flags
2398 (call, i, name,
2399 !(call_flags & (EAF_NOT_RETURNED_DIRECTLY
2400 | EAF_UNUSED)),
2401 !(call_flags & (EAF_NOT_RETURNED_INDIRECTLY
2402 | EAF_UNUSED)));
2403 if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS)))
2405 call_flags = callee_to_caller_flags
2406 (call_flags, ignore_stores,
2407 m_lattice[index]);
2408 if (!record_ipa)
2409 m_lattice[index].merge (call_flags);
2410 else
2411 m_lattice[index].add_escape_point (call, i,
2412 call_flags, true);
2415 /* Name is dereferenced and passed to a callee. */
2416 else if (memory_access_to (gimple_call_arg (call, i), name))
2418 int call_flags = deref_flags
2419 (gimple_call_arg_flags (call, i), ignore_stores);
2420 if (!ignore_retval && !(call_flags & EAF_UNUSED)
2421 && !(call_flags & EAF_NOT_RETURNED_DIRECTLY)
2422 && !(call_flags & EAF_NOT_RETURNED_INDIRECTLY))
2423 merge_call_lhs_flags (call, i, name, false, true);
2424 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
2425 m_lattice[index].merge_direct_load ();
2426 else
2428 call_flags = callee_to_caller_flags
2429 (call_flags, ignore_stores,
2430 m_lattice[index]);
2431 if (!record_ipa)
2432 m_lattice[index].merge (call_flags);
2433 else
2434 m_lattice[index].add_escape_point (call, i,
2435 call_flags, false);
2440 else if (gimple_assign_load_p (use_stmt))
2442 gassign *assign = as_a <gassign *> (use_stmt);
2443 /* Memory to memory copy. */
2444 if (gimple_store_p (assign))
2446 /* Handle *lhs = *name.
2448 We do not track memory locations, so assume that value
2449 is used arbitrarily. */
2450 if (memory_access_to (gimple_assign_rhs1 (assign), name))
2451 m_lattice[index].merge (deref_flags (0, false));
2452 /* Handle *name = *exp. */
2453 else if (memory_access_to (gimple_assign_lhs (assign), name))
2454 m_lattice[index].merge_direct_store ();
2456 /* Handle lhs = *name. */
2457 else if (memory_access_to (gimple_assign_rhs1 (assign), name))
2459 tree lhs = gimple_assign_lhs (assign);
2460 merge_with_ssa_name (name, lhs, true);
2463 else if (gimple_store_p (use_stmt))
2465 gassign *assign = dyn_cast <gassign *> (use_stmt);
2467 /* Handle *lhs = name. */
2468 if (assign && gimple_assign_rhs1 (assign) == name)
2470 if (dump_file)
2471 fprintf (dump_file, "%*s ssa name saved to memory\n",
2472 m_depth * 4, "");
2473 m_lattice[index].merge (0);
2475 /* Handle *name = exp. */
2476 else if (assign
2477 && memory_access_to (gimple_assign_lhs (assign), name))
2479 /* In general we cannot ignore clobbers because they are
2480 barriers for code motion, however after inlining it is safe to
2481 do so because local optimization passes do not consider clobbers
2482 from other functions.
2483 Similar logic is in ipa-pure-const.c. */
2484 if (!cfun->after_inlining || !gimple_clobber_p (assign))
2485 m_lattice[index].merge_direct_store ();
2487 /* ASM statements etc. */
2488 else if (!assign)
2490 if (dump_file)
2491 fprintf (dump_file, "%*s Unhandled store\n", m_depth * 4, "");
2492 m_lattice[index].merge (0);
2495 else if (gassign *assign = dyn_cast <gassign *> (use_stmt))
2497 enum tree_code code = gimple_assign_rhs_code (assign);
2499 /* See if operation is a merge as considered by
2500 tree-ssa-structalias.c:find_func_aliases. */
2501 if (!truth_value_p (code)
2502 && code != POINTER_DIFF_EXPR
2503 && (code != POINTER_PLUS_EXPR
2504 || gimple_assign_rhs1 (assign) == name))
2506 tree lhs = gimple_assign_lhs (assign);
2507 merge_with_ssa_name (name, lhs, false);
2510 else if (gphi *phi = dyn_cast <gphi *> (use_stmt))
2512 tree result = gimple_phi_result (phi);
2513 merge_with_ssa_name (name, result, false);
2515 /* Conditions are not considered escape points
2516 by tree-ssa-structalias. */
2517 else if (gimple_code (use_stmt) == GIMPLE_COND)
2519 else
2521 if (dump_file)
2522 fprintf (dump_file, "%*s Unhandled stmt\n", m_depth * 4, "");
2523 m_lattice[index].merge (0);
2526 if (dump_file)
2528 fprintf (dump_file, "%*s current flags of ", m_depth * 4, "");
2529 print_generic_expr (dump_file, name);
2530 m_lattice[index].dump (dump_file, m_depth * 4 + 4);
2533 if (dump_file)
2535 fprintf (dump_file, "%*sflags of ssa name ", m_depth * 4, "");
2536 print_generic_expr (dump_file, name);
2537 m_lattice[index].dump (dump_file, m_depth * 4 + 2);
2539 m_lattice[index].open = false;
2540 if (!m_lattice[index].do_dataflow)
2541 m_lattice[index].known = true;
2544 /* Propagate info from SRC to DEST. If DEREF is true, assume that SRC
2545 is dereferenced. */
2547 void
2548 modref_eaf_analysis::merge_with_ssa_name (tree dest, tree src, bool deref)
2550 int index = SSA_NAME_VERSION (dest);
2551 int src_index = SSA_NAME_VERSION (src);
2553 /* Merging lattice with itself is a no-op. */
2554 if (!deref && src == dest)
2555 return;
2557 m_depth++;
2558 analyze_ssa_name (src);
2559 m_depth--;
2560 if (deref)
2561 m_lattice[index].merge_deref (m_lattice[src_index], false);
2562 else
2563 m_lattice[index].merge (m_lattice[src_index]);
2565 /* If we failed to produce a final solution, add an edge to the dataflow
2566 graph. */
2567 if (!m_lattice[src_index].known)
2569 modref_lattice::propagate_edge e = {index, deref};
2571 if (!m_lattice[src_index].propagate_to.length ())
2572 m_names_to_propagate.safe_push (src_index);
2573 m_lattice[src_index].propagate_to.safe_push (e);
2574 m_lattice[src_index].changed = true;
2575 m_lattice[src_index].do_dataflow = true;
2576 if (dump_file)
2577 fprintf (dump_file,
2578 "%*sWill propgate from ssa_name %i to %i%s\n",
2579 m_depth * 4 + 4,
2580 "", src_index, index, deref ? " (deref)" : "");
2584 /* If we deferred some SSA names, reprocess them. If some
2585 dataflow edges were introduced, do the actual iterative dataflow. */
2587 void
2588 modref_eaf_analysis::propagate ()
2590 int iterations = 0;
2591 size_t i;
2592 int index;
2593 bool changed = true;
2595 while (m_deferred_names.length ())
2597 tree name = m_deferred_names.pop ();
2598 m_lattice[SSA_NAME_VERSION (name)].open = false;
2599 if (dump_file)
2600 fprintf (dump_file, "Analyzing deferred SSA name\n");
2601 analyze_ssa_name (name);
2604 if (!m_names_to_propagate.length ())
2605 return;
2606 if (dump_file)
2607 fprintf (dump_file, "Propagating EAF flags\n");
2609 /* Compute reverse postorder. */
2610 auto_vec <int> rpo;
2611 struct stack_entry
2613 int name;
2614 unsigned pos;
2616 auto_vec <struct stack_entry> stack;
2617 int pos = m_names_to_propagate.length () - 1;
2619 rpo.safe_grow (m_names_to_propagate.length (), true);
2620 stack.reserve_exact (m_names_to_propagate.length ());
2622 /* We reuse the known flag for RPO DFS walk bookkeeping. */
2623 if (flag_checking)
2624 FOR_EACH_VEC_ELT (m_names_to_propagate, i, index)
2625 gcc_assert (!m_lattice[index].known && m_lattice[index].changed);
2627 FOR_EACH_VEC_ELT (m_names_to_propagate, i, index)
2629 if (!m_lattice[index].known)
2631 stack_entry e = {index, 0};
2633 stack.quick_push (e);
2634 m_lattice[index].known = true;
2636 while (stack.length ())
2638 bool found = false;
2639 int index1 = stack.last ().name;
2641 while (stack.last ().pos < m_lattice[index1].propagate_to.length ())
2643 int index2 = m_lattice[index1]
2644 .propagate_to[stack.last ().pos].ssa_name;
2646 stack.last ().pos++;
2647 if (!m_lattice[index2].known
2648 && m_lattice[index2].propagate_to.length ())
2650 stack_entry e = {index2, 0};
2652 stack.quick_push (e);
2653 m_lattice[index2].known = true;
2654 found = true;
2655 break;
2658 if (!found
2659 && stack.last ().pos == m_lattice[index1].propagate_to.length ())
2661 rpo[pos--] = index1;
2662 stack.pop ();
2667 /* Perform iterative dataflow. */
2668 while (changed)
2670 changed = false;
2671 iterations++;
2672 if (dump_file)
2673 fprintf (dump_file, " iteration %i\n", iterations);
2674 FOR_EACH_VEC_ELT (rpo, i, index)
2676 if (m_lattice[index].changed)
2678 size_t j;
2680 m_lattice[index].changed = false;
2681 if (dump_file)
2682 fprintf (dump_file, " Visiting ssa name %i\n", index);
2683 for (j = 0; j < m_lattice[index].propagate_to.length (); j++)
2685 bool ch;
2686 int target = m_lattice[index].propagate_to[j].ssa_name;
2687 bool deref = m_lattice[index].propagate_to[j].deref;
2689 if (dump_file)
2690 fprintf (dump_file, " Propagating flags of ssa name"
2691 " %i to %i%s\n",
2692 index, target, deref ? " (deref)" : "");
2693 m_lattice[target].known = true;
2694 if (!m_lattice[index].propagate_to[j].deref)
2695 ch = m_lattice[target].merge (m_lattice[index]);
2696 else
2697 ch = m_lattice[target].merge_deref (m_lattice[index],
2698 false);
2699 if (!ch)
2700 continue;
2701 if (dump_file)
2703 fprintf (dump_file, " New lattice: ");
2704 m_lattice[target].dump (dump_file);
2706 changed = true;
2707 m_lattice[target].changed = true;
2712 if (dump_file)
2713 fprintf (dump_file, "EAF flags propagated in %i iterations\n", iterations);
2716 /* Record escape points of NAME, which corresponds to PARM_INDEX and has known flags FLAGS. */
2718 void
2719 modref_eaf_analysis::record_escape_points (tree name, int parm_index, int flags)
2721 modref_lattice &lattice = m_lattice[SSA_NAME_VERSION (name)];
2723 if (lattice.escape_points.length ())
2725 escape_point *ep;
2726 unsigned int ip;
2727 cgraph_node *node = cgraph_node::get (current_function_decl);
2729 gcc_assert (m_ipa);
2730 FOR_EACH_VEC_ELT (lattice.escape_points, ip, ep)
2731 if ((ep->min_flags & flags) != flags)
2733 cgraph_edge *e = node->get_edge (ep->call);
2734 struct escape_entry ee = {parm_index, ep->arg,
2735 ep->min_flags, ep->direct};
2737 escape_summaries->get_create (e)->esc.safe_push (ee);
2742 /* Determine EAF flags for function parameters
2743 and fill in SUMMARY/SUMMARY_LTO. If IPA is true work in IPA mode
2744 where we also collect escape points.
2745 PAST_FLAGS, PAST_RETSLOT_FLAGS, PAST_STATIC_CHAIN_FLAGS can be
2746 used to preserve flags from a previous (IPA) run for cases where
2747 late optimizations changed the code in a way we can no longer
2748 analyze it easily. */
2750 static void
2751 analyze_parms (modref_summary *summary, modref_summary_lto *summary_lto,
2752 bool ipa, vec<eaf_flags_t> &past_flags,
2753 int past_retslot_flags, int past_static_chain_flags)
2755 unsigned int parm_index = 0;
2756 unsigned int count = 0;
2757 int ecf_flags = flags_from_decl_or_type (current_function_decl);
2758 tree retslot = NULL;
2759 tree static_chain = NULL;
2761 /* If there is return slot, look up its SSA name. */
2762 if (DECL_RESULT (current_function_decl)
2763 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
2764 retslot = ssa_default_def (cfun, DECL_RESULT (current_function_decl));
2765 if (cfun->static_chain_decl)
2766 static_chain = ssa_default_def (cfun, cfun->static_chain_decl);
2768 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
2769 parm = TREE_CHAIN (parm))
2770 count++;
2772 if (!count && !retslot && !static_chain)
2773 return;
2775 modref_eaf_analysis eaf_analysis (ipa);
2777 /* Determine all SSA names we need to know flags for. */
2778 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
2779 parm = TREE_CHAIN (parm))
2781 tree name = ssa_default_def (cfun, parm);
2782 if (name)
2783 eaf_analysis.analyze_ssa_name (name);
2785 if (retslot)
2786 eaf_analysis.analyze_ssa_name (retslot);
2787 if (static_chain)
2788 eaf_analysis.analyze_ssa_name (static_chain);
2790 /* Do the dataflow. */
2791 eaf_analysis.propagate ();
2793 tree attr = lookup_attribute ("fn spec",
2794 TYPE_ATTRIBUTES
2795 (TREE_TYPE (current_function_decl)));
2796 attr_fnspec fnspec (attr
2797 ? TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))
2798 : "");
2801 /* Store results to summaries. */
2802 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm; parm_index++,
2803 parm = TREE_CHAIN (parm))
2805 tree name = ssa_default_def (cfun, parm);
2806 if (!name || has_zero_uses (name))
2808 /* We do not track non-SSA parameters,
2809 but we want to track unused gimple_regs. */
2810 if (!is_gimple_reg (parm))
2811 continue;
2812 if (summary)
2814 if (parm_index >= summary->arg_flags.length ())
2815 summary->arg_flags.safe_grow_cleared (count, true);
2816 summary->arg_flags[parm_index] = EAF_UNUSED;
2818 else if (summary_lto)
2820 if (parm_index >= summary_lto->arg_flags.length ())
2821 summary_lto->arg_flags.safe_grow_cleared (count, true);
2822 summary_lto->arg_flags[parm_index] = EAF_UNUSED;
2824 continue;
2826 int flags = eaf_analysis.get_ssa_name_flags (name);
2827 int attr_flags = fnspec.arg_eaf_flags (parm_index);
2829 if (dump_file && (flags | attr_flags) != flags && !(flags & EAF_UNUSED))
2831 fprintf (dump_file,
2832 " Flags for param %i combined with fnspec flags:",
2833 (int)parm_index);
2834 dump_eaf_flags (dump_file, attr_flags, false);
2835 fprintf (dump_file, " determined: ");
2836 dump_eaf_flags (dump_file, flags, true);
2838 flags |= attr_flags;
2840 /* Eliminate useless flags so we do not end up storing unnecessary
2841 summaries. */
2843 flags = remove_useless_eaf_flags
2844 (flags, ecf_flags,
2845 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
2846 if (past_flags.length () > parm_index)
2848 int past = past_flags[parm_index];
2849 past = remove_useless_eaf_flags
2850 (past, ecf_flags,
2851 VOID_TYPE_P (TREE_TYPE
2852 (TREE_TYPE (current_function_decl))));
2853 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
2855 fprintf (dump_file,
2856 " Flags for param %i combined with IPA pass:",
2857 (int)parm_index);
2858 dump_eaf_flags (dump_file, past, false);
2859 fprintf (dump_file, " determined: ");
2860 dump_eaf_flags (dump_file, flags, true);
2862 if (!(flags & EAF_UNUSED))
2863 flags |= past;
2866 if (flags)
2868 if (summary)
2870 if (parm_index >= summary->arg_flags.length ())
2871 summary->arg_flags.safe_grow_cleared (count, true);
2872 summary->arg_flags[parm_index] = flags;
2874 else if (summary_lto)
2876 if (parm_index >= summary_lto->arg_flags.length ())
2877 summary_lto->arg_flags.safe_grow_cleared (count, true);
2878 summary_lto->arg_flags[parm_index] = flags;
2880 eaf_analysis.record_escape_points (name, parm_index, flags);
2883 if (retslot)
2885 int flags = eaf_analysis.get_ssa_name_flags (retslot);
2886 int past = past_retslot_flags;
2888 flags = remove_useless_eaf_flags (flags, ecf_flags, false);
2889 past = remove_useless_eaf_flags
2890 (past, ecf_flags,
2891 VOID_TYPE_P (TREE_TYPE
2892 (TREE_TYPE (current_function_decl))));
2893 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
2895 fprintf (dump_file,
2896 " Retslot flags combined with IPA pass:");
2897 dump_eaf_flags (dump_file, past, false);
2898 fprintf (dump_file, " determined: ");
2899 dump_eaf_flags (dump_file, flags, true);
2901 if (!(flags & EAF_UNUSED))
2902 flags |= past;
2903 if (flags)
2905 if (summary)
2906 summary->retslot_flags = flags;
2907 if (summary_lto)
2908 summary_lto->retslot_flags = flags;
2909 eaf_analysis.record_escape_points (retslot,
2910 MODREF_RETSLOT_PARM, flags);
2913 if (static_chain)
2915 int flags = eaf_analysis.get_ssa_name_flags (static_chain);
2916 int past = past_static_chain_flags;
2918 flags = remove_useless_eaf_flags (flags, ecf_flags, false);
2919 past = remove_useless_eaf_flags
2920 (past, ecf_flags,
2921 VOID_TYPE_P (TREE_TYPE
2922 (TREE_TYPE (current_function_decl))));
2923 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED))
2925 fprintf (dump_file,
2926 " Static chain flags combined with IPA pass:");
2927 dump_eaf_flags (dump_file, past, false);
2928 fprintf (dump_file, " determined: ");
2929 dump_eaf_flags (dump_file, flags, true);
2931 if (!(flags & EAF_UNUSED))
2932 flags |= past;
2933 if (flags)
2935 if (summary)
2936 summary->static_chain_flags = flags;
2937 if (summary_lto)
2938 summary_lto->static_chain_flags = flags;
2939 eaf_analysis.record_escape_points (static_chain,
2940 MODREF_STATIC_CHAIN_PARM,
2941 flags);
2946 /* Analyze function. IPA indicates whether we're running in local mode
2947 (false) or IPA mode (true).
2948 Return true if fixup_cfg is needed after the pass. */
2950 static bool
2951 analyze_function (bool ipa)
2953 bool fixup_cfg = false;
2954 if (dump_file)
2955 fprintf (dump_file, "\n\nmodref analyzing '%s' (ipa=%i)%s%s\n",
2956 cgraph_node::get (current_function_decl)->dump_name (), ipa,
2957 TREE_READONLY (current_function_decl) ? " (const)" : "",
2958 DECL_PURE_P (current_function_decl) ? " (pure)" : "");
2960 /* Don't analyze this function if modref is disabled or it has the "noipa" attribute. */
2961 if (!flag_ipa_modref
2962 || lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl)))
2963 return false;
2965 /* Compute no-LTO summaries when local optimization is going to happen. */
2966 bool nolto = (!ipa || ((!flag_lto || flag_fat_lto_objects) && !in_lto_p)
2967 || (in_lto_p && !flag_wpa
2968 && flag_incremental_link != INCREMENTAL_LINK_LTO));
2969 /* Compute LTO when LTO streaming is going to happen. */
2970 bool lto = ipa && ((flag_lto && !in_lto_p)
2971 || flag_wpa
2972 || flag_incremental_link == INCREMENTAL_LINK_LTO);
2973 cgraph_node *fnode = cgraph_node::get (current_function_decl);
2975 modref_summary *summary = NULL;
2976 modref_summary_lto *summary_lto = NULL;
2978 bool past_flags_known = false;
2979 auto_vec <eaf_flags_t> past_flags;
2980 int past_retslot_flags = 0;
2981 int past_static_chain_flags = 0;
2983 /* Initialize the summary.
2984 If we run in local mode there is possibly a pre-existing summary from
2985 the IPA pass. Dump it so it is easy to compare whether mod-ref info
2986 has improved. */
2987 if (!ipa)
2989 if (!optimization_summaries)
2990 optimization_summaries = modref_summaries::create_ggc (symtab);
2991 else /* Remove existing summary if we are re-running the pass. */
2993 summary = optimization_summaries->get (fnode);
2994 if (summary != NULL
2995 && summary->loads)
2997 if (dump_file)
2999 fprintf (dump_file, "Past summary:\n");
3000 optimization_summaries->get (fnode)->dump (dump_file);
3002 past_flags.reserve_exact (summary->arg_flags.length ());
3003 past_flags.splice (summary->arg_flags);
3004 past_retslot_flags = summary->retslot_flags;
3005 past_static_chain_flags = summary->static_chain_flags;
3006 past_flags_known = true;
3008 optimization_summaries->remove (fnode);
3010 summary = optimization_summaries->get_create (fnode);
3011 gcc_checking_assert (nolto && !lto);
3013 /* In IPA mode we analyze every function precisely once. Assert that. */
3014 else
3016 if (nolto)
3018 if (!summaries)
3019 summaries = modref_summaries::create_ggc (symtab);
3020 else
3021 summaries->remove (fnode);
3022 summary = summaries->get_create (fnode);
3024 if (lto)
3026 if (!summaries_lto)
3027 summaries_lto = modref_summaries_lto::create_ggc (symtab);
3028 else
3029 summaries_lto->remove (fnode);
3030 summary_lto = summaries_lto->get_create (fnode);
3032 if (!fnspec_summaries)
3033 fnspec_summaries = new fnspec_summaries_t (symtab);
3034 if (!escape_summaries)
3035 escape_summaries = new escape_summaries_t (symtab);
3039 /* Create and initialize summary for F.
3040 Note that summaries may already be allocated from a previous
3041 run of the pass. */
3042 if (nolto)
3044 gcc_assert (!summary->loads);
3045 summary->loads = modref_records::create_ggc ();
3046 gcc_assert (!summary->stores);
3047 summary->stores = modref_records::create_ggc ();
3048 summary->writes_errno = false;
3049 summary->side_effects = false;
3050 summary->nondeterministic = false;
3051 summary->calls_interposable = false;
3053 if (lto)
3055 gcc_assert (!summary_lto->loads);
3056 summary_lto->loads = modref_records_lto::create_ggc ();
3057 gcc_assert (!summary_lto->stores);
3058 summary_lto->stores = modref_records_lto::create_ggc ();
3059 summary_lto->writes_errno = false;
3060 summary_lto->side_effects = false;
3061 summary_lto->nondeterministic = false;
3062 summary_lto->calls_interposable = false;
3065 analyze_parms (summary, summary_lto, ipa,
3066 past_flags, past_retslot_flags, past_static_chain_flags);
3069 modref_access_analysis analyzer (ipa, summary, summary_lto);
3070 analyzer.analyze ();
3073 if (!ipa && flag_ipa_pure_const)
3075 if (!summary->stores->every_base && !summary->stores->bases
3076 && !summary->nondeterministic)
3078 if (!summary->loads->every_base && !summary->loads->bases
3079 && !summary->calls_interposable)
3080 fixup_cfg = ipa_make_function_const (fnode,
3081 summary->side_effects, true);
3082 else
3083 fixup_cfg = ipa_make_function_pure (fnode,
3084 summary->side_effects, true);
3087 int ecf_flags = flags_from_decl_or_type (current_function_decl);
3088 if (summary && !summary->useful_p (ecf_flags))
3090 if (!ipa)
3091 optimization_summaries->remove (fnode);
3092 else
3093 summaries->remove (fnode);
3094 summary = NULL;
3096 if (summary)
3097 summary->finalize (current_function_decl);
3098 if (summary_lto && !summary_lto->useful_p (ecf_flags))
3100 summaries_lto->remove (fnode);
3101 summary_lto = NULL;
3104 if (ipa && !summary && !summary_lto)
3105 remove_modref_edge_summaries (fnode);
3107 if (dump_file)
3109 fprintf (dump_file, " - modref done with result: tracked.\n");
3110 if (summary)
3111 summary->dump (dump_file);
3112 if (summary_lto)
3113 summary_lto->dump (dump_file);
3114 dump_modref_edge_summaries (dump_file, fnode, 2);
3115 /* To simplify debugging, compare IPA and local solutions. */
3116 if (past_flags_known && summary)
3118 size_t len = summary->arg_flags.length ();
3120 if (past_flags.length () > len)
3121 len = past_flags.length ();
3122 for (size_t i = 0; i < len; i++)
3124 int old_flags = i < past_flags.length () ? past_flags[i] : 0;
3125 int new_flags = i < summary->arg_flags.length ()
3126 ? summary->arg_flags[i] : 0;
3127 old_flags = remove_useless_eaf_flags
3128 (old_flags, flags_from_decl_or_type (current_function_decl),
3129 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3130 if (old_flags != new_flags)
3132 if ((old_flags & ~new_flags) == 0
3133 || (new_flags & EAF_UNUSED))
3134 fprintf (dump_file, " Flags for param %i improved:",
3135 (int)i);
3136 else
3137 gcc_unreachable ();
3138 dump_eaf_flags (dump_file, old_flags, false);
3139 fprintf (dump_file, " -> ");
3140 dump_eaf_flags (dump_file, new_flags, true);
3143 past_retslot_flags = remove_useless_eaf_flags
3144 (past_retslot_flags,
3145 flags_from_decl_or_type (current_function_decl),
3146 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3147 if (past_retslot_flags != summary->retslot_flags)
3149 if ((past_retslot_flags & ~summary->retslot_flags) == 0
3150 || (summary->retslot_flags & EAF_UNUSED))
3151 fprintf (dump_file, " Flags for retslot improved:");
3152 else
3153 gcc_unreachable ();
3154 dump_eaf_flags (dump_file, past_retslot_flags, false);
3155 fprintf (dump_file, " -> ");
3156 dump_eaf_flags (dump_file, summary->retslot_flags, true);
3158 past_static_chain_flags = remove_useless_eaf_flags
3159 (past_static_chain_flags,
3160 flags_from_decl_or_type (current_function_decl),
3161 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3162 if (past_static_chain_flags != summary->static_chain_flags)
3164 if ((past_static_chain_flags & ~summary->static_chain_flags) == 0
3165 || (summary->static_chain_flags & EAF_UNUSED))
3166 fprintf (dump_file, " Flags for static chain improved:");
3167 else
3168 gcc_unreachable ();
3169 dump_eaf_flags (dump_file, past_static_chain_flags, false);
3170 fprintf (dump_file, " -> ");
3171 dump_eaf_flags (dump_file, summary->static_chain_flags, true);
3174 else if (past_flags_known && !summary)
3176 for (size_t i = 0; i < past_flags.length (); i++)
3178 int old_flags = past_flags[i];
3179 old_flags = remove_useless_eaf_flags
3180 (old_flags, flags_from_decl_or_type (current_function_decl),
3181 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3182 if (old_flags)
3184 fprintf (dump_file, " Flags for param %i worsened:",
3185 (int)i);
3186 dump_eaf_flags (dump_file, old_flags, false);
3187 fprintf (dump_file, " -> \n");
3190 past_retslot_flags = remove_useless_eaf_flags
3191 (past_retslot_flags,
3192 flags_from_decl_or_type (current_function_decl),
3193 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3194 if (past_retslot_flags)
3196 fprintf (dump_file, " Flags for retslot worsened:");
3197 dump_eaf_flags (dump_file, past_retslot_flags, false);
3198 fprintf (dump_file, " ->\n");
3200 past_static_chain_flags = remove_useless_eaf_flags
3201 (past_static_chain_flags,
3202 flags_from_decl_or_type (current_function_decl),
3203 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))));
3204 if (past_static_chain_flags)
3206 fprintf (dump_file, " Flags for static chain worsened:");
3207 dump_eaf_flags (dump_file, past_static_chain_flags, false);
3208 fprintf (dump_file, " ->\n");
3212 return fixup_cfg;
3215 /* Callback for generate_summary. */
3217 static void
3218 modref_generate (void)
3220 struct cgraph_node *node;
3221 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
3223 function *f = DECL_STRUCT_FUNCTION (node->decl);
3224 if (!f)
3225 continue;
3226 push_cfun (f);
3227 analyze_function (true);
3228 pop_cfun ();
3232 } /* ANON namespace. */
3234 /* Debugging helper. */
3236 void
3237 debug_eaf_flags (int flags)
3239 dump_eaf_flags (stderr, flags, true);
3242 /* Called when a new function is inserted to callgraph late. */
3244 void
3245 modref_summaries::insert (struct cgraph_node *node, modref_summary *)
3247 /* Local passes ought to be executed by the pass manager. */
3248 if (this == optimization_summaries)
3250 optimization_summaries->remove (node);
3251 return;
3253 if (!DECL_STRUCT_FUNCTION (node->decl)
3254 || !opt_for_fn (node->decl, flag_ipa_modref))
3256 summaries->remove (node);
3257 return;
3259 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
3260 analyze_function (true);
3261 pop_cfun ();
3264 /* Called when a new function is inserted to callgraph late. */
3266 void
3267 modref_summaries_lto::insert (struct cgraph_node *node, modref_summary_lto *)
3269 /* We do not support adding new functions when IPA information is
3270 already propagated. This is done only by SIMD cloning, which is
3271 not very critical. */
3272 if (!DECL_STRUCT_FUNCTION (node->decl)
3273 || !opt_for_fn (node->decl, flag_ipa_modref)
3274 || propagated)
3276 summaries_lto->remove (node);
3277 return;
3279 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
3280 analyze_function (true);
3281 pop_cfun ();
3284 /* Called when new clone is inserted to callgraph late. */
3286 void
3287 modref_summaries::duplicate (cgraph_node *, cgraph_node *dst,
3288 modref_summary *src_data,
3289 modref_summary *dst_data)
3291 /* Do not duplicate optimization summaries; we do not handle parameter
3292 transforms on them. */
3293 if (this == optimization_summaries)
3295 optimization_summaries->remove (dst);
3296 return;
3298 dst_data->stores = modref_records::create_ggc ();
3299 dst_data->stores->copy_from (src_data->stores);
3300 dst_data->loads = modref_records::create_ggc ();
3301 dst_data->loads->copy_from (src_data->loads);
3302 dst_data->kills.reserve_exact (src_data->kills.length ());
3303 dst_data->kills.splice (src_data->kills);
3304 dst_data->writes_errno = src_data->writes_errno;
3305 dst_data->side_effects = src_data->side_effects;
3306 dst_data->nondeterministic = src_data->nondeterministic;
3307 dst_data->calls_interposable = src_data->calls_interposable;
3308 if (src_data->arg_flags.length ())
3309 dst_data->arg_flags = src_data->arg_flags.copy ();
3310 dst_data->retslot_flags = src_data->retslot_flags;
3311 dst_data->static_chain_flags = src_data->static_chain_flags;
3314 /* Called when new clone is inserted to callgraph late. */
3316 void
3317 modref_summaries_lto::duplicate (cgraph_node *, cgraph_node *,
3318 modref_summary_lto *src_data,
3319 modref_summary_lto *dst_data)
3321 /* Be sure that no further cloning happens after ipa-modref. If it does
3322 we will need to update signatures for possible param changes. */
3323 gcc_checking_assert (!((modref_summaries_lto *)summaries_lto)->propagated);
3324 dst_data->stores = modref_records_lto::create_ggc ();
3325 dst_data->stores->copy_from (src_data->stores);
3326 dst_data->loads = modref_records_lto::create_ggc ();
3327 dst_data->loads->copy_from (src_data->loads);
3328 dst_data->kills.reserve_exact (src_data->kills.length ());
3329 dst_data->kills.splice (src_data->kills);
3330 dst_data->writes_errno = src_data->writes_errno;
3331 dst_data->side_effects = src_data->side_effects;
3332 dst_data->nondeterministic = src_data->nondeterministic;
3333 dst_data->calls_interposable = src_data->calls_interposable;
3334 if (src_data->arg_flags.length ())
3335 dst_data->arg_flags = src_data->arg_flags.copy ();
3336 dst_data->retslot_flags = src_data->retslot_flags;
3337 dst_data->static_chain_flags = src_data->static_chain_flags;
3340 namespace
3342 /* Definition of the modref pass on GIMPLE. */
3343 const pass_data pass_data_modref = {
3344 GIMPLE_PASS,
3345 "modref",
3346 OPTGROUP_IPA,
3347 TV_TREE_MODREF,
3348 (PROP_cfg | PROP_ssa),
3349 0,
3350 0,
3351 0,
3352 0,
3353 };
3355 class pass_modref : public gimple_opt_pass
3357 public:
3358 pass_modref (gcc::context *ctxt)
3359 : gimple_opt_pass (pass_data_modref, ctxt) {}
3361 /* opt_pass methods: */
3362 opt_pass *clone ()
3364 return new pass_modref (m_ctxt);
3366 virtual bool gate (function *)
3368 return flag_ipa_modref;
3370 virtual unsigned int execute (function *);
3373 /* Encode TT to the output block OB using the summary streaming API. */
3375 static void
3376 write_modref_records (modref_records_lto *tt, struct output_block *ob)
3378 streamer_write_uhwi (ob, tt->every_base);
3379 streamer_write_uhwi (ob, vec_safe_length (tt->bases));
3380 for (auto base_node : tt->bases)
3382 stream_write_tree (ob, base_node->base, true);
3384 streamer_write_uhwi (ob, base_node->every_ref);
3385 streamer_write_uhwi (ob, vec_safe_length (base_node->refs));
3387 for (auto ref_node : base_node->refs)
3389 stream_write_tree (ob, ref_node->ref, true);
3390 streamer_write_uhwi (ob, ref_node->every_access);
3391 streamer_write_uhwi (ob, vec_safe_length (ref_node->accesses));
3393 for (auto access_node : ref_node->accesses)
3394 access_node.stream_out (ob);
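/* Illustration (a minimal sketch of the same shape as the writer above;
   the toy_* types are hypothetical and this writes into a byte vector
   rather than an output block): a three-level tree serialized as
   count-prefixed sequences, so the reader can replay it without any
   terminator tokens.  */
#include <cstdint>
#include <vector>

struct toy_ref { std::vector<uint32_t> accesses; };
struct toy_base { std::vector<toy_ref> refs; };

static void
put_u32 (std::vector<uint8_t> &out, uint32_t v)
{
  for (int i = 0; i < 4; i++)
    out.push_back ((v >> (8 * i)) & 0xff);
}

static void
write_toy_records (std::vector<uint8_t> &out,
		   const std::vector<toy_base> &bases)
{
  put_u32 (out, bases.size ());
  for (const toy_base &b : bases)
    {
      put_u32 (out, b.refs.size ());
      for (const toy_ref &r : b.refs)
	{
	  put_u32 (out, r.accesses.size ());
	  for (uint32_t a : r.accesses)
	    put_u32 (out, a);
	}
    }
}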
3399 /* Read a modref_tree from the input block IB using the data from DATA_IN.
3400 This assumes that the tree was encoded using write_modref_records.
3401 Either nolto_ret or lto_ret is initialized by the tree depending on
3402 whether LTO streaming is expected or not. */
3404 static void
3405 read_modref_records (tree decl,
3406 lto_input_block *ib, struct data_in *data_in,
3407 modref_records **nolto_ret,
3408 modref_records_lto **lto_ret)
3410 size_t max_bases = opt_for_fn (decl, param_modref_max_bases);
3411 size_t max_refs = opt_for_fn (decl, param_modref_max_refs);
3412 size_t max_accesses = opt_for_fn (decl, param_modref_max_accesses);
3414 if (lto_ret)
3415 *lto_ret = modref_records_lto::create_ggc ();
3416 if (nolto_ret)
3417 *nolto_ret = modref_records::create_ggc ();
3418 gcc_checking_assert (lto_ret || nolto_ret);
3420 size_t every_base = streamer_read_uhwi (ib);
3421 size_t nbase = streamer_read_uhwi (ib);
3423 gcc_assert (!every_base || nbase == 0);
3424 if (every_base)
3426 if (nolto_ret)
3427 (*nolto_ret)->collapse ();
3428 if (lto_ret)
3429 (*lto_ret)->collapse ();
3431 for (size_t i = 0; i < nbase; i++)
3433 tree base_tree = stream_read_tree (ib, data_in);
3434 modref_base_node <alias_set_type> *nolto_base_node = NULL;
3435 modref_base_node <tree> *lto_base_node = NULL;
3437 /* At stream-in time we have LTO alias info. Check if we streamed in
3438 something obviously unnecessary. Do not glob types by alias sets;
3439 it is not 100% clear that ltrans types will get merged the same way.
3440 Types may get refined based on ODR type conflicts. */
3441 if (base_tree && !get_alias_set (base_tree))
3443 if (dump_file)
3445 fprintf (dump_file, "Streamed in alias set 0 type ");
3446 print_generic_expr (dump_file, base_tree);
3447 fprintf (dump_file, "\n");
3449 base_tree = NULL;
3452 if (nolto_ret)
3453 nolto_base_node = (*nolto_ret)->insert_base (base_tree
3454 ? get_alias_set (base_tree)
3455 : 0, 0, INT_MAX);
3456 if (lto_ret)
3457 lto_base_node = (*lto_ret)->insert_base (base_tree, 0, max_bases);
3458 size_t every_ref = streamer_read_uhwi (ib);
3459 size_t nref = streamer_read_uhwi (ib);
3461 gcc_assert (!every_ref || nref == 0);
3462 if (every_ref)
3464 if (nolto_base_node)
3465 nolto_base_node->collapse ();
3466 if (lto_base_node)
3467 lto_base_node->collapse ();
3469 for (size_t j = 0; j < nref; j++)
3471 tree ref_tree = stream_read_tree (ib, data_in);
3473 if (ref_tree && !get_alias_set (ref_tree))
3475 if (dump_file)
3477 fprintf (dump_file, "Streamed in alias set 0 type ");
3478 print_generic_expr (dump_file, ref_tree);
3479 fprintf (dump_file, "\n");
3481 ref_tree = NULL;
3484 modref_ref_node <alias_set_type> *nolto_ref_node = NULL;
3485 modref_ref_node <tree> *lto_ref_node = NULL;
3487 if (nolto_base_node)
3488 nolto_ref_node
3489 = nolto_base_node->insert_ref (ref_tree
3490 ? get_alias_set (ref_tree) : 0,
3491 max_refs);
3492 if (lto_base_node)
3493 lto_ref_node = lto_base_node->insert_ref (ref_tree, max_refs);
3495 size_t every_access = streamer_read_uhwi (ib);
3496 size_t naccesses = streamer_read_uhwi (ib);
3498 if (nolto_ref_node && every_access)
3499 nolto_ref_node->collapse ();
3500 if (lto_ref_node && every_access)
3501 lto_ref_node->collapse ();
3503 for (size_t k = 0; k < naccesses; k++)
3505 modref_access_node a = modref_access_node::stream_in (ib);
3506 if (nolto_ref_node)
3507 nolto_ref_node->insert_access (a, max_accesses, false);
3508 if (lto_ref_node)
3509 lto_ref_node->insert_access (a, max_accesses, false);
3513 if (lto_ret)
3514 (*lto_ret)->cleanup ();
3515 if (nolto_ret)
3516 (*nolto_ret)->cleanup ();
3519 /* Write ESUM to BP. */
3521 static void
3522 modref_write_escape_summary (struct bitpack_d *bp, escape_summary *esum)
3524 if (!esum)
3526 bp_pack_var_len_unsigned (bp, 0);
3527 return;
3529 bp_pack_var_len_unsigned (bp, esum->esc.length ());
3530 unsigned int i;
3531 escape_entry *ee;
3532 FOR_EACH_VEC_ELT (esum->esc, i, ee)
3534 bp_pack_var_len_int (bp, ee->parm_index);
3535 bp_pack_var_len_unsigned (bp, ee->arg);
3536 bp_pack_var_len_unsigned (bp, ee->min_flags);
3537 bp_pack_value (bp, ee->direct, 1);
3541 /* Read escape summary for E from BP. */
3543 static void
3544 modref_read_escape_summary (struct bitpack_d *bp, cgraph_edge *e)
3546 unsigned int n = bp_unpack_var_len_unsigned (bp);
3547 if (!n)
3548 return;
3549 escape_summary *esum = escape_summaries->get_create (e);
3550 esum->esc.reserve_exact (n);
3551 for (unsigned int i = 0; i < n; i++)
3553 escape_entry ee;
3554 ee.parm_index = bp_unpack_var_len_int (bp);
3555 ee.arg = bp_unpack_var_len_unsigned (bp);
3556 ee.min_flags = bp_unpack_var_len_unsigned (bp);
3557 ee.direct = bp_unpack_value (bp, 1);
3558 esum->esc.quick_push (ee);
3562 /* Callback for write_summary. */
3564 static void
3565 modref_write ()
3567 struct output_block *ob = create_output_block (LTO_section_ipa_modref);
3568 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
3569 unsigned int count = 0;
3570 int i;
3572 if (!summaries_lto)
3574 streamer_write_uhwi (ob, 0);
3575 streamer_write_char_stream (ob->main_stream, 0);
3576 produce_asm (ob, NULL);
3577 destroy_output_block (ob);
3578 return;
3581 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
3583 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
3584 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
3585 modref_summary_lto *r;
3587 if (cnode && cnode->definition && !cnode->alias
3588 && (r = summaries_lto->get (cnode))
3589 && r->useful_p (flags_from_decl_or_type (cnode->decl)))
3590 count++;
3592 streamer_write_uhwi (ob, count);
3594 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
3596 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
3597 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
3599 if (cnode && cnode->definition && !cnode->alias)
3601 modref_summary_lto *r = summaries_lto->get (cnode);
3603 if (!r || !r->useful_p (flags_from_decl_or_type (cnode->decl)))
3604 continue;
3606 streamer_write_uhwi (ob, lto_symtab_encoder_encode (encoder, cnode));
3608 streamer_write_uhwi (ob, r->arg_flags.length ());
3609 for (unsigned int i = 0; i < r->arg_flags.length (); i++)
3610 streamer_write_uhwi (ob, r->arg_flags[i]);
3611 streamer_write_uhwi (ob, r->retslot_flags);
3612 streamer_write_uhwi (ob, r->static_chain_flags);
3614 write_modref_records (r->loads, ob);
3615 write_modref_records (r->stores, ob);
3616 streamer_write_uhwi (ob, r->kills.length ());
3617 for (auto kill : r->kills)
3618 kill.stream_out (ob);
3620 struct bitpack_d bp = bitpack_create (ob->main_stream);
3621 bp_pack_value (&bp, r->writes_errno, 1);
3622 bp_pack_value (&bp, r->side_effects, 1);
3623 bp_pack_value (&bp, r->nondeterministic, 1);
3624 bp_pack_value (&bp, r->calls_interposable, 1);
3625 if (!flag_wpa)
3627 for (cgraph_edge *e = cnode->indirect_calls;
3628 e; e = e->next_callee)
3630 class fnspec_summary *sum = fnspec_summaries->get (e);
3631 bp_pack_value (&bp, sum != NULL, 1);
3632 if (sum)
3633 bp_pack_string (ob, &bp, sum->fnspec, true);
3634 class escape_summary *esum = escape_summaries->get (e);
3635 modref_write_escape_summary (&bp, esum);
3637 for (cgraph_edge *e = cnode->callees; e; e = e->next_callee)
3639 class fnspec_summary *sum = fnspec_summaries->get (e);
3640 bp_pack_value (&bp, sum != NULL, 1);
3641 if (sum)
3642 bp_pack_string (ob, &bp, sum->fnspec, true);
3643 class escape_summary *esum = escape_summaries->get (e);
3644 modref_write_escape_summary (&bp, esum);
3647 streamer_write_bitpack (&bp);
3650 streamer_write_char_stream (ob->main_stream, 0);
3651 produce_asm (ob, NULL);
3652 destroy_output_block (ob);
3655 static void
3656 read_section (struct lto_file_decl_data *file_data, const char *data,
3657 size_t len)
3659 const struct lto_function_header *header
3660 = (const struct lto_function_header *) data;
3661 const int cfg_offset = sizeof (struct lto_function_header);
3662 const int main_offset = cfg_offset + header->cfg_size;
3663 const int string_offset = main_offset + header->main_size;
3664 struct data_in *data_in;
3665 unsigned int i;
3666 unsigned int f_count;
3668 lto_input_block ib ((const char *) data + main_offset, header->main_size,
3669 file_data->mode_table);
3671 data_in
3672 = lto_data_in_create (file_data, (const char *) data + string_offset,
3673 header->string_size, vNULL);
3674 f_count = streamer_read_uhwi (&ib);
3675 for (i = 0; i < f_count; i++)
3677 struct cgraph_node *node;
3678 lto_symtab_encoder_t encoder;
3680 unsigned int index = streamer_read_uhwi (&ib);
3681 encoder = file_data->symtab_node_encoder;
3682 node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder,
3683 index));
3685 modref_summary *modref_sum = summaries
3686 ? summaries->get_create (node) : NULL;
3687 modref_summary_lto *modref_sum_lto = summaries_lto
3688 ? summaries_lto->get_create (node)
3689 : NULL;
3690 if (optimization_summaries)
3691 modref_sum = optimization_summaries->get_create (node);
3693 if (modref_sum)
3695 modref_sum->writes_errno = false;
3696 modref_sum->side_effects = false;
3697 modref_sum->nondeterministic = false;
3698 modref_sum->calls_interposable = false;
3700 if (modref_sum_lto)
3702 modref_sum_lto->writes_errno = false;
3703 modref_sum_lto->side_effects = false;
3704 modref_sum_lto->nondeterministic = false;
3705 modref_sum_lto->calls_interposable = false;
3708 gcc_assert (!modref_sum || (!modref_sum->loads
3709 && !modref_sum->stores));
3710 gcc_assert (!modref_sum_lto || (!modref_sum_lto->loads
3711 && !modref_sum_lto->stores));
3712 unsigned int args = streamer_read_uhwi (&ib);
3713 if (args && modref_sum)
3714 modref_sum->arg_flags.reserve_exact (args);
3715 if (args && modref_sum_lto)
3716 modref_sum_lto->arg_flags.reserve_exact (args);
3717 for (unsigned int i = 0; i < args; i++)
3719 eaf_flags_t flags = streamer_read_uhwi (&ib);
3720 if (modref_sum)
3721 modref_sum->arg_flags.quick_push (flags);
3722 if (modref_sum_lto)
3723 modref_sum_lto->arg_flags.quick_push (flags);
3725 eaf_flags_t flags = streamer_read_uhwi (&ib);
3726 if (modref_sum)
3727 modref_sum->retslot_flags = flags;
3728 if (modref_sum_lto)
3729 modref_sum_lto->retslot_flags = flags;
3731 flags = streamer_read_uhwi (&ib);
3732 if (modref_sum)
3733 modref_sum->static_chain_flags = flags;
3734 if (modref_sum_lto)
3735 modref_sum_lto->static_chain_flags = flags;
3737 read_modref_records (node->decl, &ib, data_in,
3738 modref_sum ? &modref_sum->loads : NULL,
3739 modref_sum_lto ? &modref_sum_lto->loads : NULL);
3740 read_modref_records (node->decl, &ib, data_in,
3741 modref_sum ? &modref_sum->stores : NULL,
3742 modref_sum_lto ? &modref_sum_lto->stores : NULL);
3743 int j = streamer_read_uhwi (&ib);
3744 if (j && modref_sum)
3745 modref_sum->kills.reserve_exact (j);
3746 if (j && modref_sum_lto)
3747 modref_sum_lto->kills.reserve_exact (j);
3748 for (int k = 0; k < j; k++)
3750 modref_access_node a = modref_access_node::stream_in (&ib);
3752 if (modref_sum)
3753 modref_sum->kills.quick_push (a);
3754 if (modref_sum_lto)
3755 modref_sum_lto->kills.quick_push (a);
3757 struct bitpack_d bp = streamer_read_bitpack (&ib);
3758 if (bp_unpack_value (&bp, 1))
3760 if (modref_sum)
3761 modref_sum->writes_errno = true;
3762 if (modref_sum_lto)
3763 modref_sum_lto->writes_errno = true;
3765 if (bp_unpack_value (&bp, 1))
3767 if (modref_sum)
3768 modref_sum->side_effects = true;
3769 if (modref_sum_lto)
3770 modref_sum_lto->side_effects = true;
3772 if (bp_unpack_value (&bp, 1))
3774 if (modref_sum)
3775 modref_sum->nondeterministic = true;
3776 if (modref_sum_lto)
3777 modref_sum_lto->nondeterministic = true;
3779 if (bp_unpack_value (&bp, 1))
3781 if (modref_sum)
3782 modref_sum->calls_interposable = true;
3783 if (modref_sum_lto)
3784 modref_sum_lto->calls_interposable = true;
3786 if (!flag_ltrans)
3788 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
3790 if (bp_unpack_value (&bp, 1))
3792 class fnspec_summary *sum = fnspec_summaries->get_create (e);
3793 sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
3795 modref_read_escape_summary (&bp, e);
3797 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
3799 if (bp_unpack_value (&bp, 1))
3801 class fnspec_summary *sum = fnspec_summaries->get_create (e);
3802 sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
3804 modref_read_escape_summary (&bp, e);
3807 if (flag_ltrans)
3808 modref_sum->finalize (node->decl);
3809 if (dump_file)
3811 fprintf (dump_file, "Read modref for %s\n",
3812 node->dump_name ());
3813 if (modref_sum)
3814 modref_sum->dump (dump_file);
3815 if (modref_sum_lto)
3816 modref_sum_lto->dump (dump_file);
3817 dump_modref_edge_summaries (dump_file, node, 4);
3821 lto_free_section_data (file_data, LTO_section_ipa_modref, NULL, data,
3822 len);
3823 lto_data_in_delete (data_in);
3826 /* Callback for read_summary. */
3828 static void
3829 modref_read (void)
3831 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3832 struct lto_file_decl_data *file_data;
3833 unsigned int j = 0;
3835 gcc_checking_assert (!optimization_summaries && !summaries && !summaries_lto);
3836 if (flag_ltrans)
3837 optimization_summaries = modref_summaries::create_ggc (symtab);
3838 else
3840 if (flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
3841 summaries_lto = modref_summaries_lto::create_ggc (symtab);
3842 if (!flag_wpa
3843 || (flag_incremental_link == INCREMENTAL_LINK_LTO
3844 && flag_fat_lto_objects))
3845 summaries = modref_summaries::create_ggc (symtab);
3846 if (!fnspec_summaries)
3847 fnspec_summaries = new fnspec_summaries_t (symtab);
3848 if (!escape_summaries)
3849 escape_summaries = new escape_summaries_t (symtab);
3852 while ((file_data = file_data_vec[j++]))
3854 size_t len;
3855 const char *data = lto_get_summary_section_data (file_data,
3856 LTO_section_ipa_modref,
3857 &len);
3858 if (data)
3859 read_section (file_data, data, len);
3860 else
3861 /* Fatal error here. We do not want to support compiling ltrans units
3862 with a different version of the compiler or different flags than the
3863 WPA unit, so this should never happen. */
3864 fatal_error (input_location,
3865 "IPA modref summary is missing in input file");
3869 /* Recompute arg_flags for param adjustments in INFO. */
3871 static void
3872 remap_arg_flags (auto_vec <eaf_flags_t> &arg_flags, clone_info *info)
3874 auto_vec<eaf_flags_t> old = arg_flags.copy ();
3875 int max = -1;
3876 size_t i;
3877 ipa_adjusted_param *p;
3879 arg_flags.release ();
3881 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
3883 int o = info->param_adjustments->get_original_index (i);
3884 if (o >= 0 && (int)old.length () > o && old[o])
3885 max = i;
3887 if (max >= 0)
3888 arg_flags.safe_grow_cleared (max + 1, true);
3889 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
3891 int o = info->param_adjustments->get_original_index (i);
3892 if (o >= 0 && (int)old.length () > o && old[o])
3893 arg_flags[i] = old[o];
3897 /* Update kills according to the parm map MAP. */
3899 static void
3900 remap_kills (vec <modref_access_node> &kills, const vec <int> &map)
3902 for (size_t i = 0; i < kills.length ();)
3903 if (kills[i].parm_index >= 0)
3905 if (kills[i].parm_index < (int)map.length ()
3906 && map[kills[i].parm_index] != MODREF_UNKNOWN_PARM)
3908 kills[i].parm_index = map[kills[i].parm_index];
3909 i++;
3911 else
3912 kills.unordered_remove (i);
3914 else
3915 i++;
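/* Example (illustrative only): if MAP is {0, MODREF_UNKNOWN_PARM, 1},
   i.e. the original second parameter was removed, then a kill on
   parm_index 1 is dropped while a kill on parm_index 2 is renumbered
   to 1 and kept.  Kills on the special negative indices are always
   preserved by the final else branch.  */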
3918 /* If the signature changed, update the summary. */
3920 static void
3921 update_signature (struct cgraph_node *node)
3923 clone_info *info = clone_info::get (node);
3924 if (!info || !info->param_adjustments)
3925 return;
3927 modref_summary *r = optimization_summaries
3928 ? optimization_summaries->get (node) : NULL;
3929 modref_summary_lto *r_lto = summaries_lto
3930 ? summaries_lto->get (node) : NULL;
3931 if (!r && !r_lto)
3932 return;
3933 if (dump_file)
3935 fprintf (dump_file, "Updating summary for %s from:\n",
3936 node->dump_name ());
3937 if (r)
3938 r->dump (dump_file);
3939 if (r_lto)
3940 r_lto->dump (dump_file);
3943 size_t i, max = 0;
3944 ipa_adjusted_param *p;
3946 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
3948 int idx = info->param_adjustments->get_original_index (i);
3949 if (idx > (int)max)
3950 max = idx;
3953 auto_vec <int, 32> map;
3955 map.reserve (max + 1);
3956 for (i = 0; i <= max; i++)
3957 map.quick_push (MODREF_UNKNOWN_PARM);
3958 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)
3960 int idx = info->param_adjustments->get_original_index (i);
3961 if (idx >= 0)
3962 map[idx] = i;
3964 if (r)
3966 r->loads->remap_params (&map);
3967 r->stores->remap_params (&map);
3968 remap_kills (r->kills, map);
3969 if (r->arg_flags.length ())
3970 remap_arg_flags (r->arg_flags, info);
3972 if (r_lto)
3974 r_lto->loads->remap_params (&map);
3975 r_lto->stores->remap_params (&map);
3976 remap_kills (r_lto->kills, map);
3977 if (r_lto->arg_flags.length ())
3978 remap_arg_flags (r_lto->arg_flags, info);
3980 if (dump_file)
3982 fprintf (dump_file, "to:\n");
3983 if (r)
3984 r->dump (dump_file);
3985 if (r_lto)
3986 r_lto->dump (dump_file);
3988 if (r)
3989 r->finalize (node->decl);
3990 return;
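/* As a concrete illustration (hypothetical clone, not from the
   sources): for a clone of f (int a, int *b, int *c) that drops A, the
   map built above is {MODREF_UNKNOWN_PARM, 0, 1}; loads, stores, kills
   and arg_flags referring to B and C are renumbered down by one, and
   everything referring to A is forgotten.  */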
3993 /* Definition of the modref IPA pass. */
3994 const pass_data pass_data_ipa_modref =
3996 IPA_PASS, /* type */
3997 "modref", /* name */
3998 OPTGROUP_IPA, /* optinfo_flags */
3999 TV_IPA_MODREF, /* tv_id */
4000 0, /* properties_required */
4001 0, /* properties_provided */
4002 0, /* properties_destroyed */
4003 0, /* todo_flags_start */
4004 ( TODO_dump_symtab ), /* todo_flags_finish */
4007 class pass_ipa_modref : public ipa_opt_pass_d
4009 public:
4010 pass_ipa_modref (gcc::context *ctxt)
4011 : ipa_opt_pass_d (pass_data_ipa_modref, ctxt,
4012 modref_generate, /* generate_summary */
4013 modref_write, /* write_summary */
4014 modref_read, /* read_summary */
4015 modref_write, /* write_optimization_summary */
4016 modref_read, /* read_optimization_summary */
4017 NULL, /* stmt_fixup */
4018 0, /* function_transform_todo_flags_start */
4019 NULL, /* function_transform */
4020 NULL) /* variable_transform */
4023 /* opt_pass methods: */
4024 opt_pass *clone () { return new pass_ipa_modref (m_ctxt); }
4025 virtual bool gate (function *)
4027 return true;
4029 virtual unsigned int execute (function *);
4035 unsigned int pass_modref::execute (function *)
4037 if (analyze_function (false))
4038 return execute_fixup_cfg ();
4039 return 0;
4042 gimple_opt_pass *
4043 make_pass_modref (gcc::context *ctxt)
4045 return new pass_modref (ctxt);
4048 ipa_opt_pass_d *
4049 make_pass_ipa_modref (gcc::context *ctxt)
4051 return new pass_ipa_modref (ctxt);
4054 namespace {
4056 /* Skip edges from and to nodes without modref summaries.
4057 Ignore symbols that are not available. */
4059 static bool
4060 ignore_edge (struct cgraph_edge *e)
4062 /* We merge summaries of inline clones into summaries of functions they
4063 are inlined to. For that reason the complete function bodies must
4064 act as a unit. */
4065 if (!e->inline_failed)
4066 return false;
4067 enum availability avail;
4068 cgraph_node *callee = e->callee->function_or_virtual_thunk_symbol
4069 (&avail, e->caller);
4071 return (avail <= AVAIL_INTERPOSABLE
4072 || ((!optimization_summaries || !optimization_summaries->get (callee))
4073 && (!summaries_lto || !summaries_lto->get (callee))));
4076 /* Compute parm_map for CALLEE_EDGE. */
4078 static bool
4079 compute_parm_map (cgraph_edge *callee_edge, vec<modref_parm_map> *parm_map)
4081 class ipa_edge_args *args;
4082 if (ipa_node_params_sum
4083 && !callee_edge->call_stmt_cannot_inline_p
4084 && (args = ipa_edge_args_sum->get (callee_edge)) != NULL)
4086 int i, count = ipa_get_cs_argument_count (args);
4087 class ipa_node_params *caller_parms_info, *callee_pi;
4088 class ipa_call_summary *es
4089 = ipa_call_summaries->get (callee_edge);
4090 cgraph_node *callee
4091 = callee_edge->callee->function_or_virtual_thunk_symbol
4092 (NULL, callee_edge->caller);
4094 caller_parms_info
4095 = ipa_node_params_sum->get (callee_edge->caller->inlined_to
4096 ? callee_edge->caller->inlined_to
4097 : callee_edge->caller);
4098 callee_pi = ipa_node_params_sum->get (callee);
4100 (*parm_map).safe_grow_cleared (count, true);
4102 for (i = 0; i < count; i++)
4104 if (es && es->param[i].points_to_local_or_readonly_memory)
4106 (*parm_map)[i].parm_index = MODREF_LOCAL_MEMORY_PARM;
4107 continue;
4110 struct ipa_jump_func *jf
4111 = ipa_get_ith_jump_func (args, i);
4112 if (jf && callee_pi)
4114 tree cst = ipa_value_from_jfunc (caller_parms_info,
4116 ipa_get_type
4117 (callee_pi, i));
4118 if (cst && points_to_local_or_readonly_memory_p (cst))
4120 (*parm_map)[i].parm_index = MODREF_LOCAL_MEMORY_PARM;
4121 continue;
4124 if (jf && jf->type == IPA_JF_PASS_THROUGH)
4126 (*parm_map)[i].parm_index
4127 = ipa_get_jf_pass_through_formal_id (jf);
4128 if (ipa_get_jf_pass_through_operation (jf) == NOP_EXPR)
4130 (*parm_map)[i].parm_offset_known = true;
4131 (*parm_map)[i].parm_offset = 0;
4133 else if (ipa_get_jf_pass_through_operation (jf)
4134 == POINTER_PLUS_EXPR
4135 && ptrdiff_tree_p (ipa_get_jf_pass_through_operand (jf),
4136 &(*parm_map)[i].parm_offset))
4137 (*parm_map)[i].parm_offset_known = true;
4138 else
4139 (*parm_map)[i].parm_offset_known = false;
4140 continue;
4142 if (jf && jf->type == IPA_JF_ANCESTOR)
4144 (*parm_map)[i].parm_index = ipa_get_jf_ancestor_formal_id (jf);
4145 (*parm_map)[i].parm_offset_known = true;
4146 gcc_checking_assert
4147 (!(ipa_get_jf_ancestor_offset (jf) & (BITS_PER_UNIT - 1)));
4148 (*parm_map)[i].parm_offset
4149 = ipa_get_jf_ancestor_offset (jf) >> LOG2_BITS_PER_UNIT;
4151 else
4152 (*parm_map)[i].parm_index = -1;
4154 if (dump_file)
4156 fprintf (dump_file, " Parm map: ");
4157 for (i = 0; i < count; i++)
4158 fprintf (dump_file, " %i", (*parm_map)[i].parm_index);
4159 fprintf (dump_file, "\n");
4161 return true;
4163 return false;
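/* A small illustration (hypothetical call, not from the sources): for
   a call foo (p, &local) in a caller whose first parameter is P, the
   map records parm_index 0 with a known offset of 0 for the first
   argument (a NOP pass-through), and MODREF_LOCAL_MEMORY_PARM for the
   second one because it provably points to local memory; arguments
   about which nothing is known get parm_index -1.  */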
4166 /* Map used to translate escape infos. */
4168 struct escape_map
4170 int parm_index;
4171 bool direct;
4174 /* Update escape map for E. */
4176 static void
4177 update_escape_summary_1 (cgraph_edge *e,
4178 vec <vec <escape_map>> &map,
4179 bool ignore_stores)
4181 escape_summary *sum = escape_summaries->get (e);
4182 if (!sum)
4183 return;
4184 auto_vec <escape_entry> old = sum->esc.copy ();
4185 sum->esc.release ();
4187 unsigned int i;
4188 escape_entry *ee;
4189 FOR_EACH_VEC_ELT (old, i, ee)
4191 unsigned int j;
4192 struct escape_map *em;
4193 /* TODO: We do not have jump functions for return slots, so we
4194 never propagate them to the outer function. */
4195 if (ee->parm_index >= (int)map.length ()
4196 || ee->parm_index < 0)
4197 continue;
4198 FOR_EACH_VEC_ELT (map[ee->parm_index], j, em)
4200 int min_flags = ee->min_flags;
4201 if (ee->direct && !em->direct)
4202 min_flags = deref_flags (min_flags, ignore_stores);
4203 struct escape_entry entry = {em->parm_index, ee->arg,
4204 min_flags,
4205 ee->direct & em->direct};
4206 sum->esc.safe_push (entry);
4209 if (!sum->esc.length ())
4210 escape_summaries->remove (e);
4213 /* Update escape map for NODE. */
4215 static void
4216 update_escape_summary (cgraph_node *node,
4217 vec <vec <escape_map>> &map,
4218 bool ignore_stores)
4220 if (!escape_summaries)
4221 return;
4222 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
4223 update_escape_summary_1 (e, map, ignore_stores);
4224 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
4226 if (!e->inline_failed)
4227 update_escape_summary (e->callee, map, ignore_stores);
4228 else
4229 update_escape_summary_1 (e, map, ignore_stores);
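/* Note that the walk above recurses into callees whose calls were
   already inlined (!inline_failed): edges inside an inline clone still
   carry their own escape summaries, and their parm_index values refer
   to the function being merged away, so they need the same
   translation.  */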
4233 /* Get the type of parameter I of DECL. This is only safe for special
4234 cases such as builtins for which we create fnspecs, because the type
4235 match is checked at fnspec creation time. */
4237 static tree
4238 get_parm_type (tree decl, unsigned int i)
4240 tree t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4242 for (unsigned int p = 0; p < i; p++)
4243 t = TREE_CHAIN (t);
4244 return TREE_VALUE (t);
4247 /* Return access mode for argument I of call E with FNSPEC. */
4249 static modref_access_node
4250 get_access_for_fnspec (cgraph_edge *e, attr_fnspec &fnspec,
4251 unsigned int i, modref_parm_map &map)
4253 tree size = NULL_TREE;
4254 unsigned int size_arg;
4256 if (!fnspec.arg_specified_p (i))
4258 else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
4260 cgraph_node *node = e->caller->inlined_to
4261 ? e->caller->inlined_to : e->caller;
4262 ipa_node_params *caller_parms_info = ipa_node_params_sum->get (node);
4263 ipa_edge_args *args = ipa_edge_args_sum->get (e);
4264 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, size_arg);
4266 if (jf)
4267 size = ipa_value_from_jfunc (caller_parms_info, jf,
4268 get_parm_type (e->callee->decl, size_arg));
4270 else if (fnspec.arg_access_size_given_by_type_p (i))
4271 size = TYPE_SIZE_UNIT (get_parm_type (e->callee->decl, i));
4272 modref_access_node a = {0, -1, -1,
4273 map.parm_offset, map.parm_index,
4274 map.parm_offset_known, 0};
4275 poly_int64 size_hwi;
4276 if (size
4277 && poly_int_tree_p (size, &size_hwi)
4278 && coeffs_in_range_p (size_hwi, 0,
4279 HOST_WIDE_INT_MAX / BITS_PER_UNIT))
4281 a.size = -1;
4282 a.max_size = size_hwi << LOG2_BITS_PER_UNIT;
4284 return a;
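/* Illustration (a sketch; the fnspec encoding itself is documented in
   attr-fnspec.h): for a builtin whose fnspec states that the maximum
   access size through argument I is given by another argument, the
   code above asks the jump functions for a known constant value of
   that size argument at this call site and, if one is found, records
   max_size accordingly while leaving the exact size unknown (-1).  */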
4287 /* Collapse loads and return true if something changed. */
4288 static bool
4289 collapse_loads (modref_summary *cur_summary,
4290 modref_summary_lto *cur_summary_lto)
4292 bool changed = false;
4294 if (cur_summary && !cur_summary->loads->every_base)
4296 cur_summary->loads->collapse ();
4297 changed = true;
4299 if (cur_summary_lto
4300 && !cur_summary_lto->loads->every_base)
4302 cur_summary_lto->loads->collapse ();
4303 changed = true;
4305 return changed;
4308 /* Collapse stores and return true if something changed. */
4310 static bool
4311 collapse_stores (modref_summary *cur_summary,
4312 modref_summary_lto *cur_summary_lto)
4314 bool changed = false;
4316 if (cur_summary && !cur_summary->stores->every_base)
4318 cur_summary->stores->collapse ();
4319 changed = true;
4321 if (cur_summary_lto
4322 && !cur_summary_lto->stores->every_base)
4324 cur_summary_lto->stores->collapse ();
4325 changed = true;
4327 return changed;
4330 /* Call E in NODE with ECF_FLAGS has no summary; update CUR_SUMMARY and
4331 CUR_SUMMARY_LTO accordingly. Return true if something changed. */
4333 static bool
4334 propagate_unknown_call (cgraph_node *node,
4335 cgraph_edge *e, int ecf_flags,
4336 modref_summary *cur_summary,
4337 modref_summary_lto *cur_summary_lto,
4338 bool nontrivial_scc)
4340 bool changed = false;
4341 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
4342 auto_vec <modref_parm_map, 32> parm_map;
4343 bool looping;
4345 if (e->callee
4346 && builtin_safe_for_const_function_p (&looping, e->callee->decl))
4348 if (looping && cur_summary && !cur_summary->side_effects)
4350 cur_summary->side_effects = true;
4351 changed = true;
4353 if (looping && cur_summary_lto && !cur_summary_lto->side_effects)
4355 cur_summary_lto->side_effects = true;
4356 changed = true;
4358 return changed;
4361 if (!(ecf_flags & (ECF_CONST | ECF_NOVOPS | ECF_PURE))
4362 || (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
4363 || nontrivial_scc)
4365 if (cur_summary && !cur_summary->side_effects)
4367 cur_summary->side_effects = true;
4368 changed = true;
4370 if (cur_summary_lto && !cur_summary_lto->side_effects)
4372 cur_summary_lto->side_effects = true;
4373 changed = true;
4375 if (cur_summary && !cur_summary->nondeterministic
4376 && !ignore_nondeterminism_p (node->decl, ecf_flags))
4378 cur_summary->nondeterministic = true;
4379 changed = true;
4381 if (cur_summary_lto && !cur_summary_lto->nondeterministic
4382 && !ignore_nondeterminism_p (node->decl, ecf_flags))
4384 cur_summary_lto->nondeterministic = true;
4385 changed = true;
4388 if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
4389 return changed;
4391 if (fnspec_sum
4392 && compute_parm_map (e, &parm_map))
4394 attr_fnspec fnspec (fnspec_sum->fnspec);
4396 gcc_checking_assert (fnspec.known_p ());
4397 if (fnspec.global_memory_read_p ())
4398 collapse_loads (cur_summary, cur_summary_lto);
4399 else
4401 tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl));
4402 for (unsigned i = 0; i < parm_map.length () && t;
4403 i++, t = TREE_CHAIN (t))
4404 if (!POINTER_TYPE_P (TREE_VALUE (t)))
4406 else if (!fnspec.arg_specified_p (i)
4407 || fnspec.arg_maybe_read_p (i))
4409 modref_parm_map map = parm_map[i];
4410 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
4411 continue;
4412 if (map.parm_index == MODREF_UNKNOWN_PARM)
4414 collapse_loads (cur_summary, cur_summary_lto);
4415 break;
4417 if (cur_summary)
4418 changed |= cur_summary->loads->insert
4419 (node->decl, 0, 0,
4420 get_access_for_fnspec (e, fnspec, i, map), false);
4421 if (cur_summary_lto)
4422 changed |= cur_summary_lto->loads->insert
4423 (node->decl, 0, 0,
4424 get_access_for_fnspec (e, fnspec, i, map), false);
4427 if (ignore_stores_p (node->decl, ecf_flags))
4429 else if (fnspec.global_memory_written_p ())
4430 collapse_stores (cur_summary, cur_summary_lto);
4431 else
4433 tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl));
4434 for (unsigned i = 0; i < parm_map.length () && t;
4435 i++, t = TREE_CHAIN (t))
4436 if (!POINTER_TYPE_P (TREE_VALUE (t)))
4438 else if (!fnspec.arg_specified_p (i)
4439 || fnspec.arg_maybe_written_p (i))
4441 modref_parm_map map = parm_map[i];
4442 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
4443 continue;
4444 if (map.parm_index == MODREF_UNKNOWN_PARM)
4446 collapse_stores (cur_summary, cur_summary_lto);
4447 break;
4449 if (cur_summary)
4450 changed |= cur_summary->stores->insert
4451 (node->decl, 0, 0,
4452 get_access_for_fnspec (e, fnspec, i, map), false);
4453 if (cur_summary_lto)
4454 changed |= cur_summary_lto->stores->insert
4455 (node->decl, 0, 0,
4456 get_access_for_fnspec (e, fnspec, i, map), false);
4459 if (fnspec.errno_maybe_written_p () && flag_errno_math)
4461 if (cur_summary && !cur_summary->writes_errno)
4463 cur_summary->writes_errno = true;
4464 changed = true;
4466 if (cur_summary_lto && !cur_summary_lto->writes_errno)
4468 cur_summary_lto->writes_errno = true;
4469 changed = true;
4472 return changed;
4474 if (dump_file)
4475 fprintf (dump_file, " collapsing loads\n");
4476 changed |= collapse_loads (cur_summary, cur_summary_lto);
4477 if (!ignore_stores_p (node->decl, ecf_flags))
4479 if (dump_file)
4480 fprintf (dump_file, " collapsing stores\n");
4481 changed |= collapse_stores (cur_summary, cur_summary_lto);
4483 return changed;
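/* The fallback ladder above, summarized: builtins that are safe for
   const functions cost at most a looping side effect; const/novops
   calls affect nothing further; calls with a known fnspec add only the
   loads and stores that the fnspec string permits; and everything else
   collapses the loads (and, unless stores may be ignored, also the
   stores) to "may access anything".  */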
4486 /* Maybe remove summaries of NODE pointed to by CUR_SUMMARY_PTR
4487 and CUR_SUMMARY_LTO_PTR if they are useless according to ECF_FLAGS. */
4489 static void
4490 remove_useless_summaries (cgraph_node *node,
4491 modref_summary **cur_summary_ptr,
4492 modref_summary_lto **cur_summary_lto_ptr,
4493 int ecf_flags)
4495 if (*cur_summary_ptr && !(*cur_summary_ptr)->useful_p (ecf_flags, false))
4497 optimization_summaries->remove (node);
4498 *cur_summary_ptr = NULL;
4500 if (*cur_summary_lto_ptr
4501 && !(*cur_summary_lto_ptr)->useful_p (ecf_flags, false))
4503 summaries_lto->remove (node);
4504 *cur_summary_lto_ptr = NULL;
4508 /* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
4509 and propagate loads/stores. */
4511 static bool
4512 modref_propagate_in_scc (cgraph_node *component_node)
4514 bool changed = true;
4515 bool first = true;
4516 int iteration = 0;
4518 while (changed)
4520 bool nontrivial_scc
4521 = ((struct ipa_dfs_info *) component_node->aux)->next_cycle;
4522 changed = false;
4523 for (struct cgraph_node *cur = component_node; cur;
4524 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4526 cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
4527 modref_summary *cur_summary = optimization_summaries
4528 ? optimization_summaries->get (node)
4529 : NULL;
4530 modref_summary_lto *cur_summary_lto = summaries_lto
4531 ? summaries_lto->get (node)
4532 : NULL;
4534 if (!cur_summary && !cur_summary_lto)
4535 continue;
4537 int cur_ecf_flags = flags_from_decl_or_type (node->decl);
4539 if (dump_file)
4540 fprintf (dump_file, " Processing %s%s%s\n",
4541 cur->dump_name (),
4542 TREE_READONLY (cur->decl) ? " (const)" : "",
4543 DECL_PURE_P (cur->decl) ? " (pure)" : "");
4545 for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
4547 if (dump_file)
4548 fprintf (dump_file, " Indirect call\n");
4549 if (propagate_unknown_call
4550 (node, e, e->indirect_info->ecf_flags,
4551 cur_summary, cur_summary_lto,
4552 nontrivial_scc))
4554 changed = true;
4555 remove_useless_summaries (node, &cur_summary,
4556 &cur_summary_lto,
4557 cur_ecf_flags);
4558 if (!cur_summary && !cur_summary_lto)
4559 break;
4563 if (!cur_summary && !cur_summary_lto)
4564 continue;
4566 for (cgraph_edge *callee_edge = cur->callees; callee_edge;
4567 callee_edge = callee_edge->next_callee)
4569 int flags = flags_from_decl_or_type (callee_edge->callee->decl);
4570 modref_summary *callee_summary = NULL;
4571 modref_summary_lto *callee_summary_lto = NULL;
4572 struct cgraph_node *callee;
4574 if (!callee_edge->inline_failed
4575 || ((flags & (ECF_CONST | ECF_NOVOPS))
4576 && !(flags & ECF_LOOPING_CONST_OR_PURE)))
4577 continue;
4579 /* Get the callee and its summary. */
4580 enum availability avail;
4581 callee = callee_edge->callee->function_or_virtual_thunk_symbol
4582 (&avail, cur);
4584 /* It is not necessary to re-process calls outside of the
4585 SCC component. */
4586 if (iteration > 0
4587 && (!callee->aux
4588 || ((struct ipa_dfs_info *)cur->aux)->scc_no
4589 != ((struct ipa_dfs_info *)callee->aux)->scc_no))
4590 continue;
4592 if (dump_file)
4593 fprintf (dump_file, " Call to %s\n",
4594 callee_edge->callee->dump_name ());
4596 bool ignore_stores = ignore_stores_p (cur->decl, flags);
4598 if (avail <= AVAIL_INTERPOSABLE)
4600 if (dump_file)
4601 fprintf (dump_file, " Call target interposable"
4602 " or not available\n");
4603 changed |= propagate_unknown_call
4604 (node, callee_edge, flags,
4605 cur_summary, cur_summary_lto,
4606 nontrivial_scc);
4607 if (!cur_summary && !cur_summary_lto)
4608 break;
4609 continue;
4612 /* We don't know anything about CALLEE, hence we cannot tell
4613 anything about the entire component. */
4615 if (cur_summary
4616 && !(callee_summary = optimization_summaries->get (callee)))
4618 if (dump_file)
4619 fprintf (dump_file, " No call target summary\n");
4620 changed |= propagate_unknown_call
4621 (node, callee_edge, flags,
4622 cur_summary, NULL,
4623 nontrivial_scc);
4625 if (cur_summary_lto
4626 && !(callee_summary_lto = summaries_lto->get (callee)))
4628 if (dump_file)
4629 fprintf (dump_file, " No call target summary\n");
4630 changed |= propagate_unknown_call
4631 (node, callee_edge, flags,
4632 NULL, cur_summary_lto,
4633 nontrivial_scc);
4636 if (callee_summary && !cur_summary->side_effects
4637 && (callee_summary->side_effects
4638 || callee_edge->recursive_p ()))
4640 cur_summary->side_effects = true;
4641 changed = true;
4643 if (callee_summary_lto && !cur_summary_lto->side_effects
4644 && (callee_summary_lto->side_effects
4645 || callee_edge->recursive_p ()))
4647 cur_summary_lto->side_effects = true;
4648 changed = true;
4650 if (callee_summary && !cur_summary->nondeterministic
4651 && callee_summary->nondeterministic
4652 && !ignore_nondeterminism_p (cur->decl, flags))
4654 cur_summary->nondeterministic = true;
4655 changed = true;
4657 if (callee_summary_lto && !cur_summary_lto->nondeterministic
4658 && callee_summary_lto->nondeterministic
4659 && !ignore_nondeterminism_p (cur->decl, flags))
4661 cur_summary_lto->nondeterministic = true;
4662 changed = true;
4664 if (flags & (ECF_CONST | ECF_NOVOPS))
4665 continue;
4667 /* We cannot safely optimize based on the summary of a callee if it
4668 does not always bind to the current def: it is possible that a
4669 memory load was optimized out earlier, which may not happen in
4670 the interposed variant. */
4671 if (!callee_edge->binds_to_current_def_p ())
4673 if (cur_summary && !cur_summary->calls_interposable)
4675 cur_summary->calls_interposable = true;
4676 changed = true;
4678 if (cur_summary_lto && !cur_summary_lto->calls_interposable)
4680 cur_summary_lto->calls_interposable = true;
4681 changed = true;
4683 if (dump_file)
4684 fprintf (dump_file, " May not bind local;"
4685 " collapsing loads\n");
4689 auto_vec <modref_parm_map, 32> parm_map;
4690 modref_parm_map chain_map;
4691 /* TODO: Once we get jump functions for static chains we could
4692 compute this. */
4693 chain_map.parm_index = MODREF_UNKNOWN_PARM;
4695 compute_parm_map (callee_edge, &parm_map);
4697 /* Merge in callee's information. */
4698 if (callee_summary)
4700 changed |= cur_summary->loads->merge
4701 (node->decl, callee_summary->loads,
4702 &parm_map, &chain_map, !first);
4703 if (!ignore_stores)
4705 changed |= cur_summary->stores->merge
4706 (node->decl, callee_summary->stores,
4707 &parm_map, &chain_map, !first);
4708 if (!cur_summary->writes_errno
4709 && callee_summary->writes_errno)
4711 cur_summary->writes_errno = true;
4712 changed = true;
4716 if (callee_summary_lto)
4718 changed |= cur_summary_lto->loads->merge
4719 (node->decl, callee_summary_lto->loads,
4720 &parm_map, &chain_map, !first);
4721 if (!ignore_stores)
4723 changed |= cur_summary_lto->stores->merge
4724 (node->decl, callee_summary_lto->stores,
4725 &parm_map, &chain_map, !first);
4726 if (!cur_summary_lto->writes_errno
4727 && callee_summary_lto->writes_errno)
4729 cur_summary_lto->writes_errno = true;
4730 changed = true;
4734 if (changed)
4735 remove_useless_summaries (node, &cur_summary,
4736 &cur_summary_lto,
4737 cur_ecf_flags);
4738 if (!cur_summary && !cur_summary_lto)
4739 break;
4740 if (dump_file && changed)
4742 if (cur_summary)
4743 cur_summary->dump (dump_file);
4744 if (cur_summary_lto)
4745 cur_summary_lto->dump (dump_file);
4746 dump_modref_edge_summaries (dump_file, node, 4);
4750 iteration++;
4751 first = false;
4753 if (dump_file)
4754 fprintf (dump_file,
4755 "Propagation finished in %i iterations\n", iteration);
4756 bool pureconst = false;
4757 for (struct cgraph_node *cur = component_node; cur;
4758 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4759 if (!cur->inlined_to && opt_for_fn (cur->decl, flag_ipa_pure_const))
4761 modref_summary *summary = optimization_summaries
4762 ? optimization_summaries->get (cur)
4763 : NULL;
4764 modref_summary_lto *summary_lto = summaries_lto
4765 ? summaries_lto->get (cur)
4766 : NULL;
4767 if (summary && !summary->stores->every_base && !summary->stores->bases
4768 && !summary->nondeterministic)
4770 if (!summary->loads->every_base && !summary->loads->bases
4771 && !summary->calls_interposable)
4772 pureconst |= ipa_make_function_const
4773 (cur, summary->side_effects, false);
4774 else
4775 pureconst |= ipa_make_function_pure
4776 (cur, summary->side_effects, false);
4778 if (summary_lto && !summary_lto->stores->every_base
4779 && !summary_lto->stores->bases && !summary_lto->nondeterministic)
4781 if (!summary_lto->loads->every_base && !summary_lto->loads->bases
4782 && !summary_lto->calls_interposable)
4783 pureconst |= ipa_make_function_const
4784 (cur, summary_lto->side_effects, false);
4785 else
4786 pureconst |= ipa_make_function_pure
4787 (cur, summary_lto->side_effects, false);
4790 return pureconst;
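/* A two-function SCC such as

     int a (int *p) { return *p ? b (p) : 0; }
     int b (int *p) { return a (p + 1); }

   (an editorial example) shows the iteration above at work: the
   summaries of A and B absorb each other's loads until neither
   changes; because the merged summaries contain no stores, both
   functions can afterwards be handed to ipa_make_function_pure.  */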
4793 /* Dump results of propagation in SCC rooted in COMPONENT_NODE. */
4795 static void
4796 modref_propagate_dump_scc (cgraph_node *component_node)
4798 for (struct cgraph_node *cur = component_node; cur;
4799 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4800 if (!cur->inlined_to)
4802 modref_summary *cur_summary = optimization_summaries
4803 ? optimization_summaries->get (cur)
4804 : NULL;
4805 modref_summary_lto *cur_summary_lto = summaries_lto
4806 ? summaries_lto->get (cur)
4807 : NULL;
4809 fprintf (dump_file, "Propagated modref for %s%s%s\n",
4810 cur->dump_name (),
4811 TREE_READONLY (cur->decl) ? " (const)" : "",
4812 DECL_PURE_P (cur->decl) ? " (pure)" : "");
4813 if (optimization_summaries)
4815 if (cur_summary)
4816 cur_summary->dump (dump_file);
4817 else
4818 fprintf (dump_file, " Not tracked\n");
4820 if (summaries_lto)
4822 if (cur_summary_lto)
4823 cur_summary_lto->dump (dump_file);
4824 else
4825 fprintf (dump_file, " Not tracked (lto)\n");
4830 /* Determine EAF flags known for call E with CALLEE_ECF_FLAGS and ARG. */
4833 implicit_eaf_flags_for_edge_and_arg (cgraph_edge *e, int callee_ecf_flags,
4834 bool ignore_stores, int arg)
4836 /* Returning the value is already accounted for during local propagation. */
4837 int implicit_flags = EAF_NOT_RETURNED_DIRECTLY
4838 | EAF_NOT_RETURNED_INDIRECTLY;
4839 if (ignore_stores)
4840 implicit_flags |= ignore_stores_eaf_flags;
4841 if (callee_ecf_flags & ECF_PURE)
4842 implicit_flags |= implicit_pure_eaf_flags;
4843 if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
4844 implicit_flags |= implicit_const_eaf_flags;
4845 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
4846 if (fnspec_sum)
4848 attr_fnspec fnspec (fnspec_sum->fnspec);
4849 implicit_flags |= fnspec.arg_eaf_flags (arg);
4851 return implicit_flags;
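/* For example, an argument passed to a pure callee implicitly carries
   implicit_pure_eaf_flags on top of the two EAF_NOT_RETURNED flags set
   unconditionally above, plus whatever flags the callee's fnspec
   string guarantees for that argument.  */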
4854 /* Process escapes in SUM and merge SUMMARY to CUR_SUMMARY
4855 and SUMMARY_LTO to CUR_SUMMARY_LTO.
4856 Return true if something changed. */
4858 static bool
4859 modref_merge_call_site_flags (escape_summary *sum,
4860 modref_summary *cur_summary,
4861 modref_summary_lto *cur_summary_lto,
4862 modref_summary *summary,
4863 modref_summary_lto *summary_lto,
4864 tree caller,
4865 cgraph_edge *e,
4866 int caller_ecf_flags,
4867 int callee_ecf_flags,
4868 bool binds_to_current_def)
4870 escape_entry *ee;
4871 unsigned int i;
4872 bool changed = false;
4873 bool ignore_stores = ignore_stores_p (caller, callee_ecf_flags);
4875 /* Return early if we have no useful info to propagate. */
4876 if ((!cur_summary || !cur_summary->arg_flags.length ())
4877 && (!cur_summary_lto || !cur_summary_lto->arg_flags.length ()))
4878 return false;
4880 FOR_EACH_VEC_ELT (sum->esc, i, ee)
4882 int flags = 0;
4883 int flags_lto = 0;
4884 int implicit_flags = implicit_eaf_flags_for_edge_and_arg
4885 (e, callee_ecf_flags, ignore_stores, ee->arg);
4887 if (summary && ee->arg < summary->arg_flags.length ())
4888 flags = summary->arg_flags[ee->arg];
4889 if (summary_lto
4890 && ee->arg < summary_lto->arg_flags.length ())
4891 flags_lto = summary_lto->arg_flags[ee->arg];
4892 if (!ee->direct)
4894 flags = deref_flags (flags, ignore_stores);
4895 flags_lto = deref_flags (flags_lto, ignore_stores);
4897 if (ignore_stores)
4898 implicit_flags |= ignore_stores_eaf_flags;
4899 if (callee_ecf_flags & ECF_PURE)
4900 implicit_flags |= implicit_pure_eaf_flags;
4901 if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
4902 implicit_flags |= implicit_const_eaf_flags;
4903 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
4904 if (fnspec_sum)
4906 attr_fnspec fnspec (fnspec_sum->fnspec);
4907 implicit_flags |= fnspec.arg_eaf_flags (ee->arg);
4909 if (!ee->direct)
4910 implicit_flags = deref_flags (implicit_flags, ignore_stores);
4911 flags |= implicit_flags;
4912 flags_lto |= implicit_flags;
4913 if (!binds_to_current_def && (flags || flags_lto))
4915 flags = interposable_eaf_flags (flags, implicit_flags);
4916 flags_lto = interposable_eaf_flags (flags_lto, implicit_flags);
4918 if (!(flags & EAF_UNUSED)
4919 && cur_summary && ee->parm_index < (int)cur_summary->arg_flags.length ())
4921 eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
4922 ? cur_summary->retslot_flags
4923 : ee->parm_index == MODREF_STATIC_CHAIN_PARM
4924 ? cur_summary->static_chain_flags
4925 : cur_summary->arg_flags[ee->parm_index];
4926 if ((f & flags) != f)
4928 f = remove_useless_eaf_flags
4929 (f & flags, caller_ecf_flags,
4930 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller))));
4931 changed = true;
4934 if (!(flags_lto & EAF_UNUSED)
4935 && cur_summary_lto
4936 && ee->parm_index < (int)cur_summary_lto->arg_flags.length ())
4938 eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
4939 ? cur_summary_lto->retslot_flags
4940 : ee->parm_index == MODREF_STATIC_CHAIN_PARM
4941 ? cur_summary_lto->static_chain_flags
4942 : cur_summary_lto->arg_flags[ee->parm_index];
4943 if ((f & flags_lto) != f)
4945 f = remove_useless_eaf_flags
4946 (f & flags_lto, caller_ecf_flags,
4947 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller))));
4948 changed = true;
4952 return changed;
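/* The parm_index values tested above may also be the synthetic
   negative positions MODREF_RETSLOT_PARM and MODREF_STATIC_CHAIN_PARM,
   which is why the flag updates dispatch to retslot_flags and
   static_chain_flags instead of indexing arg_flags directly.  */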
4955 /* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
4956 and propagate arg flags. */
4958 static void
4959 modref_propagate_flags_in_scc (cgraph_node *component_node)
4961 bool changed = true;
4962 int iteration = 0;
4964 while (changed)
4966 changed = false;
4967 for (struct cgraph_node *cur = component_node; cur;
4968 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4970 cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
4971 modref_summary *cur_summary = optimization_summaries
4972 ? optimization_summaries->get (node)
4973 : NULL;
4974 modref_summary_lto *cur_summary_lto = summaries_lto
4975 ? summaries_lto->get (node)
4976 : NULL;
4978 if (!cur_summary && !cur_summary_lto)
4979 continue;
4980 int caller_ecf_flags = flags_from_decl_or_type (cur->decl);
4982 if (dump_file)
4983 fprintf (dump_file, " Processing %s%s%s\n",
4984 cur->dump_name (),
4985 TREE_READONLY (cur->decl) ? " (const)" : "",
4986 DECL_PURE_P (cur->decl) ? " (pure)" : "");
4988 for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
4990 escape_summary *sum = escape_summaries->get (e);
4992 if (!sum || (e->indirect_info->ecf_flags
4993 & (ECF_CONST | ECF_NOVOPS)))
4994 continue;
4996 changed |= modref_merge_call_site_flags
4997 (sum, cur_summary, cur_summary_lto,
4998 NULL, NULL,
4999 node->decl,
5001 caller_ecf_flags,
5002 e->indirect_info->ecf_flags,
5003 false);
5006 if (!cur_summary && !cur_summary_lto)
5007 continue;
5009 for (cgraph_edge *callee_edge = cur->callees; callee_edge;
5010 callee_edge = callee_edge->next_callee)
5012 int ecf_flags = flags_from_decl_or_type
5013 (callee_edge->callee->decl);
5014 modref_summary *callee_summary = NULL;
5015 modref_summary_lto *callee_summary_lto = NULL;
5016 struct cgraph_node *callee;
5018 if (ecf_flags & (ECF_CONST | ECF_NOVOPS)
5019 || !callee_edge->inline_failed)
5020 continue;
5022 /* Get the callee and its summary. */
5023 enum availability avail;
5024 callee = callee_edge->callee->function_or_virtual_thunk_symbol
5025 (&avail, cur);
5027 /* It is not necessary to re-process calls outside of the
5028 SCC component. */
5029 if (iteration > 0
5030 && (!callee->aux
5031 || ((struct ipa_dfs_info *)cur->aux)->scc_no
5032 != ((struct ipa_dfs_info *)callee->aux)->scc_no))
5033 continue;
5035 escape_summary *sum = escape_summaries->get (callee_edge);
5036 if (!sum)
5037 continue;
5039 if (dump_file)
5040 fprintf (dump_file, " Call to %s\n",
5041 callee_edge->callee->dump_name ());
5043 if (avail <= AVAIL_INTERPOSABLE
5044 || callee_edge->call_stmt_cannot_inline_p)
5046 else
5048 if (cur_summary)
5049 callee_summary = optimization_summaries->get (callee);
5050 if (cur_summary_lto)
5051 callee_summary_lto = summaries_lto->get (callee);
5053 changed |= modref_merge_call_site_flags
5054 (sum, cur_summary, cur_summary_lto,
5055 callee_summary, callee_summary_lto,
5056 node->decl,
5057 callee_edge,
5058 caller_ecf_flags,
5059 ecf_flags,
5060 callee->binds_to_current_def_p ());
5061 if (dump_file && changed)
5063 if (cur_summary)
5064 cur_summary->dump (dump_file);
5065 if (cur_summary_lto)
5066 cur_summary_lto->dump (dump_file);
5070 iteration++;
5072 if (dump_file)
5073 fprintf (dump_file,
5074 "Propagation of flags finished in %i iterations\n", iteration);
5077 } /* ANON namespace. */
5079 /* Call EDGE was inlined; merge the summary from the callee to the caller. */
5081 void
5082 ipa_merge_modref_summary_after_inlining (cgraph_edge *edge)
5084 if (!summaries && !summaries_lto)
5085 return;
5087 struct cgraph_node *to = (edge->caller->inlined_to
5088 ? edge->caller->inlined_to : edge->caller);
5089 class modref_summary *to_info = summaries ? summaries->get (to) : NULL;
5090 class modref_summary_lto *to_info_lto = summaries_lto
5091 ? summaries_lto->get (to) : NULL;
5093 if (!to_info && !to_info_lto)
5095 if (summaries)
5096 summaries->remove (edge->callee);
5097 if (summaries_lto)
5098 summaries_lto->remove (edge->callee);
5099 remove_modref_edge_summaries (edge->callee);
5100 return;
5103 class modref_summary *callee_info = summaries ? summaries->get (edge->callee)
5104 : NULL;
5105 class modref_summary_lto *callee_info_lto
5106 = summaries_lto ? summaries_lto->get (edge->callee) : NULL;
5107 int flags = flags_from_decl_or_type (edge->callee->decl);
5108 /* Combine in outer flags. */
5109 cgraph_node *n;
5110 for (n = edge->caller; n->inlined_to; n = n->callers->caller)
5111 flags |= flags_from_decl_or_type (n->decl);
5112 flags |= flags_from_decl_or_type (n->decl);
5113 bool ignore_stores = ignore_stores_p (edge->caller->decl, flags);
5115 if (!callee_info && to_info)
5117 if (!(flags & (ECF_CONST | ECF_NOVOPS)))
5118 to_info->loads->collapse ();
5119 if (!ignore_stores)
5120 to_info->stores->collapse ();
5122 if (!callee_info_lto && to_info_lto)
5124 if (!(flags & (ECF_CONST | ECF_NOVOPS)))
5125 to_info_lto->loads->collapse ();
5126 if (!ignore_stores)
5127 to_info_lto->stores->collapse ();
5129 if (callee_info || callee_info_lto)
5131 auto_vec <modref_parm_map, 32> parm_map;
5132 modref_parm_map chain_map;
5133 /* TODO: Once we get jump functions for static chains we could
5134 compute parm_index. */
5136 compute_parm_map (edge, &parm_map);
5138 if (!ignore_stores)
5140 if (to_info && callee_info)
5141 to_info->stores->merge (to->decl, callee_info->stores, &parm_map,
5142 &chain_map, false);
5143 if (to_info_lto && callee_info_lto)
5144 to_info_lto->stores->merge (to->decl, callee_info_lto->stores,
5145 &parm_map, &chain_map, false);
5147 if (!(flags & (ECF_CONST | ECF_NOVOPS)))
5149 if (to_info && callee_info)
5150 to_info->loads->merge (to->decl, callee_info->loads, &parm_map,
5151 &chain_map, false);
5152 if (to_info_lto && callee_info_lto)
5153 to_info_lto->loads->merge (to->decl, callee_info_lto->loads,
5154 &parm_map, &chain_map, false);
5158 /* Now merge escape summaries.
5159 For every escape to the callee we need to merge callee flags
5160 and remap the callee's escapes. */
5161 class escape_summary *sum = escape_summaries->get (edge);
5162 int max_escape = -1;
5163 escape_entry *ee;
5164 unsigned int i;
5166 if (sum && !(flags & (ECF_CONST | ECF_NOVOPS)))
5167 FOR_EACH_VEC_ELT (sum->esc, i, ee)
5168 if ((int)ee->arg > max_escape)
5169 max_escape = ee->arg;
5171 auto_vec <vec <struct escape_map>, 32> emap (max_escape + 1);
5172 emap.safe_grow (max_escape + 1, true);
5173 for (i = 0; (int)i < max_escape + 1; i++)
5174 emap[i] = vNULL;
5176 if (sum && !(flags & (ECF_CONST | ECF_NOVOPS)))
5177 FOR_EACH_VEC_ELT (sum->esc, i, ee)
5179 bool needed = false;
5180 int implicit_flags = implicit_eaf_flags_for_edge_and_arg
5181 (edge, flags, ignore_stores,
5182 ee->arg);
5183 if (!ee->direct)
5184 implicit_flags = deref_flags (implicit_flags, ignore_stores);
5185 if (to_info && (int)to_info->arg_flags.length () > ee->parm_index)
5187 int flags = callee_info
5188 && callee_info->arg_flags.length () > ee->arg
5189 ? callee_info->arg_flags[ee->arg] : 0;
5190 if (!ee->direct)
5191 flags = deref_flags (flags, ignore_stores);
5192 flags |= ee->min_flags | implicit_flags;
5193 eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
5194 ? to_info->retslot_flags
5195 : ee->parm_index == MODREF_STATIC_CHAIN_PARM
5196 ? to_info->static_chain_flags
5197 : to_info->arg_flags[ee->parm_index];
5198 f &= flags;
5199 if (f)
5200 needed = true;
5202 if (to_info_lto
5203 && (int)to_info_lto->arg_flags.length () > ee->parm_index)
5205 int flags = callee_info_lto
5206 && callee_info_lto->arg_flags.length () > ee->arg
5207 ? callee_info_lto->arg_flags[ee->arg] : 0;
5208 if (!ee->direct)
5209 flags = deref_flags (flags, ignore_stores);
5210 flags |= ee->min_flags | implicit_flags;
5211 eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
5212 ? to_info_lto->retslot_flags
5213 : ee->parm_index == MODREF_STATIC_CHAIN_PARM
5214 ? to_info_lto->static_chain_flags
5215 : to_info_lto->arg_flags[ee->parm_index];
5216 f &= flags;
5217 if (f)
5218 needed = true;
5220 struct escape_map entry = {ee->parm_index, ee->direct};
5221 if (needed)
5222 emap[ee->arg].safe_push (entry);
5224 update_escape_summary (edge->callee, emap, ignore_stores);
5225 for (i = 0; (int)i < max_escape + 1; i++)
5226 emap[i].release ();
5227 if (sum)
5228 escape_summaries->remove (edge);
5230 if (summaries)
5232 if (to_info && !to_info->useful_p (flags))
5234 if (dump_file)
5235 fprintf (dump_file, "Removed mod-ref summary for %s\n",
5236 to->dump_name ());
5237 summaries->remove (to);
5238 to_info = NULL;
5240 else if (to_info && dump_file)
5242 if (dump_file)
5243 fprintf (dump_file, "Updated mod-ref summary for %s\n",
5244 to->dump_name ());
5245 to_info->dump (dump_file);
5247 if (callee_info)
5248 summaries->remove (edge->callee);
5250 if (summaries_lto)
5252 if (to_info_lto && !to_info_lto->useful_p (flags))
5254 if (dump_file)
5255 fprintf (dump_file, "Removed mod-ref summary for %s\n",
5256 to->dump_name ());
5257 summaries_lto->remove (to);
5258 to_info_lto = NULL;
5260 else if (to_info_lto && dump_file)
5262 if (dump_file)
5263 fprintf (dump_file, "Updated mod-ref summary for %s\n",
5264 to->dump_name ());
5265 to_info_lto->dump (dump_file);
5267 if (callee_info_lto)
5268 summaries_lto->remove (edge->callee);
5270 if (!to_info && !to_info_lto)
5271 remove_modref_edge_summaries (to);
5272 return;
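/* An illustrative scenario (hypothetical functions): after inlining
   wrap (q), where WRAP only stores through its parameter, WRAP's store
   tree is merged into the surviving caller with a parm map translating
   WRAP's parameter 0 to the caller's actual argument; escape summaries
   on edges of the inlined body are remapped through EMAP, the edge's
   own escape summary is removed, and summaries that stopped being
   useful are dropped.  */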
5275 /* Run the IPA pass. This takes a function's summaries and calls and
5276 constructs new summaries representing a transitive closure, so that
5277 the summary of an analyzed function describes the loads and stores
5278 done by the function itself and by every function it calls. */
5280 unsigned int
5281 pass_ipa_modref::execute (function *)
5283 if (!summaries && !summaries_lto)
5284 return 0;
5285 bool pureconst = false;
5287 if (optimization_summaries)
5288 ggc_delete (optimization_summaries);
5289 optimization_summaries = summaries;
5290 summaries = NULL;
5292 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
5293 symtab->cgraph_count);
5294 int order_pos;
5295 order_pos = ipa_reduced_postorder (order, true, ignore_edge);
5296 int i;
5298 /* Iterate over all strongly connected components in post-order. */
5299 for (i = 0; i < order_pos; i++)
5301 /* Get the component's representative. That's just any node in the
5302 component from which we can traverse the entire component. */
5303 struct cgraph_node *component_node = order[i];
5305 if (dump_file)
5306 fprintf (dump_file, "\n\nStart of SCC component\n");
5308 pureconst |= modref_propagate_in_scc (component_node);
5309 modref_propagate_flags_in_scc (component_node);
5310 if (optimization_summaries)
5311 for (struct cgraph_node *cur = component_node; cur;
5312 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
5313 if (modref_summary *sum = optimization_summaries->get (cur))
5314 sum->finalize (cur->decl);
5315 if (dump_file)
5316 modref_propagate_dump_scc (component_node);
5318 cgraph_node *node;
5319 FOR_EACH_FUNCTION (node)
5320 update_signature (node);
5321 if (summaries_lto)
5322 ((modref_summaries_lto *)summaries_lto)->propagated = true;
5323 ipa_free_postorder_info ();
5324 free (order);
5325 delete fnspec_summaries;
5326 fnspec_summaries = NULL;
5327 delete escape_summaries;
5328 escape_summaries = NULL;
5330 /* If we possibly made constructors const/pure we may need to remove
5331 them. */
5332 return pureconst ? TODO_remove_functions : 0;
5335 /* Summaries must stay alive until the end of compilation. */
5337 void
5338 ipa_modref_c_finalize ()
5340 if (optimization_summaries)
5341 ggc_delete (optimization_summaries);
5342 optimization_summaries = NULL;
5343 if (summaries_lto)
5344 ggc_delete (summaries_lto);
5345 summaries_lto = NULL;
5346 if (fnspec_summaries)
5347 delete fnspec_summaries;
5348 fnspec_summaries = NULL;
5349 if (escape_summaries)
5350 delete escape_summaries;
5351 escape_summaries = NULL;
5354 #include "gt-ipa-modref.h"