1 /* Search for references that a functions loads or stores.
2 Copyright (C) 2020-2021 Free Software Foundation, Inc.
3 Contributed by David Cepelik and Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* Mod/ref pass records summary about loads and stores performed by the
22 function. This is later used by alias analysis to disambiguate memory
23 accesses across function calls.
This file contains a tree pass and an IPA pass.  Both perform the same
analysis; however, the tree pass is executed during early and late
optimization passes to propagate info downwards in the compilation order,
while the IPA pass propagates across the callgraph, is able to handle
recursion, and works on the whole program during link-time analysis.
LTO mode differs from the local mode by not recording alias sets but types
that are translated to alias sets later.  This is necessary in order to
stream the information, because the alias sets are rebuilt at stream-in time
and may not correspond to the ones seen during analysis.  For this reason
part of the analysis is duplicated.
37 The following information is computed
38 1) load/store access tree described in ipa-modref-tree.h
39 This is used by tree-ssa-alias to disambiguate load/stores
2) EAF flags used by points-to analysis (in tree-ssa-structalias)
and defined in tree-core.h.
42 and stored to optimization_summaries.
44 There are multiple summaries computed and used during the propagation:
45 - summaries holds summaries from analysis to IPA propagation
47 - summaries_lto is same as summaries but holds them in a format
48 that can be streamed (as described above).
49 - fnspec_summary holds fnspec strings for call. This is
50 necessary because gimple_call_fnspec performs additional
51 analysis except for looking callee fndecl.
52 - escape_summary holds escape points for given call edge.
That is a vector recording what function parameters
may escape to a function call (and with what parameter index).  */
58 #include "coretypes.h"
62 #include "alloc-pool.h"
63 #include "tree-pass.h"
64 #include "gimple-iterator.h"
67 #include "ipa-utils.h"
68 #include "symbol-summary.h"
69 #include "gimple-pretty-print.h"
70 #include "gimple-walk.h"
71 #include "print-tree.h"
72 #include "tree-streamer.h"
75 #include "ipa-modref-tree.h"
76 #include "ipa-modref.h"
77 #include "value-range.h"
79 #include "ipa-fnsummary.h"
80 #include "attr-fnspec.h"
81 #include "symtab-clones.h"
82 #include "gimple-ssa.h"
83 #include "tree-phinodes.h"
84 #include "tree-ssa-operands.h"
85 #include "ssa-iterators.h"
86 #include "stringpool.h"
87 #include "tree-ssanames.h"
95 /* We record fnspec specifiers for call edges since they depends on actual
114 /* Summary holding fnspec string for a given call. */
116 class fnspec_summaries_t
: public call_summary
<fnspec_summary
*>
119 fnspec_summaries_t (symbol_table
*symtab
)
120 : call_summary
<fnspec_summary
*> (symtab
) {}
121 /* Hook that is called by summary when an edge is duplicated. */
122 virtual void duplicate (cgraph_edge
*,
127 dst
->fnspec
= xstrdup (src
->fnspec
);
131 static fnspec_summaries_t
*fnspec_summaries
= NULL
;
133 /* Escape summary holds a vector of param indexes that escape to
137 /* Parameter that escapes at a given call. */
139 /* Argument it escapes to. */
141 /* Minimal flags known about the argument. */
142 eaf_flags_t min_flags
;
143 /* Does it escape directly or indirectly? */
147 /* Dump EAF flags. */
150 dump_eaf_flags (FILE *out
, int flags
, bool newline
= true)
152 if (flags
& EAF_UNUSED
)
153 fprintf (out
, " unused");
154 if (flags
& EAF_NO_DIRECT_CLOBBER
)
155 fprintf (out
, " no_direct_clobber");
156 if (flags
& EAF_NO_INDIRECT_CLOBBER
)
157 fprintf (out
, " no_indirect_clobber");
158 if (flags
& EAF_NO_DIRECT_ESCAPE
)
159 fprintf (out
, " no_direct_escape");
160 if (flags
& EAF_NO_INDIRECT_ESCAPE
)
161 fprintf (out
, " no_indirect_escape");
162 if (flags
& EAF_NOT_RETURNED_DIRECTLY
)
163 fprintf (out
, " not_returned_directly");
164 if (flags
& EAF_NOT_RETURNED_INDIRECTLY
)
165 fprintf (out
, " not_returned_indirectly");
166 if (flags
& EAF_NO_DIRECT_READ
)
167 fprintf (out
, " no_direct_read");
168 if (flags
& EAF_NO_INDIRECT_READ
)
169 fprintf (out
, " no_indirect_read");
174 struct escape_summary
176 auto_vec
<escape_entry
> esc
;
177 void dump (FILE *out
)
179 for (unsigned int i
= 0; i
< esc
.length (); i
++)
181 fprintf (out
, " parm %i arg %i %s min:",
184 esc
[i
].direct
? "(direct)" : "(indirect)");
185 dump_eaf_flags (out
, esc
[i
].min_flags
, false);
191 class escape_summaries_t
: public call_summary
<escape_summary
*>
194 escape_summaries_t (symbol_table
*symtab
)
195 : call_summary
<escape_summary
*> (symtab
) {}
196 /* Hook that is called by summary when an edge is duplicated. */
197 virtual void duplicate (cgraph_edge
*,
202 dst
->esc
= src
->esc
.copy ();
206 static escape_summaries_t
*escape_summaries
= NULL
;
208 } /* ANON namespace: GTY annotated summaries can not be anonymous. */
211 /* Class (from which there is one global instance) that holds modref summaries
212 for all analyzed functions. */
214 class GTY((user
)) modref_summaries
215 : public fast_function_summary
<modref_summary
*, va_gc
>
218 modref_summaries (symbol_table
*symtab
)
219 : fast_function_summary
<modref_summary
*, va_gc
> (symtab
) {}
220 virtual void insert (cgraph_node
*, modref_summary
*state
);
221 virtual void duplicate (cgraph_node
*src_node
,
222 cgraph_node
*dst_node
,
223 modref_summary
*src_data
,
224 modref_summary
*dst_data
);
225 static modref_summaries
*create_ggc (symbol_table
*symtab
)
227 return new (ggc_alloc_no_dtor
<modref_summaries
> ())
228 modref_summaries (symtab
);
232 class modref_summary_lto
;
234 /* Class (from which there is one global instance) that holds modref summaries
235 for all analyzed functions. */
237 class GTY((user
)) modref_summaries_lto
238 : public fast_function_summary
<modref_summary_lto
*, va_gc
>
241 modref_summaries_lto (symbol_table
*symtab
)
242 : fast_function_summary
<modref_summary_lto
*, va_gc
> (symtab
),
243 propagated (false) {}
244 virtual void insert (cgraph_node
*, modref_summary_lto
*state
);
245 virtual void duplicate (cgraph_node
*src_node
,
246 cgraph_node
*dst_node
,
247 modref_summary_lto
*src_data
,
248 modref_summary_lto
*dst_data
);
249 static modref_summaries_lto
*create_ggc (symbol_table
*symtab
)
251 return new (ggc_alloc_no_dtor
<modref_summaries_lto
> ())
252 modref_summaries_lto (symtab
);
257 /* Global variable holding all modref summaries
258 (from analysis to IPA propagation time). */
260 static GTY(()) fast_function_summary
<modref_summary
*, va_gc
>
263 /* Global variable holding all modref optimization summaries
264 (from IPA propagation time or used by local optimization pass). */
266 static GTY(()) fast_function_summary
<modref_summary
*, va_gc
>
267 *optimization_summaries
;
269 /* LTO summaries hold info from analysis to LTO streaming or from LTO
270 stream-in through propagation to LTO stream-out. */
272 static GTY(()) fast_function_summary
<modref_summary_lto
*, va_gc
>
275 /* Summary for a single function which this pass produces. */
277 modref_summary::modref_summary ()
278 : loads (NULL
), stores (NULL
), retslot_flags (0), static_chain_flags (0),
279 writes_errno (false), side_effects (false), nondeterministic (false),
280 calls_interposable (false), global_memory_read (false),
281 global_memory_written (false), try_dse (false)
285 modref_summary::~modref_summary ()
293 /* Remove all flags from EAF_FLAGS that are implied by ECF_FLAGS and not
294 useful to track. If returns_void is true moreover clear
297 remove_useless_eaf_flags (int eaf_flags
, int ecf_flags
, bool returns_void
)
299 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
300 eaf_flags
&= ~implicit_const_eaf_flags
;
301 else if (ecf_flags
& ECF_PURE
)
302 eaf_flags
&= ~implicit_pure_eaf_flags
;
303 else if ((ecf_flags
& ECF_NORETURN
) || returns_void
)
304 eaf_flags
&= ~(EAF_NOT_RETURNED_DIRECTLY
| EAF_NOT_RETURNED_INDIRECTLY
);
308 /* Return true if FLAGS holds some useful information. */
311 eaf_flags_useful_p (vec
<eaf_flags_t
> &flags
, int ecf_flags
)
313 for (unsigned i
= 0; i
< flags
.length (); i
++)
314 if (remove_useless_eaf_flags (flags
[i
], ecf_flags
, false))
319 /* Return true if summary is potentially useful for optimization.
320 If CHECK_FLAGS is false assume that arg_flags are useful. */
323 modref_summary::useful_p (int ecf_flags
, bool check_flags
)
325 if (arg_flags
.length () && !check_flags
)
327 if (check_flags
&& eaf_flags_useful_p (arg_flags
, ecf_flags
))
329 arg_flags
.release ();
330 if (check_flags
&& remove_useless_eaf_flags (retslot_flags
, ecf_flags
, false))
333 && remove_useless_eaf_flags (static_chain_flags
, ecf_flags
, false))
335 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
336 return ((!side_effects
|| !nondeterministic
)
337 && (ecf_flags
& ECF_LOOPING_CONST_OR_PURE
));
338 if (loads
&& !loads
->every_base
)
342 if (ecf_flags
& ECF_PURE
)
343 return ((!side_effects
|| !nondeterministic
)
344 && (ecf_flags
& ECF_LOOPING_CONST_OR_PURE
));
345 return stores
&& !stores
->every_base
;
348 /* Single function summary used for LTO. */
350 typedef modref_tree
<tree
> modref_records_lto
;
351 struct GTY(()) modref_summary_lto
/* Loads and stores in functions using types rather than alias sets.

   This is necessary to make the information streamable for LTO but is also
   more verbose and thus more likely to hit the limits.  */
357 modref_records_lto
*loads
;
358 modref_records_lto
*stores
;
359 auto_vec
<modref_access_node
> GTY((skip
)) kills
;
360 auto_vec
<eaf_flags_t
> GTY((skip
)) arg_flags
;
361 eaf_flags_t retslot_flags
;
362 eaf_flags_t static_chain_flags
;
363 unsigned writes_errno
: 1;
364 unsigned side_effects
: 1;
365 unsigned nondeterministic
: 1;
366 unsigned calls_interposable
: 1;
368 modref_summary_lto ();
369 ~modref_summary_lto ();
371 bool useful_p (int ecf_flags
, bool check_flags
= true);
374 /* Summary for a single function which this pass produces. */
376 modref_summary_lto::modref_summary_lto ()
377 : loads (NULL
), stores (NULL
), retslot_flags (0), static_chain_flags (0),
378 writes_errno (false), side_effects (false), nondeterministic (false),
379 calls_interposable (false)
383 modref_summary_lto::~modref_summary_lto ()
392 /* Return true if lto summary is potentially useful for optimization.
393 If CHECK_FLAGS is false assume that arg_flags are useful. */
396 modref_summary_lto::useful_p (int ecf_flags
, bool check_flags
)
398 if (arg_flags
.length () && !check_flags
)
400 if (check_flags
&& eaf_flags_useful_p (arg_flags
, ecf_flags
))
402 arg_flags
.release ();
403 if (check_flags
&& remove_useless_eaf_flags (retslot_flags
, ecf_flags
, false))
406 && remove_useless_eaf_flags (static_chain_flags
, ecf_flags
, false))
408 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
409 return ((!side_effects
|| !nondeterministic
)
410 && (ecf_flags
& ECF_LOOPING_CONST_OR_PURE
));
411 if (loads
&& !loads
->every_base
)
415 if (ecf_flags
& ECF_PURE
)
416 return ((!side_effects
|| !nondeterministic
)
417 && (ecf_flags
& ECF_LOOPING_CONST_OR_PURE
));
418 return stores
&& !stores
->every_base
;
421 /* Dump records TT to OUT. */
424 dump_records (modref_records
*tt
, FILE *out
)
428 fprintf (out
, " Every base\n");
432 modref_base_node
<alias_set_type
> *n
;
433 FOR_EACH_VEC_SAFE_ELT (tt
->bases
, i
, n
)
435 fprintf (out
, " Base %i: alias set %i\n", (int)i
, n
->base
);
438 fprintf (out
, " Every ref\n");
442 modref_ref_node
<alias_set_type
> *r
;
443 FOR_EACH_VEC_SAFE_ELT (n
->refs
, j
, r
)
445 fprintf (out
, " Ref %i: alias set %i\n", (int)j
, r
->ref
);
448 fprintf (out
, " Every access\n");
452 modref_access_node
*a
;
453 FOR_EACH_VEC_SAFE_ELT (r
->accesses
, k
, a
)
455 fprintf (out
, " access:");
462 /* Dump records TT to OUT. */
465 dump_lto_records (modref_records_lto
*tt
, FILE *out
)
469 fprintf (out
, " Every base\n");
473 modref_base_node
<tree
> *n
;
474 FOR_EACH_VEC_SAFE_ELT (tt
->bases
, i
, n
)
476 fprintf (out
, " Base %i:", (int)i
);
477 print_generic_expr (dump_file
, n
->base
);
478 fprintf (out
, " (alias set %i)\n",
479 n
->base
? get_alias_set (n
->base
) : 0);
482 fprintf (out
, " Every ref\n");
486 modref_ref_node
<tree
> *r
;
487 FOR_EACH_VEC_SAFE_ELT (n
->refs
, j
, r
)
489 fprintf (out
, " Ref %i:", (int)j
);
490 print_generic_expr (dump_file
, r
->ref
);
491 fprintf (out
, " (alias set %i)\n",
492 r
->ref
? get_alias_set (r
->ref
) : 0);
495 fprintf (out
, " Every access\n");
499 modref_access_node
*a
;
500 FOR_EACH_VEC_SAFE_ELT (r
->accesses
, k
, a
)
502 fprintf (out
, " access:");
509 /* Dump all escape points of NODE to OUT. */
512 dump_modref_edge_summaries (FILE *out
, cgraph_node
*node
, int depth
)
515 if (!escape_summaries
)
517 for (cgraph_edge
*e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
519 class escape_summary
*sum
= escape_summaries
->get (e
);
522 fprintf (out
, "%*sIndirect call %i in %s escapes:",
523 depth
, "", i
, node
->dump_name ());
528 for (cgraph_edge
*e
= node
->callees
; e
; e
= e
->next_callee
)
530 if (!e
->inline_failed
)
531 dump_modref_edge_summaries (out
, e
->callee
, depth
+ 1);
532 class escape_summary
*sum
= escape_summaries
->get (e
);
535 fprintf (out
, "%*sCall %s->%s escapes:", depth
, "",
536 node
->dump_name (), e
->callee
->dump_name ());
539 class fnspec_summary
*fsum
= fnspec_summaries
->get (e
);
542 fprintf (out
, "%*sCall %s->%s fnspec: %s\n", depth
, "",
543 node
->dump_name (), e
->callee
->dump_name (),
549 /* Remove all call edge summaries associated with NODE. */
552 remove_modref_edge_summaries (cgraph_node
*node
)
554 if (!escape_summaries
)
556 for (cgraph_edge
*e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
557 escape_summaries
->remove (e
);
558 for (cgraph_edge
*e
= node
->callees
; e
; e
= e
->next_callee
)
560 if (!e
->inline_failed
)
561 remove_modref_edge_summaries (e
->callee
);
562 escape_summaries
->remove (e
);
563 fnspec_summaries
->remove (e
);
570 modref_summary::dump (FILE *out
)
574 fprintf (out
, " loads:\n");
575 dump_records (loads
, out
);
579 fprintf (out
, " stores:\n");
580 dump_records (stores
, out
);
584 fprintf (out
, " kills:\n");
585 for (auto kill
: kills
)
592 fprintf (out
, " Writes errno\n");
594 fprintf (out
, " Side effects\n");
595 if (nondeterministic
)
596 fprintf (out
, " Nondeterministic\n");
597 if (calls_interposable
)
598 fprintf (out
, " Calls interposable\n");
599 if (global_memory_read
)
600 fprintf (out
, " Global memory read\n");
601 if (global_memory_written
)
602 fprintf (out
, " Global memory written\n");
604 fprintf (out
, " Try dse\n");
605 if (arg_flags
.length ())
607 for (unsigned int i
= 0; i
< arg_flags
.length (); i
++)
610 fprintf (out
, " parm %i flags:", i
);
611 dump_eaf_flags (out
, arg_flags
[i
]);
616 fprintf (out
, " Retslot flags:");
617 dump_eaf_flags (out
, retslot_flags
);
619 if (static_chain_flags
)
621 fprintf (out
, " Static chain flags:");
622 dump_eaf_flags (out
, static_chain_flags
);
629 modref_summary_lto::dump (FILE *out
)
631 fprintf (out
, " loads:\n");
632 dump_lto_records (loads
, out
);
633 fprintf (out
, " stores:\n");
634 dump_lto_records (stores
, out
);
637 fprintf (out
, " kills:\n");
638 for (auto kill
: kills
)
645 fprintf (out
, " Writes errno\n");
647 fprintf (out
, " Side effects\n");
648 if (nondeterministic
)
649 fprintf (out
, " Nondeterministic\n");
650 if (calls_interposable
)
651 fprintf (out
, " Calls interposable\n");
652 if (arg_flags
.length ())
654 for (unsigned int i
= 0; i
< arg_flags
.length (); i
++)
657 fprintf (out
, " parm %i flags:", i
);
658 dump_eaf_flags (out
, arg_flags
[i
]);
663 fprintf (out
, " Retslot flags:");
664 dump_eaf_flags (out
, retslot_flags
);
666 if (static_chain_flags
)
668 fprintf (out
, " Static chain flags:");
669 dump_eaf_flags (out
, static_chain_flags
);
673 /* Called after summary is produced and before it is used by local analysis.
674 Can be called multiple times in case summary needs to update signature.
675 FUN is decl of function summary is attached to. */
677 modref_summary::finalize (tree fun
)
679 global_memory_read
= !loads
|| loads
->global_access_p ();
680 global_memory_written
= !stores
|| stores
->global_access_p ();
682 /* We can do DSE if we know function has no side effects and
683 we can analyse all stores. Disable dse if there are too many
685 if (side_effects
|| global_memory_written
|| writes_errno
)
691 int num_tests
= 0, max_tests
692 = opt_for_fn (fun
, param_modref_max_tests
);
693 modref_base_node
<alias_set_type
> *base_node
;
694 modref_ref_node
<alias_set_type
> *ref_node
;
695 modref_access_node
*access_node
;
696 FOR_EACH_VEC_SAFE_ELT (stores
->bases
, i
, base_node
)
698 if (base_node
->every_ref
)
703 FOR_EACH_VEC_SAFE_ELT (base_node
->refs
, j
, ref_node
)
705 if (base_node
->every_ref
)
710 FOR_EACH_VEC_SAFE_ELT (ref_node
->accesses
, k
, access_node
)
711 if (num_tests
++ > max_tests
712 || !access_node
->parm_offset_known
)
724 if (loads
->every_base
)
729 for (auto base_node
: loads
->bases
)
731 if (base_node
->every_ref
)
734 for (auto ref_node
: base_node
->refs
)
735 if (ref_node
->every_access
)
738 load_accesses
+= ref_node
->accesses
->length ();
743 /* Get function summary for FUNC if it exists, return NULL otherwise. */
746 get_modref_function_summary (cgraph_node
*func
)
748 /* Avoid creation of the summary too early (e.g. when front-end calls us). */
749 if (!optimization_summaries
)
752 /* A single function body may be represented by multiple symbols with
753 different visibility. For example, if FUNC is an interposable alias,
754 we don't want to return anything, even if we have summary for the target
756 enum availability avail
;
757 func
= func
->function_or_virtual_thunk_symbol
758 (&avail
, current_function_decl
?
759 cgraph_node::get (current_function_decl
) : NULL
);
760 if (avail
<= AVAIL_INTERPOSABLE
)
763 modref_summary
*r
= optimization_summaries
->get (func
);
767 /* Get function summary for CALL if it exists, return NULL otherwise.
768 If non-null set interposed to indicate whether function may not
769 bind to current def. In this case sometimes loads from function
770 needs to be ignored. */
773 get_modref_function_summary (gcall
*call
, bool *interposed
)
775 tree callee
= gimple_call_fndecl (call
);
778 struct cgraph_node
*node
= cgraph_node::get (callee
);
781 modref_summary
*r
= get_modref_function_summary (node
);
783 *interposed
= r
->calls_interposable
784 || !node
->binds_to_current_def_p ();
791 /* Return true if ECF flags says that nondeterminsm can be ignored. */
794 ignore_nondeterminism_p (tree caller
, int flags
)
796 if (flags
& (ECF_CONST
| ECF_PURE
))
798 if ((flags
& (ECF_NORETURN
| ECF_NOTHROW
)) == (ECF_NORETURN
| ECF_NOTHROW
)
799 || (!opt_for_fn (caller
, flag_exceptions
) && (flags
& ECF_NORETURN
)))
804 /* Return true if ECF flags says that return value can be ignored. */
807 ignore_retval_p (tree caller
, int flags
)
809 if ((flags
& (ECF_NORETURN
| ECF_NOTHROW
)) == (ECF_NORETURN
| ECF_NOTHROW
)
810 || (!opt_for_fn (caller
, flag_exceptions
) && (flags
& ECF_NORETURN
)))
815 /* Return true if ECF flags says that stores can be ignored. */
818 ignore_stores_p (tree caller
, int flags
)
820 if (flags
& (ECF_PURE
| ECF_CONST
| ECF_NOVOPS
))
822 if ((flags
& (ECF_NORETURN
| ECF_NOTHROW
)) == (ECF_NORETURN
| ECF_NOTHROW
)
823 || (!opt_for_fn (caller
, flag_exceptions
) && (flags
& ECF_NORETURN
)))
828 /* Determine parm_map for PTR which is supposed to be a pointer. */
831 parm_map_for_ptr (tree op
)
835 struct modref_parm_map parm_map
;
838 parm_map
.parm_offset_known
= false;
839 parm_map
.parm_offset
= 0;
841 offset_known
= unadjusted_ptr_and_unit_offset (op
, &op
, &offset
);
842 if (TREE_CODE (op
) == SSA_NAME
843 && SSA_NAME_IS_DEFAULT_DEF (op
)
844 && TREE_CODE (SSA_NAME_VAR (op
)) == PARM_DECL
)
848 if (cfun
->static_chain_decl
849 && op
== ssa_default_def (cfun
, cfun
->static_chain_decl
))
850 index
= MODREF_STATIC_CHAIN_PARM
;
852 for (tree t
= DECL_ARGUMENTS (current_function_decl
);
853 t
!= SSA_NAME_VAR (op
); t
= DECL_CHAIN (t
))
855 parm_map
.parm_index
= index
;
856 parm_map
.parm_offset_known
= offset_known
;
857 parm_map
.parm_offset
= offset
;
859 else if (points_to_local_or_readonly_memory_p (op
))
860 parm_map
.parm_index
= MODREF_LOCAL_MEMORY_PARM
;
861 /* Memory allocated in the function is not visible to caller before the
862 call and thus we do not need to record it as load/stores/kills. */
863 else if (TREE_CODE (op
) == SSA_NAME
864 && (call
= dyn_cast
<gcall
*>(SSA_NAME_DEF_STMT (op
))) != NULL
865 && gimple_call_flags (call
) & ECF_MALLOC
)
866 parm_map
.parm_index
= MODREF_LOCAL_MEMORY_PARM
;
868 parm_map
.parm_index
= MODREF_UNKNOWN_PARM
;
872 /* Analyze memory accesses (loads, stores and kills) performed
873 by the function. Set also side_effects, calls_interposable
874 and nondeterminism flags. */
876 class modref_access_analysis
879 modref_access_analysis (bool ipa
, modref_summary
*summary
,
880 modref_summary_lto
*summary_lto
)
881 : m_summary (summary
), m_summary_lto (summary_lto
), m_ipa (ipa
)
886 bool set_side_effects ();
887 bool set_nondeterministic ();
888 static modref_access_node
get_access (ao_ref
*ref
);
889 static void record_access (modref_records
*, ao_ref
*, modref_access_node
&);
890 static void record_access_lto (modref_records_lto
*, ao_ref
*,
891 modref_access_node
&a
);
892 bool record_access_p (tree
);
893 bool record_unknown_load ();
894 bool record_unknown_store ();
895 bool merge_call_side_effects (gimple
*, modref_summary
*,
896 cgraph_node
*, bool);
897 modref_access_node
get_access_for_fnspec (gcall
*, attr_fnspec
&,
898 unsigned int, modref_parm_map
&);
899 void process_fnspec (gcall
*);
900 void analyze_call (gcall
*);
901 static bool analyze_load (gimple
*, tree
, tree
, void *);
902 static bool analyze_store (gimple
*, tree
, tree
, void *);
903 void analyze_stmt (gimple
*, bool);
/* Summary being computed.
   We work either with m_summary or m_summary_lto.  Never on both.  */
908 modref_summary
*m_summary
;
909 modref_summary_lto
*m_summary_lto
;
/* Recursive calls need simplistic dataflow after analysis finished.
   Collect all calls into this vector during analysis and later process
   them in propagate.  */
913 auto_vec
<gimple
*, 32> m_recursive_calls
;
914 /* ECF flags of function being analysed. */
916 /* True if IPA propagation will be done later. */
918 /* Set true if statement currently analysed is known to be
919 executed each time function is called. */
920 bool m_always_executed
;
923 /* Set side_effects flag and return if someting changed. */
926 modref_access_analysis::set_side_effects ()
928 bool changed
= false;
930 if (m_summary
&& !m_summary
->side_effects
)
932 m_summary
->side_effects
= true;
935 if (m_summary_lto
&& !m_summary_lto
->side_effects
)
937 m_summary_lto
->side_effects
= true;
943 /* Set nondeterministic flag and return if someting changed. */
946 modref_access_analysis::set_nondeterministic ()
948 bool changed
= false;
950 if (m_summary
&& !m_summary
->nondeterministic
)
952 m_summary
->side_effects
= m_summary
->nondeterministic
= true;
955 if (m_summary_lto
&& !m_summary_lto
->nondeterministic
)
957 m_summary_lto
->side_effects
= m_summary_lto
->nondeterministic
= true;
963 /* Construct modref_access_node from REF. */
966 modref_access_analysis::get_access (ao_ref
*ref
)
970 base
= ao_ref_base (ref
);
971 modref_access_node a
= {ref
->offset
, ref
->size
, ref
->max_size
,
972 0, MODREF_UNKNOWN_PARM
, false, 0};
973 if (TREE_CODE (base
) == MEM_REF
|| TREE_CODE (base
) == TARGET_MEM_REF
)
976 modref_parm_map m
= parm_map_for_ptr (TREE_OPERAND (base
, 0));
978 a
.parm_index
= m
.parm_index
;
979 if (a
.parm_index
!= MODREF_UNKNOWN_PARM
&& TREE_CODE (memref
) == MEM_REF
)
982 = wi::to_poly_wide (TREE_OPERAND
983 (memref
, 1)).to_shwi (&a
.parm_offset
);
984 if (a
.parm_offset_known
&& m
.parm_offset_known
)
985 a
.parm_offset
+= m
.parm_offset
;
987 a
.parm_offset_known
= false;
991 a
.parm_index
= MODREF_UNKNOWN_PARM
;
995 /* Record access into the modref_records data structure. */
998 modref_access_analysis::record_access (modref_records
*tt
,
1000 modref_access_node
&a
)
1002 alias_set_type base_set
= !flag_strict_aliasing
? 0
1003 : ao_ref_base_alias_set (ref
);
1004 alias_set_type ref_set
= !flag_strict_aliasing
? 0
1005 : (ao_ref_alias_set (ref
));
1008 fprintf (dump_file
, " - Recording base_set=%i ref_set=%i ",
1012 tt
->insert (current_function_decl
, base_set
, ref_set
, a
, false);
1015 /* IPA version of record_access_tree. */
1018 modref_access_analysis::record_access_lto (modref_records_lto
*tt
, ao_ref
*ref
,
1019 modref_access_node
&a
)
1021 /* get_alias_set sometimes use different type to compute the alias set
1022 than TREE_TYPE (base). Do same adjustments. */
1023 tree base_type
= NULL_TREE
, ref_type
= NULL_TREE
;
1024 if (flag_strict_aliasing
)
1029 while (handled_component_p (base
))
1030 base
= TREE_OPERAND (base
, 0);
1032 base_type
= reference_alias_ptr_type_1 (&base
);
1035 base_type
= TREE_TYPE (base
);
1037 base_type
= TYPE_REF_CAN_ALIAS_ALL (base_type
)
1038 ? NULL_TREE
: TREE_TYPE (base_type
);
1040 tree ref_expr
= ref
->ref
;
1041 ref_type
= reference_alias_ptr_type_1 (&ref_expr
);
1044 ref_type
= TREE_TYPE (ref_expr
);
1046 ref_type
= TYPE_REF_CAN_ALIAS_ALL (ref_type
)
1047 ? NULL_TREE
: TREE_TYPE (ref_type
);
1049 /* Sanity check that we are in sync with what get_alias_set does. */
1050 gcc_checking_assert ((!base_type
&& !ao_ref_base_alias_set (ref
))
1051 || get_alias_set (base_type
)
1052 == ao_ref_base_alias_set (ref
));
1053 gcc_checking_assert ((!ref_type
&& !ao_ref_alias_set (ref
))
1054 || get_alias_set (ref_type
)
1055 == ao_ref_alias_set (ref
));
1057 /* Do not bother to record types that have no meaningful alias set.
1058 Also skip variably modified types since these go to local streams. */
1059 if (base_type
&& (!get_alias_set (base_type
)
1060 || variably_modified_type_p (base_type
, NULL_TREE
)))
1061 base_type
= NULL_TREE
;
1062 if (ref_type
&& (!get_alias_set (ref_type
)
1063 || variably_modified_type_p (ref_type
, NULL_TREE
)))
1064 ref_type
= NULL_TREE
;
1068 fprintf (dump_file
, " - Recording base type:");
1069 print_generic_expr (dump_file
, base_type
);
1070 fprintf (dump_file
, " (alias set %i) ref type:",
1071 base_type
? get_alias_set (base_type
) : 0);
1072 print_generic_expr (dump_file
, ref_type
);
1073 fprintf (dump_file
, " (alias set %i) ",
1074 ref_type
? get_alias_set (ref_type
) : 0);
1078 tt
->insert (current_function_decl
, base_type
, ref_type
, a
, false);
1081 /* Returns true if and only if we should store the access to EXPR.
1082 Some accesses, e.g. loads from automatic variables, are not interesting. */
1085 modref_access_analysis::record_access_p (tree expr
)
1087 if (TREE_THIS_VOLATILE (expr
))
1090 fprintf (dump_file
, " (volatile; marking nondeterministic) ");
1091 set_nondeterministic ();
1093 if (cfun
->can_throw_non_call_exceptions
1094 && tree_could_throw_p (expr
))
1097 fprintf (dump_file
, " (can throw; marking side effects) ");
1098 set_side_effects ();
1101 if (refs_local_or_readonly_memory_p (expr
))
1104 fprintf (dump_file
, " - Read-only or local, ignoring.\n");
1110 /* Collapse loads and return true if something changed. */
1113 modref_access_analysis::record_unknown_load ()
1115 bool changed
= false;
1117 if (m_summary
&& !m_summary
->loads
->every_base
)
1119 m_summary
->loads
->collapse ();
1122 if (m_summary_lto
&& !m_summary_lto
->loads
->every_base
)
1124 m_summary_lto
->loads
->collapse ();
1130 /* Collapse loads and return true if something changed. */
1133 modref_access_analysis::record_unknown_store ()
1135 bool changed
= false;
1137 if (m_summary
&& !m_summary
->stores
->every_base
)
1139 m_summary
->stores
->collapse ();
1142 if (m_summary_lto
&& !m_summary_lto
->stores
->every_base
)
1144 m_summary_lto
->stores
->collapse ();
1150 /* Merge side effects of call STMT to function with CALLEE_SUMMARY.
1151 Return true if something changed.
1152 If IGNORE_STORES is true, do not merge stores.
1153 If RECORD_ADJUSTMENTS is true cap number of adjustments to
1154 a given access to make dataflow finite. */
1157 modref_access_analysis::merge_call_side_effects
1158 (gimple
*stmt
, modref_summary
*callee_summary
,
1159 cgraph_node
*callee_node
, bool record_adjustments
)
1161 int flags
= gimple_call_flags (stmt
);
1163 /* Nothing to do for non-looping cont functions. */
1164 if ((flags
& (ECF_CONST
| ECF_NOVOPS
))
1165 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
1168 bool changed
= false;
1171 fprintf (dump_file
, " - Merging side effects of %s\n",
1172 callee_node
->dump_name ());
1174 /* Merge side effects and non-determinism.
1175 PURE/CONST flags makes functions deterministic and if there is
1176 no LOOPING_CONST_OR_PURE they also have no side effects. */
1177 if (!(flags
& (ECF_CONST
| ECF_NOVOPS
| ECF_PURE
))
1178 || (flags
& ECF_LOOPING_CONST_OR_PURE
))
1180 if (!m_summary
->side_effects
&& callee_summary
->side_effects
)
1183 fprintf (dump_file
, " - merging side effects.\n");
1184 m_summary
->side_effects
= true;
1187 if (!m_summary
->nondeterministic
&& callee_summary
->nondeterministic
1188 && !ignore_nondeterminism_p (current_function_decl
, flags
))
1191 fprintf (dump_file
, " - merging nondeterministic.\n");
1192 m_summary
->nondeterministic
= true;
1197 /* For const functions we are done. */
1198 if (flags
& (ECF_CONST
| ECF_NOVOPS
))
1201 /* Merge calls_interposable flags. */
1202 if (!m_summary
->calls_interposable
&& callee_summary
->calls_interposable
)
1205 fprintf (dump_file
, " - merging calls interposable.\n");
1206 m_summary
->calls_interposable
= true;
1210 if (!callee_node
->binds_to_current_def_p () && !m_summary
->calls_interposable
)
1213 fprintf (dump_file
, " - May be interposed.\n");
1214 m_summary
->calls_interposable
= true;
1218 /* Now merge the actual load, store and kill vectors. For this we need
1219 to compute map translating new parameters to old. */
1221 fprintf (dump_file
, " Parm map:");
1223 auto_vec
<modref_parm_map
, 32> parm_map
;
1224 parm_map
.safe_grow_cleared (gimple_call_num_args (stmt
), true);
1225 for (unsigned i
= 0; i
< gimple_call_num_args (stmt
); i
++)
1227 parm_map
[i
] = parm_map_for_ptr (gimple_call_arg (stmt
, i
));
1230 fprintf (dump_file
, " %i", parm_map
[i
].parm_index
);
1231 if (parm_map
[i
].parm_offset_known
)
1233 fprintf (dump_file
, " offset:");
1234 print_dec ((poly_int64_pod
)parm_map
[i
].parm_offset
,
1240 modref_parm_map chain_map
;
1241 if (gimple_call_chain (stmt
))
1243 chain_map
= parm_map_for_ptr (gimple_call_chain (stmt
));
1246 fprintf (dump_file
, "static chain %i", chain_map
.parm_index
);
1247 if (chain_map
.parm_offset_known
)
1249 fprintf (dump_file
, " offset:");
1250 print_dec ((poly_int64_pod
)chain_map
.parm_offset
,
1256 fprintf (dump_file
, "\n");
1258 /* Kills can me merged in only if we know the function is going to be
1260 if (m_always_executed
1261 && callee_summary
->kills
.length ()
1262 && (!cfun
->can_throw_non_call_exceptions
1263 || !stmt_could_throw_p (cfun
, stmt
)))
1265 /* Watch for self recursive updates. */
1266 auto_vec
<modref_access_node
, 32> saved_kills
;
1268 saved_kills
.reserve_exact (callee_summary
->kills
.length ());
1269 saved_kills
.splice (callee_summary
->kills
);
1270 for (auto kill
: saved_kills
)
1272 if (kill
.parm_index
>= (int)parm_map
.length ())
1275 = kill
.parm_index
== MODREF_STATIC_CHAIN_PARM
1277 : parm_map
[kill
.parm_index
];
1278 if (m
.parm_index
== MODREF_LOCAL_MEMORY_PARM
1279 || m
.parm_index
== MODREF_UNKNOWN_PARM
1280 || m
.parm_index
== MODREF_RETSLOT_PARM
1281 || !m
.parm_offset_known
)
1283 modref_access_node n
= kill
;
1284 n
.parm_index
= m
.parm_index
;
1285 n
.parm_offset
+= m
.parm_offset
;
1286 if (modref_access_node::insert_kill (m_summary
->kills
, n
,
1287 record_adjustments
))
1292 /* Merge in loads. */
1293 changed
|= m_summary
->loads
->merge (current_function_decl
,
1294 callee_summary
->loads
,
1295 &parm_map
, &chain_map
,
1296 record_adjustments
);
1297 /* Merge in stores. */
1298 if (!ignore_stores_p (current_function_decl
, flags
))
1300 changed
|= m_summary
->stores
->merge (current_function_decl
,
1301 callee_summary
->stores
,
1302 &parm_map
, &chain_map
,
1303 record_adjustments
);
1304 if (!m_summary
->writes_errno
1305 && callee_summary
->writes_errno
)
1307 m_summary
->writes_errno
= true;
1314 /* Return access mode for argument I of call STMT with FNSPEC. */
1317 modref_access_analysis::get_access_for_fnspec (gcall
*call
, attr_fnspec
&fnspec
,
1319 modref_parm_map
&map
)
1321 tree size
= NULL_TREE
;
1322 unsigned int size_arg
;
1324 if (!fnspec
.arg_specified_p (i
))
1326 else if (fnspec
.arg_max_access_size_given_by_arg_p (i
, &size_arg
))
1327 size
= gimple_call_arg (call
, size_arg
);
1328 else if (fnspec
.arg_access_size_given_by_type_p (i
))
1330 tree callee
= gimple_call_fndecl (call
);
1331 tree t
= TYPE_ARG_TYPES (TREE_TYPE (callee
));
1333 for (unsigned int p
= 0; p
< i
; p
++)
1335 size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t
)));
1337 modref_access_node a
= {0, -1, -1,
1338 map
.parm_offset
, map
.parm_index
,
1339 map
.parm_offset_known
, 0};
1340 poly_int64 size_hwi
;
1342 && poly_int_tree_p (size
, &size_hwi
)
1343 && coeffs_in_range_p (size_hwi
, 0,
1344 HOST_WIDE_INT_MAX
/ BITS_PER_UNIT
))
1347 a
.max_size
= size_hwi
<< LOG2_BITS_PER_UNIT
;
1352 /* Apply side effects of call STMT to CUR_SUMMARY using FNSPEC.
1353 If IGNORE_STORES is true ignore them.
1354 Return false if no useful summary can be produced. */
1357 modref_access_analysis::process_fnspec (gcall
*call
)
1359 int flags
= gimple_call_flags (call
);
1361 /* PURE/CONST flags makes functions deterministic and if there is
1362 no LOOPING_CONST_OR_PURE they also have no side effects. */
1363 if (!(flags
& (ECF_CONST
| ECF_NOVOPS
| ECF_PURE
))
1364 || (flags
& ECF_LOOPING_CONST_OR_PURE
)
1365 || (cfun
->can_throw_non_call_exceptions
1366 && stmt_could_throw_p (cfun
, call
)))
1368 set_side_effects ();
1369 if (!ignore_nondeterminism_p (current_function_decl
, flags
))
1370 set_nondeterministic ();
1373 /* For const functions we are done. */
1374 if (flags
& (ECF_CONST
| ECF_NOVOPS
))
1377 attr_fnspec fnspec
= gimple_call_fnspec (call
);
1378 /* If there is no fnpec we know nothing about loads & stores. */
1379 if (!fnspec
.known_p ())
1381 if (dump_file
&& gimple_call_builtin_p (call
, BUILT_IN_NORMAL
))
1382 fprintf (dump_file
, " Builtin with no fnspec: %s\n",
1383 IDENTIFIER_POINTER (DECL_NAME (gimple_call_fndecl (call
))));
1384 record_unknown_load ();
1385 if (!ignore_stores_p (current_function_decl
, flags
))
1386 record_unknown_store ();
1389 /* Process fnspec. */
1390 if (fnspec
.global_memory_read_p ())
1391 record_unknown_load ();
1394 for (unsigned int i
= 0; i
< gimple_call_num_args (call
); i
++)
1395 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call
, i
))))
1397 else if (!fnspec
.arg_specified_p (i
)
1398 || fnspec
.arg_maybe_read_p (i
))
1400 modref_parm_map map
= parm_map_for_ptr
1401 (gimple_call_arg (call
, i
));
1403 if (map
.parm_index
== MODREF_LOCAL_MEMORY_PARM
)
1405 if (map
.parm_index
== MODREF_UNKNOWN_PARM
)
1407 record_unknown_load ();
1410 modref_access_node a
= get_access_for_fnspec (call
, fnspec
, i
, map
);
1411 if (a
.parm_index
== MODREF_LOCAL_MEMORY_PARM
)
1414 m_summary
->loads
->insert (current_function_decl
, 0, 0, a
, false);
1416 m_summary_lto
->loads
->insert (current_function_decl
, 0, 0, a
,
1420 if (ignore_stores_p (current_function_decl
, flags
))
1422 if (fnspec
.global_memory_written_p ())
1423 record_unknown_store ();
1426 for (unsigned int i
= 0; i
< gimple_call_num_args (call
); i
++)
1427 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call
, i
))))
1429 else if (!fnspec
.arg_specified_p (i
)
1430 || fnspec
.arg_maybe_written_p (i
))
1432 modref_parm_map map
= parm_map_for_ptr
1433 (gimple_call_arg (call
, i
));
1435 if (map
.parm_index
== MODREF_LOCAL_MEMORY_PARM
)
1437 if (map
.parm_index
== MODREF_UNKNOWN_PARM
)
1439 record_unknown_store ();
1442 modref_access_node a
= get_access_for_fnspec (call
, fnspec
, i
, map
);
1443 if (a
.parm_index
== MODREF_LOCAL_MEMORY_PARM
)
1446 m_summary
->stores
->insert (current_function_decl
, 0, 0, a
, false);
1448 m_summary_lto
->stores
->insert (current_function_decl
,
1451 if (fnspec
.errno_maybe_written_p () && flag_errno_math
)
1454 m_summary
->writes_errno
= true;
1456 m_summary_lto
->writes_errno
= true;
1461 /* Analyze function call STMT in function F.
1462 Remember recursive calls in RECURSIVE_CALLS. */
1465 modref_access_analysis::analyze_call (gcall
*stmt
)
1467 /* Check flags on the function call. In certain cases, analysis can be
1469 int flags
= gimple_call_flags (stmt
);
1471 if ((flags
& (ECF_CONST
| ECF_NOVOPS
))
1472 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
1476 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
1477 "except for args.\n");
1481 /* Next, we try to get the callee's function declaration. The goal is to
1482 merge their summary with ours. */
1483 tree callee
= gimple_call_fndecl (stmt
);
1485 /* Check if this is an indirect call. */
1489 fprintf (dump_file
, gimple_call_internal_p (stmt
)
1490 ? " - Internal call" : " - Indirect call.\n");
1491 process_fnspec (stmt
);
1494 /* We only need to handle internal calls in IPA mode. */
1495 gcc_checking_assert (!m_summary_lto
&& !m_ipa
);
1497 struct cgraph_node
*callee_node
= cgraph_node::get_create (callee
);
1499 /* If this is a recursive call, the target summary is the same as ours, so
1500 there's nothing to do. */
1501 if (recursive_call_p (current_function_decl
, callee
))
1503 m_recursive_calls
.safe_push (stmt
);
1504 set_side_effects ();
1506 fprintf (dump_file
, " - Skipping recursive call.\n");
1510 gcc_assert (callee_node
!= NULL
);
1512 /* Get the function symbol and its availability. */
1513 enum availability avail
;
1514 callee_node
= callee_node
->function_symbol (&avail
);
1516 if (builtin_safe_for_const_function_p (&looping
, callee
))
1519 set_side_effects ();
1521 fprintf (dump_file
, " - Builtin is safe for const.\n");
1524 if (avail
<= AVAIL_INTERPOSABLE
)
1528 " - Function availability <= AVAIL_INTERPOSABLE.\n");
1529 process_fnspec (stmt
);
1533 /* Get callee's modref summary. As above, if there's no summary, we either
1534 have to give up or, if stores are ignored, we can just purge loads. */
1535 modref_summary
*callee_summary
= optimization_summaries
->get (callee_node
);
1536 if (!callee_summary
)
1539 fprintf (dump_file
, " - No modref summary available for callee.\n");
1540 process_fnspec (stmt
);
1544 merge_call_side_effects (stmt
, callee_summary
, callee_node
, false);
1549 /* Helper for analyze_stmt. */
1552 modref_access_analysis::analyze_load (gimple
*, tree
, tree op
, void *data
)
1554 modref_access_analysis
*t
= (modref_access_analysis
*)data
;
1558 fprintf (dump_file
, " - Analyzing load: ");
1559 print_generic_expr (dump_file
, op
);
1560 fprintf (dump_file
, "\n");
1563 if (!t
->record_access_p (op
))
1567 ao_ref_init (&r
, op
);
1568 modref_access_node a
= get_access (&r
);
1569 if (a
.parm_index
== MODREF_LOCAL_MEMORY_PARM
)
1573 t
->record_access (t
->m_summary
->loads
, &r
, a
);
1574 if (t
->m_summary_lto
)
1575 t
->record_access_lto (t
->m_summary_lto
->loads
, &r
, a
);
1579 /* Helper for analyze_stmt. */
1582 modref_access_analysis::analyze_store (gimple
*stmt
, tree
, tree op
, void *data
)
1584 modref_access_analysis
*t
= (modref_access_analysis
*)data
;
1588 fprintf (dump_file
, " - Analyzing store: ");
1589 print_generic_expr (dump_file
, op
);
1590 fprintf (dump_file
, "\n");
1593 if (!t
->record_access_p (op
))
1597 ao_ref_init (&r
, op
);
1598 modref_access_node a
= get_access (&r
);
1599 if (a
.parm_index
== MODREF_LOCAL_MEMORY_PARM
)
1603 t
->record_access (t
->m_summary
->stores
, &r
, a
);
1604 if (t
->m_summary_lto
)
1605 t
->record_access_lto (t
->m_summary_lto
->stores
, &r
, a
);
1606 if (t
->m_always_executed
1607 && a
.useful_for_kill_p ()
1608 && (!cfun
->can_throw_non_call_exceptions
1609 || !stmt_could_throw_p (cfun
, stmt
)))
1612 fprintf (dump_file
, " - Recording kill\n");
1614 modref_access_node::insert_kill (t
->m_summary
->kills
, a
, false);
1615 if (t
->m_summary_lto
)
1616 modref_access_node::insert_kill (t
->m_summary_lto
->kills
, a
, false);
1621 /* Analyze statement STMT of function F.
1622 If IPA is true do not merge in side effects of calls. */
1625 modref_access_analysis::analyze_stmt (gimple
*stmt
, bool always_executed
)
1627 m_always_executed
= always_executed
;
1628 /* In general we can not ignore clobbers because they are barriers for code
1629 motion, however after inlining it is safe to do because local optimization
1630 passes do not consider clobbers from other functions.
1631 Similar logic is in ipa-pure-const.c. */
1632 if ((m_ipa
|| cfun
->after_inlining
) && gimple_clobber_p (stmt
))
1634 if (always_executed
&& record_access_p (gimple_assign_lhs (stmt
)))
1637 ao_ref_init (&r
, gimple_assign_lhs (stmt
));
1638 modref_access_node a
= get_access (&r
);
1639 if (a
.useful_for_kill_p ())
1642 fprintf (dump_file
, " - Recording kill\n");
1644 modref_access_node::insert_kill (m_summary
->kills
, a
, false);
1646 modref_access_node::insert_kill (m_summary_lto
->kills
,
1653 /* Analyze all loads and stores in STMT. */
1654 walk_stmt_load_store_ops (stmt
, this,
1655 analyze_load
, analyze_store
);
1657 switch (gimple_code (stmt
))
1660 if (gimple_asm_volatile_p (as_a
<gasm
*> (stmt
)))
1661 set_nondeterministic ();
1662 if (cfun
->can_throw_non_call_exceptions
1663 && stmt_could_throw_p (cfun
, stmt
))
1664 set_side_effects ();
1665 /* If the ASM statement does not read nor write memory, there's nothing
1666 to do. Otherwise just give up. */
1667 if (!gimple_asm_clobbers_memory_p (as_a
<gasm
*> (stmt
)))
1670 fprintf (dump_file
, " - Function contains GIMPLE_ASM statement "
1671 "which clobbers memory.\n");
1672 record_unknown_load ();
1673 record_unknown_store ();
1676 if (!m_ipa
|| gimple_call_internal_p (stmt
))
1677 analyze_call (as_a
<gcall
*> (stmt
));
1680 attr_fnspec fnspec
= gimple_call_fnspec (as_a
<gcall
*>(stmt
));
1682 if (fnspec
.known_p ()
1683 && (!fnspec
.global_memory_read_p ()
1684 || !fnspec
.global_memory_written_p ()))
1686 cgraph_edge
*e
= cgraph_node::get
1687 (current_function_decl
)->get_edge (stmt
);
1690 fnspec_summaries
->get_create (e
)->fnspec
1691 = xstrdup (fnspec
.get_str ());
1693 fprintf (dump_file
, " Recorded fnspec %s\n",
1700 if (cfun
->can_throw_non_call_exceptions
1701 && stmt_could_throw_p (cfun
, stmt
))
1702 set_side_effects ();
1707 /* Propagate load/stres acress recursive calls. */
1710 modref_access_analysis::propagate ()
1712 if (m_ipa
&& m_summary
)
1715 bool changed
= true;
1717 cgraph_node
*fnode
= cgraph_node::get (current_function_decl
);
1719 m_always_executed
= false;
1720 while (changed
&& m_summary
->useful_p (m_ecf_flags
, false))
1723 for (unsigned i
= 0; i
< m_recursive_calls
.length (); i
++)
1725 changed
|= merge_call_side_effects (m_recursive_calls
[i
], m_summary
,
1732 /* Analyze function. */
1735 modref_access_analysis::analyze ()
1737 m_ecf_flags
= flags_from_decl_or_type (current_function_decl
);
1738 bool summary_useful
= true;
1740 /* Analyze each statement in each basic block of the function. If the
1741 statement cannot be analyzed (for any reason), the entire function cannot
1742 be analyzed by modref. */
1744 FOR_EACH_BB_FN (bb
, cfun
)
1746 gimple_stmt_iterator si
;
1747 bool always_executed
1748 = bb
== single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->dest
;
1750 for (si
= gsi_start_nondebug_after_labels_bb (bb
);
1751 !gsi_end_p (si
); gsi_next_nondebug (&si
))
1753 analyze_stmt (gsi_stmt (si
), always_executed
);
1755 /* Avoid doing useles work. */
1756 if ((!m_summary
|| !m_summary
->useful_p (m_ecf_flags
, false))
1758 || !m_summary_lto
->useful_p (m_ecf_flags
, false)))
1760 summary_useful
= false;
1764 && stmt_can_throw_external (cfun
, gsi_stmt (si
)))
1765 always_executed
= false;
1767 if (!summary_useful
)
1770 /* In non-IPA mode we need to perform iterative datafow on recursive calls.
1771 This needs to be done after all other side effects are computed. */
1776 if (m_summary
&& !m_summary
->side_effects
&& !finite_function_p ())
1777 m_summary
->side_effects
= true;
1778 if (m_summary_lto
&& !m_summary_lto
->side_effects
1779 && !finite_function_p ())
1780 m_summary_lto
->side_effects
= true;
1784 /* Return true if OP accesses memory pointed to by SSA_NAME. */
1787 memory_access_to (tree op
, tree ssa_name
)
1789 tree base
= get_base_address (op
);
1792 if (TREE_CODE (base
) != MEM_REF
&& TREE_CODE (base
) != TARGET_MEM_REF
)
1794 return TREE_OPERAND (base
, 0) == ssa_name
;
1797 /* Consider statement val = *arg.
1798 return EAF flags of ARG that can be determined from EAF flags of VAL
1799 (which are known to be FLAGS). If IGNORE_STORES is true we can ignore
1800 all stores to VAL, i.e. when handling noreturn function. */
1803 deref_flags (int flags
, bool ignore_stores
)
1805 /* Dereference is also a direct read but dereferenced value does not
1806 yield any other direct use. */
1807 int ret
= EAF_NO_DIRECT_CLOBBER
| EAF_NO_DIRECT_ESCAPE
1808 | EAF_NOT_RETURNED_DIRECTLY
;
1809 /* If argument is unused just account for
1810 the read involved in dereference. */
1811 if (flags
& EAF_UNUSED
)
1812 ret
|= EAF_NO_INDIRECT_READ
| EAF_NO_INDIRECT_CLOBBER
1813 | EAF_NO_INDIRECT_ESCAPE
;
1816 /* Direct or indirect accesses leads to indirect accesses. */
1817 if (((flags
& EAF_NO_DIRECT_CLOBBER
)
1818 && (flags
& EAF_NO_INDIRECT_CLOBBER
))
1820 ret
|= EAF_NO_INDIRECT_CLOBBER
;
1821 if (((flags
& EAF_NO_DIRECT_ESCAPE
)
1822 && (flags
& EAF_NO_INDIRECT_ESCAPE
))
1824 ret
|= EAF_NO_INDIRECT_ESCAPE
;
1825 if ((flags
& EAF_NO_DIRECT_READ
)
1826 && (flags
& EAF_NO_INDIRECT_READ
))
1827 ret
|= EAF_NO_INDIRECT_READ
;
1828 if ((flags
& EAF_NOT_RETURNED_DIRECTLY
)
1829 && (flags
& EAF_NOT_RETURNED_INDIRECTLY
))
1830 ret
|= EAF_NOT_RETURNED_INDIRECTLY
;
1836 /* Description of an escape point: a call which affects flags of a given
1841 /* Value escapes to this call. */
1843 /* Argument it escapes to. */
1845 /* Flags already known about the argument (this can save us from recording
1846 esape points if local analysis did good job already). */
1847 eaf_flags_t min_flags
;
1848 /* Does value escape directly or indiretly? */
1852 /* Lattice used during the eaf flags analsysis dataflow. For a given SSA name
1853 we aim to compute its flags and escape points. We also use the lattice
1854 to dynamically build dataflow graph to propagate on. */
1856 class modref_lattice
1859 /* EAF flags of the SSA name. */
1861 /* Used during DFS walk to mark names where final value was determined
1862 without need for dataflow. */
1864 /* Used during DFS walk to mark open vertices (for cycle detection). */
1866 /* Set during DFS walk for names that needs dataflow propagation. */
1868 /* Used during the iterative dataflow. */
1871 /* When doing IPA analysis we can not merge in callee escape points;
1872 Only remember them and do the merging at IPA propagation time. */
1873 vec
<escape_point
, va_heap
, vl_ptr
> escape_points
;
1875 /* Representation of a graph for dataaflow. This graph is built on-demand
1876 using modref_eaf_analysis::analyze_ssa and later solved by
1877 modref_eaf_analysis::propagate.
1878 Each edge represents the fact that flags of current lattice should be
1879 propagated to lattice of SSA_NAME. */
1880 struct propagate_edge
1885 vec
<propagate_edge
, va_heap
, vl_ptr
> propagate_to
;
1889 bool merge (const modref_lattice
&with
);
1890 bool merge (int flags
);
1891 bool merge_deref (const modref_lattice
&with
, bool ignore_stores
);
1892 bool merge_direct_load ();
1893 bool merge_direct_store ();
1894 bool add_escape_point (gcall
*call
, int arg
, int min_flags
, bool diret
);
1895 void dump (FILE *out
, int indent
= 0) const;
1898 /* Lattices are saved to vectors, so keep them PODs. */
1900 modref_lattice::init ()
1902 /* All flags we track. */
1903 int f
= EAF_NO_DIRECT_CLOBBER
| EAF_NO_INDIRECT_CLOBBER
1904 | EAF_NO_DIRECT_ESCAPE
| EAF_NO_INDIRECT_ESCAPE
1905 | EAF_NO_DIRECT_READ
| EAF_NO_INDIRECT_READ
1906 | EAF_NOT_RETURNED_DIRECTLY
| EAF_NOT_RETURNED_INDIRECTLY
1909 /* Check that eaf_flags_t is wide enough to hold all flags. */
1910 gcc_checking_assert (f
== flags
);
1915 /* Release memory. */
1917 modref_lattice::release ()
1919 escape_points
.release ();
1920 propagate_to
.release ();
1923 /* Dump lattice to OUT; indent with INDENT spaces. */
1926 modref_lattice::dump (FILE *out
, int indent
) const
1928 dump_eaf_flags (out
, flags
);
1929 if (escape_points
.length ())
1931 fprintf (out
, "%*sEscapes:\n", indent
, "");
1932 for (unsigned int i
= 0; i
< escape_points
.length (); i
++)
1934 fprintf (out
, "%*s Arg %i (%s) min flags", indent
, "",
1935 escape_points
[i
].arg
,
1936 escape_points
[i
].direct
? "direct" : "indirect");
1937 dump_eaf_flags (out
, escape_points
[i
].min_flags
, false);
1938 fprintf (out
, " in call ");
1939 print_gimple_stmt (out
, escape_points
[i
].call
, 0);
1944 /* Add escape point CALL, ARG, MIN_FLAGS, DIRECT. Return false if such escape
1948 modref_lattice::add_escape_point (gcall
*call
, int arg
, int min_flags
,
1954 /* If we already determined flags to be bad enough,
1955 we do not need to record. */
1956 if ((flags
& min_flags
) == flags
|| (min_flags
& EAF_UNUSED
))
1959 FOR_EACH_VEC_ELT (escape_points
, i
, ep
)
1960 if (ep
->call
== call
&& ep
->arg
== arg
&& ep
->direct
== direct
)
1962 if ((ep
->min_flags
& min_flags
) == min_flags
)
1964 ep
->min_flags
&= min_flags
;
1967 /* Give up if max escape points is met. */
1968 if ((int)escape_points
.length () > param_modref_max_escape_points
)
1971 fprintf (dump_file
, "--param modref-max-escape-points limit reached\n");
1975 escape_point new_ep
= {call
, arg
, min_flags
, direct
};
1976 escape_points
.safe_push (new_ep
);
1980 /* Merge in flags from F. */
1982 modref_lattice::merge (int f
)
1986 /* Check that flags seems sane: if function does not read the parameter
1987 it can not access it indirectly. */
1988 gcc_checking_assert (!(f
& EAF_NO_DIRECT_READ
)
1989 || ((f
& EAF_NO_INDIRECT_READ
)
1990 && (f
& EAF_NO_INDIRECT_CLOBBER
)
1991 && (f
& EAF_NO_INDIRECT_ESCAPE
)
1992 && (f
& EAF_NOT_RETURNED_INDIRECTLY
)));
1993 if ((flags
& f
) != flags
)
1996 /* Prune obvoiusly useless flags;
1997 We do not have ECF_FLAGS handy which is not big problem since
1998 we will do final flags cleanup before producing summary.
1999 Merging should be fast so it can work well with dataflow. */
2000 flags
= remove_useless_eaf_flags (flags
, 0, false);
2002 escape_points
.release ();
2008 /* Merge in WITH. Return true if anyting changed. */
2011 modref_lattice::merge (const modref_lattice
&with
)
2016 bool changed
= merge (with
.flags
);
2020 for (unsigned int i
= 0; i
< with
.escape_points
.length (); i
++)
2021 changed
|= add_escape_point (with
.escape_points
[i
].call
,
2022 with
.escape_points
[i
].arg
,
2023 with
.escape_points
[i
].min_flags
,
2024 with
.escape_points
[i
].direct
);
2028 /* Merge in deref of WITH. If IGNORE_STORES is true do not consider
2029 stores. Return true if anyting changed. */
2032 modref_lattice::merge_deref (const modref_lattice
&with
, bool ignore_stores
)
2037 bool changed
= merge (deref_flags (with
.flags
, ignore_stores
));
2041 for (unsigned int i
= 0; i
< with
.escape_points
.length (); i
++)
2043 int min_flags
= with
.escape_points
[i
].min_flags
;
2045 if (with
.escape_points
[i
].direct
)
2046 min_flags
= deref_flags (min_flags
, ignore_stores
);
2047 else if (ignore_stores
)
2048 min_flags
|= ignore_stores_eaf_flags
;
2049 changed
|= add_escape_point (with
.escape_points
[i
].call
,
2050 with
.escape_points
[i
].arg
,
2057 /* Merge in flags for direct load. */
2060 modref_lattice::merge_direct_load ()
2062 return merge (~(EAF_UNUSED
| EAF_NO_DIRECT_READ
));
2065 /* Merge in flags for direct store. */
2068 modref_lattice::merge_direct_store ()
2070 return merge (~(EAF_UNUSED
| EAF_NO_DIRECT_CLOBBER
));
2073 /* Analyzer of EAF flags.
2074 This is genrally dataflow problem over the SSA graph, however we only
2075 care about flags of few selected ssa names (arguments, return slot and
2076 static chain). So we first call analyze_ssa_name on all relevant names
2077 and perform a DFS walk to discover SSA names where flags needs to be
2078 determined. For acyclic graphs we try to determine final flags during
2079 this walk. Once cycles or recursin depth is met we enlist SSA names
2080 for dataflow which is done by propagate call.
2082 After propagation the flags can be obtained using get_ssa_name_flags. */
2084 class modref_eaf_analysis
2087 /* Mark NAME as relevant for analysis. */
2088 void analyze_ssa_name (tree name
);
2089 /* Dataflow slover. */
2091 /* Return flags computed earlier for NAME. */
2092 int get_ssa_name_flags (tree name
)
2094 int version
= SSA_NAME_VERSION (name
);
2095 gcc_checking_assert (m_lattice
[version
].known
);
2096 return m_lattice
[version
].flags
;
2098 /* In IPA mode this will record all escape points
2099 determined for NAME to PARM_IDNEX. Flags are minimal
2101 void record_escape_points (tree name
, int parm_index
, int flags
);
2102 modref_eaf_analysis (bool ipa
)
2106 m_lattice
.safe_grow_cleared (num_ssa_names
, true);
2108 ~modref_eaf_analysis ()
2110 gcc_checking_assert (!m_depth
);
2111 if (m_ipa
|| m_names_to_propagate
.length ())
2112 for (unsigned int i
= 0; i
< num_ssa_names
; i
++)
2113 m_lattice
[i
].release ();
2116 /* If true, we produce analysis for IPA mode. In this case escape points ar
2119 /* Depth of recursion of analyze_ssa_name. */
2121 /* Propagation lattice for individual ssa names. */
2122 auto_vec
<modref_lattice
> m_lattice
;
2123 auto_vec
<tree
> m_deferred_names
;
2124 auto_vec
<int> m_names_to_propagate
;
2126 void merge_with_ssa_name (tree dest
, tree src
, bool deref
);
2127 void merge_call_lhs_flags (gcall
*call
, int arg
, tree name
, bool direct
,
2132 /* Call statements may return tgeir parameters. Consider argument number
2133 ARG of USE_STMT and determine flags that can needs to be cleared
2134 in case pointer possibly indirectly references from ARG I is returned.
2135 If DIRECT is true consider direct returns and if INDIRECT consider
2137 LATTICE, DEPTH and ipa are same as in analyze_ssa_name.
2138 ARG is set to -1 for static chain. */
2141 modref_eaf_analysis::merge_call_lhs_flags (gcall
*call
, int arg
,
2142 tree name
, bool direct
,
2145 int index
= SSA_NAME_VERSION (name
);
2146 bool returned_directly
= false;
2148 /* If there is no return value, no flags are affected. */
2149 if (!gimple_call_lhs (call
))
2152 /* If we know that function returns given argument and it is not ARG
2153 we can still be happy. */
2156 int flags
= gimple_call_return_flags (call
);
2157 if (flags
& ERF_RETURNS_ARG
)
2159 if ((flags
& ERF_RETURN_ARG_MASK
) == arg
)
2160 returned_directly
= true;
2165 /* Make ERF_RETURNS_ARG overwrite EAF_UNUSED. */
2166 if (returned_directly
)
2171 /* If value is not returned at all, do nothing. */
2172 else if (!direct
&& !indirect
)
2175 /* If return value is SSA name determine its flags. */
2176 if (TREE_CODE (gimple_call_lhs (call
)) == SSA_NAME
)
2178 tree lhs
= gimple_call_lhs (call
);
2180 merge_with_ssa_name (name
, lhs
, false);
2182 merge_with_ssa_name (name
, lhs
, true);
2184 /* In the case of memory store we can do nothing. */
2186 m_lattice
[index
].merge (deref_flags (0, false));
2188 m_lattice
[index
].merge (0);
2191 /* CALL_FLAGS are EAF_FLAGS of the argument. Turn them
2192 into flags for caller, update LATTICE of corresponding
2193 argument if needed. */
2196 callee_to_caller_flags (int call_flags
, bool ignore_stores
,
2197 modref_lattice
&lattice
)
2199 /* call_flags is about callee returning a value
2200 that is not the same as caller returning it. */
2201 call_flags
|= EAF_NOT_RETURNED_DIRECTLY
2202 | EAF_NOT_RETURNED_INDIRECTLY
;
2203 if (!ignore_stores
&& !(call_flags
& EAF_UNUSED
))
2205 /* If value escapes we are no longer able to track what happens
2206 with it because we can read it from the escaped location
2208 if (!(call_flags
& EAF_NO_DIRECT_ESCAPE
))
2210 else if (!(call_flags
& EAF_NO_INDIRECT_ESCAPE
))
2211 lattice
.merge (~(EAF_NOT_RETURNED_INDIRECTLY
2212 | EAF_NO_DIRECT_READ
2213 | EAF_NO_INDIRECT_READ
2214 | EAF_NO_INDIRECT_CLOBBER
2218 call_flags
|= ignore_stores_eaf_flags
;
2222 /* Analyze EAF flags for SSA name NAME and store result to LATTICE.
2223 LATTICE is an array of modref_lattices.
2224 DEPTH is a recursion depth used to make debug output prettier.
2225 If IPA is true we analyze for IPA propagation (and thus call escape points
2226 are processed later) */
2229 modref_eaf_analysis::analyze_ssa_name (tree name
)
2231 imm_use_iterator ui
;
2233 int index
= SSA_NAME_VERSION (name
);
2235 /* See if value is already computed. */
2236 if (m_lattice
[index
].known
|| m_lattice
[index
].do_dataflow
)
2238 if (m_lattice
[index
].open
)
2242 "%*sCycle in SSA graph\n",
2246 /* Recursion guard. */
2247 m_lattice
[index
].init ();
2248 if (m_depth
== param_modref_max_depth
)
2252 "%*sMax recursion depth reached; postponing\n",
2254 m_deferred_names
.safe_push (name
);
2261 "%*sAnalyzing flags of ssa name: ", m_depth
* 4, "");
2262 print_generic_expr (dump_file
, name
);
2263 fprintf (dump_file
, "\n");
2266 FOR_EACH_IMM_USE_STMT (use_stmt
, ui
, name
)
2268 if (m_lattice
[index
].flags
== 0)
2270 if (is_gimple_debug (use_stmt
))
2274 fprintf (dump_file
, "%*s Analyzing stmt: ", m_depth
* 4, "");
2275 print_gimple_stmt (dump_file
, use_stmt
, 0);
2277 /* If we see a direct non-debug use, clear unused bit.
2278 All dereferneces should be accounted below using deref_flags. */
2279 m_lattice
[index
].merge (~EAF_UNUSED
);
2281 /* Gimple return may load the return value.
2282 Returning name counts as an use by tree-ssa-structalias.c */
2283 if (greturn
*ret
= dyn_cast
<greturn
*> (use_stmt
))
2285 /* Returning through return slot is seen as memory write earlier. */
2286 if (DECL_RESULT (current_function_decl
)
2287 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
)))
2289 else if (gimple_return_retval (ret
) == name
)
2290 m_lattice
[index
].merge (~(EAF_UNUSED
| EAF_NOT_RETURNED_DIRECTLY
2291 | EAF_NOT_RETURNED_DIRECTLY
));
2292 else if (memory_access_to (gimple_return_retval (ret
), name
))
2294 m_lattice
[index
].merge_direct_load ();
2295 m_lattice
[index
].merge (~(EAF_UNUSED
2296 | EAF_NOT_RETURNED_INDIRECTLY
));
2299 /* Account for LHS store, arg loads and flags from callee function. */
2300 else if (gcall
*call
= dyn_cast
<gcall
*> (use_stmt
))
2302 tree callee
= gimple_call_fndecl (call
);
2304 /* IPA PTA internally it treats calling a function as "writing" to
2305 the argument space of all functions the function pointer points to
2306 (PR101949). We can not drop EAF_NOCLOBBER only when ipa-pta
2307 is on since that would allow propagation of this from -fno-ipa-pta
2308 to -fipa-pta functions. */
2309 if (gimple_call_fn (use_stmt
) == name
)
2310 m_lattice
[index
].merge (~(EAF_NO_DIRECT_CLOBBER
| EAF_UNUSED
));
2312 /* Recursion would require bit of propagation; give up for now. */
2313 if (callee
&& !m_ipa
&& recursive_call_p (current_function_decl
,
2315 m_lattice
[index
].merge (0);
2318 int ecf_flags
= gimple_call_flags (call
);
2319 bool ignore_stores
= ignore_stores_p (current_function_decl
,
2321 bool ignore_retval
= ignore_retval_p (current_function_decl
,
2324 /* Handle *name = func (...). */
2325 if (gimple_call_lhs (call
)
2326 && memory_access_to (gimple_call_lhs (call
), name
))
2328 m_lattice
[index
].merge_direct_store ();
2329 /* Return slot optimization passes address of
2330 LHS to callee via hidden parameter and this
2331 may make LHS to escape. See PR 98499. */
2332 if (gimple_call_return_slot_opt_p (call
)
2333 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call
))))
2335 int call_flags
= gimple_call_retslot_flags (call
);
2336 bool isretslot
= false;
2338 if (DECL_RESULT (current_function_decl
)
2339 && DECL_BY_REFERENCE
2340 (DECL_RESULT (current_function_decl
)))
2341 isretslot
= ssa_default_def
2343 DECL_RESULT (current_function_decl
))
2346 /* Passing returnslot to return slot is special because
2347 not_returned and escape has same meaning.
2348 However passing arg to return slot is different. If
2349 the callee's return slot is returned it means that
2350 arg is written to itself which is an escape.
2351 Since we do not track the memory it is written to we
2352 need to give up on analysisng it. */
2355 if (!(call_flags
& (EAF_NOT_RETURNED_DIRECTLY
2357 m_lattice
[index
].merge (0);
2358 else gcc_checking_assert
2359 (call_flags
& (EAF_NOT_RETURNED_INDIRECTLY
2361 call_flags
= callee_to_caller_flags
2365 m_lattice
[index
].merge (call_flags
);
2369 if (gimple_call_chain (call
)
2370 && (gimple_call_chain (call
) == name
))
2372 int call_flags
= gimple_call_static_chain_flags (call
);
2373 if (!ignore_retval
&& !(call_flags
& EAF_UNUSED
))
2374 merge_call_lhs_flags
2376 !(call_flags
& EAF_NOT_RETURNED_DIRECTLY
),
2377 !(call_flags
& EAF_NOT_RETURNED_INDIRECTLY
));
2378 call_flags
= callee_to_caller_flags
2379 (call_flags
, ignore_stores
,
2381 if (!(ecf_flags
& (ECF_CONST
| ECF_NOVOPS
)))
2382 m_lattice
[index
].merge (call_flags
);
2385 /* Process internal functions and right away. */
2386 bool record_ipa
= m_ipa
&& !gimple_call_internal_p (call
);
2388 /* Handle all function parameters. */
2389 for (unsigned i
= 0;
2390 i
< gimple_call_num_args (call
)
2391 && m_lattice
[index
].flags
; i
++)
2392 /* Name is directly passed to the callee. */
2393 if (gimple_call_arg (call
, i
) == name
)
2395 int call_flags
= gimple_call_arg_flags (call
, i
);
2397 merge_call_lhs_flags
2399 !(call_flags
& (EAF_NOT_RETURNED_DIRECTLY
2401 !(call_flags
& (EAF_NOT_RETURNED_INDIRECTLY
2403 if (!(ecf_flags
& (ECF_CONST
| ECF_NOVOPS
)))
2405 call_flags
= callee_to_caller_flags
2406 (call_flags
, ignore_stores
,
2409 m_lattice
[index
].merge (call_flags
);
2411 m_lattice
[index
].add_escape_point (call
, i
,
2415 /* Name is dereferenced and passed to a callee. */
2416 else if (memory_access_to (gimple_call_arg (call
, i
), name
))
2418 int call_flags
= deref_flags
2419 (gimple_call_arg_flags (call
, i
), ignore_stores
);
2420 if (!ignore_retval
&& !(call_flags
& EAF_UNUSED
)
2421 && !(call_flags
& EAF_NOT_RETURNED_DIRECTLY
)
2422 && !(call_flags
& EAF_NOT_RETURNED_INDIRECTLY
))
2423 merge_call_lhs_flags (call
, i
, name
, false, true);
2424 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
2425 m_lattice
[index
].merge_direct_load ();
2428 call_flags
= callee_to_caller_flags
2429 (call_flags
, ignore_stores
,
2432 m_lattice
[index
].merge (call_flags
);
2434 m_lattice
[index
].add_escape_point (call
, i
,
2440 else if (gimple_assign_load_p (use_stmt
))
2442 gassign
*assign
= as_a
<gassign
*> (use_stmt
);
2443 /* Memory to memory copy. */
2444 if (gimple_store_p (assign
))
2446 /* Handle *lhs = *name.
2448 We do not track memory locations, so assume that value
2449 is used arbitrarily. */
2450 if (memory_access_to (gimple_assign_rhs1 (assign
), name
))
2451 m_lattice
[index
].merge (deref_flags (0, false));
2452 /* Handle *name = *exp. */
2453 else if (memory_access_to (gimple_assign_lhs (assign
), name
))
2454 m_lattice
[index
].merge_direct_store ();
2456 /* Handle lhs = *name. */
2457 else if (memory_access_to (gimple_assign_rhs1 (assign
), name
))
2459 tree lhs
= gimple_assign_lhs (assign
);
2460 merge_with_ssa_name (name
, lhs
, true);
2463 else if (gimple_store_p (use_stmt
))
2465 gassign
*assign
= dyn_cast
<gassign
*> (use_stmt
);
2467 /* Handle *lhs = name. */
2468 if (assign
&& gimple_assign_rhs1 (assign
) == name
)
2471 fprintf (dump_file
, "%*s ssa name saved to memory\n",
2473 m_lattice
[index
].merge (0);
2475 /* Handle *name = exp. */
2477 && memory_access_to (gimple_assign_lhs (assign
), name
))
2479 /* In general we can not ignore clobbers because they are
2480 barriers for code motion, however after inlining it is safe to
2481 do because local optimization passes do not consider clobbers
2482 from other functions.
2483 Similar logic is in ipa-pure-const.c. */
2484 if (!cfun
->after_inlining
|| !gimple_clobber_p (assign
))
2485 m_lattice
[index
].merge_direct_store ();
2487 /* ASM statements etc. */
2491 fprintf (dump_file
, "%*s Unhandled store\n", m_depth
* 4, "");
2492 m_lattice
[index
].merge (0);
2495 else if (gassign
*assign
= dyn_cast
<gassign
*> (use_stmt
))
2497 enum tree_code code
= gimple_assign_rhs_code (assign
);
2499 /* See if operation is a merge as considered by
2500 tree-ssa-structalias.c:find_func_aliases. */
2501 if (!truth_value_p (code
)
2502 && code
!= POINTER_DIFF_EXPR
2503 && (code
!= POINTER_PLUS_EXPR
2504 || gimple_assign_rhs1 (assign
) == name
))
2506 tree lhs
= gimple_assign_lhs (assign
);
2507 merge_with_ssa_name (name
, lhs
, false);
2510 else if (gphi
*phi
= dyn_cast
<gphi
*> (use_stmt
))
2512 tree result
= gimple_phi_result (phi
);
2513 merge_with_ssa_name (name
, result
, false);
2515 /* Conditions are not considered escape points
2516 by tree-ssa-structalias. */
2517 else if (gimple_code (use_stmt
) == GIMPLE_COND
)
2522 fprintf (dump_file
, "%*s Unhandled stmt\n", m_depth
* 4, "");
2523 m_lattice
[index
].merge (0);
2528 fprintf (dump_file
, "%*s current flags of ", m_depth
* 4, "");
2529 print_generic_expr (dump_file
, name
);
2530 m_lattice
[index
].dump (dump_file
, m_depth
* 4 + 4);
2535 fprintf (dump_file
, "%*sflags of ssa name ", m_depth
* 4, "");
2536 print_generic_expr (dump_file
, name
);
2537 m_lattice
[index
].dump (dump_file
, m_depth
* 4 + 2);
2539 m_lattice
[index
].open
= false;
2540 if (!m_lattice
[index
].do_dataflow
)
2541 m_lattice
[index
].known
= true;
2544 /* Propagate info from SRC to DEST. If DEREF it true, assume that SRC
2548 modref_eaf_analysis::merge_with_ssa_name (tree dest
, tree src
, bool deref
)
2550 int index
= SSA_NAME_VERSION (dest
);
2551 int src_index
= SSA_NAME_VERSION (src
);
2553 /* Merging lattice with itself is a no-op. */
2554 if (!deref
&& src
== dest
)
2558 analyze_ssa_name (src
);
2561 m_lattice
[index
].merge_deref (m_lattice
[src_index
], false);
2563 m_lattice
[index
].merge (m_lattice
[src_index
]);
2565 /* If we failed to produce final solution add an edge to the dataflow
2567 if (!m_lattice
[src_index
].known
)
2569 modref_lattice::propagate_edge e
= {index
, deref
};
2571 if (!m_lattice
[src_index
].propagate_to
.length ())
2572 m_names_to_propagate
.safe_push (src_index
);
2573 m_lattice
[src_index
].propagate_to
.safe_push (e
);
2574 m_lattice
[src_index
].changed
= true;
2575 m_lattice
[src_index
].do_dataflow
= true;
2578 "%*sWill propgate from ssa_name %i to %i%s\n",
2580 "", src_index
, index
, deref
? " (deref)" : "");
2584 /* In the case we deferred some SSA names, reprocess them. In the case some
2585 dataflow edges were introduced, do the actual iterative dataflow. */
2588 modref_eaf_analysis::propagate ()
2593 bool changed
= true;
2595 while (m_deferred_names
.length ())
2597 tree name
= m_deferred_names
.pop ();
2598 m_lattice
[SSA_NAME_VERSION (name
)].open
= false;
2600 fprintf (dump_file
, "Analyzing deferred SSA name\n");
2601 analyze_ssa_name (name
);
2604 if (!m_names_to_propagate
.length ())
2607 fprintf (dump_file
, "Propagating EAF flags\n");
2609 /* Compute reverse postorder. */
2616 auto_vec
<struct stack_entry
> stack
;
2617 int pos
= m_names_to_propagate
.length () - 1;
2619 rpo
.safe_grow (m_names_to_propagate
.length (), true);
2620 stack
.reserve_exact (m_names_to_propagate
.length ());
2622 /* We reuse known flag for RPO DFS walk bookeeping. */
2624 FOR_EACH_VEC_ELT (m_names_to_propagate
, i
, index
)
2625 gcc_assert (!m_lattice
[index
].known
&& m_lattice
[index
].changed
);
2627 FOR_EACH_VEC_ELT (m_names_to_propagate
, i
, index
)
2629 if (!m_lattice
[index
].known
)
2631 stack_entry e
= {index
, 0};
2633 stack
.quick_push (e
);
2634 m_lattice
[index
].known
= true;
2636 while (stack
.length ())
2639 int index1
= stack
.last ().name
;
2641 while (stack
.last ().pos
< m_lattice
[index1
].propagate_to
.length ())
2643 int index2
= m_lattice
[index1
]
2644 .propagate_to
[stack
.last ().pos
].ssa_name
;
2646 stack
.last ().pos
++;
2647 if (!m_lattice
[index2
].known
2648 && m_lattice
[index2
].propagate_to
.length ())
2650 stack_entry e
= {index2
, 0};
2652 stack
.quick_push (e
);
2653 m_lattice
[index2
].known
= true;
2659 && stack
.last ().pos
== m_lattice
[index1
].propagate_to
.length ())
2661 rpo
[pos
--] = index1
;
2667 /* Perform itrative dataflow. */
2673 fprintf (dump_file
, " iteration %i\n", iterations
);
2674 FOR_EACH_VEC_ELT (rpo
, i
, index
)
2676 if (m_lattice
[index
].changed
)
2680 m_lattice
[index
].changed
= false;
2682 fprintf (dump_file
, " Visiting ssa name %i\n", index
);
2683 for (j
= 0; j
< m_lattice
[index
].propagate_to
.length (); j
++)
2686 int target
= m_lattice
[index
].propagate_to
[j
].ssa_name
;
2687 bool deref
= m_lattice
[index
].propagate_to
[j
].deref
;
2690 fprintf (dump_file
, " Propagating flags of ssa name"
2692 index
, target
, deref
? " (deref)" : "");
2693 m_lattice
[target
].known
= true;
2694 if (!m_lattice
[index
].propagate_to
[j
].deref
)
2695 ch
= m_lattice
[target
].merge (m_lattice
[index
]);
2697 ch
= m_lattice
[target
].merge_deref (m_lattice
[index
],
2703 fprintf (dump_file
, " New lattice: ");
2704 m_lattice
[target
].dump (dump_file
);
2707 m_lattice
[target
].changed
= true;
2713 fprintf (dump_file
, "EAF flags propagated in %i iterations\n", iterations
);
2716 /* Record escape points of PARM_INDEX according to LATTICE. */
2719 modref_eaf_analysis::record_escape_points (tree name
, int parm_index
, int flags
)
2721 modref_lattice
&lattice
= m_lattice
[SSA_NAME_VERSION (name
)];
2723 if (lattice
.escape_points
.length ())
2727 cgraph_node
*node
= cgraph_node::get (current_function_decl
);
2730 FOR_EACH_VEC_ELT (lattice
.escape_points
, ip
, ep
)
2731 if ((ep
->min_flags
& flags
) != flags
)
2733 cgraph_edge
*e
= node
->get_edge (ep
->call
);
2734 struct escape_entry ee
= {parm_index
, ep
->arg
,
2735 ep
->min_flags
, ep
->direct
};
2737 escape_summaries
->get_create (e
)->esc
.safe_push (ee
);
2742 /* Determine EAF flags for function parameters
2743 and fill in SUMMARY/SUMMARY_LTO. If IPA is true work in IPA mode
2744 where we also collect scape points.
2745 PAST_FLAGS, PAST_RETSLOT_FLAGS, PAST_STATIC_CHAIN_FLAGS can be
2746 used to preserve flags from prevoius (IPA) run for cases where
2747 late optimizations changed code in a way we can no longer analyze
2751 analyze_parms (modref_summary
*summary
, modref_summary_lto
*summary_lto
,
2752 bool ipa
, vec
<eaf_flags_t
> &past_flags
,
2753 int past_retslot_flags
, int past_static_chain_flags
)
2755 unsigned int parm_index
= 0;
2756 unsigned int count
= 0;
2757 int ecf_flags
= flags_from_decl_or_type (current_function_decl
);
2758 tree retslot
= NULL
;
2759 tree static_chain
= NULL
;
2761 /* If there is return slot, look up its SSA name. */
2762 if (DECL_RESULT (current_function_decl
)
2763 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
)))
2764 retslot
= ssa_default_def (cfun
, DECL_RESULT (current_function_decl
));
2765 if (cfun
->static_chain_decl
)
2766 static_chain
= ssa_default_def (cfun
, cfun
->static_chain_decl
);
2768 for (tree parm
= DECL_ARGUMENTS (current_function_decl
); parm
;
2769 parm
= TREE_CHAIN (parm
))
2772 if (!count
&& !retslot
&& !static_chain
)
2775 modref_eaf_analysis
eaf_analysis (ipa
);
2777 /* Determine all SSA names we need to know flags for. */
2778 for (tree parm
= DECL_ARGUMENTS (current_function_decl
); parm
;
2779 parm
= TREE_CHAIN (parm
))
2781 tree name
= ssa_default_def (cfun
, parm
);
2783 eaf_analysis
.analyze_ssa_name (name
);
2786 eaf_analysis
.analyze_ssa_name (retslot
);
2788 eaf_analysis
.analyze_ssa_name (static_chain
);
2790 /* Do the dataflow. */
2791 eaf_analysis
.propagate ();
2793 tree attr
= lookup_attribute ("fn spec",
2795 (TREE_TYPE (current_function_decl
)));
2796 attr_fnspec
fnspec (attr
2797 ? TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
)))
2801 /* Store results to summaries. */
2802 for (tree parm
= DECL_ARGUMENTS (current_function_decl
); parm
; parm_index
++,
2803 parm
= TREE_CHAIN (parm
))
2805 tree name
= ssa_default_def (cfun
, parm
);
2806 if (!name
|| has_zero_uses (name
))
2808 /* We do not track non-SSA parameters,
2809 but we want to track unused gimple_regs. */
2810 if (!is_gimple_reg (parm
))
2814 if (parm_index
>= summary
->arg_flags
.length ())
2815 summary
->arg_flags
.safe_grow_cleared (count
, true);
2816 summary
->arg_flags
[parm_index
] = EAF_UNUSED
;
2818 else if (summary_lto
)
2820 if (parm_index
>= summary_lto
->arg_flags
.length ())
2821 summary_lto
->arg_flags
.safe_grow_cleared (count
, true);
2822 summary_lto
->arg_flags
[parm_index
] = EAF_UNUSED
;
2826 int flags
= eaf_analysis
.get_ssa_name_flags (name
);
2827 int attr_flags
= fnspec
.arg_eaf_flags (parm_index
);
2829 if (dump_file
&& (flags
| attr_flags
) != flags
&& !(flags
& EAF_UNUSED
))
2832 " Flags for param %i combined with fnspec flags:",
2834 dump_eaf_flags (dump_file
, attr_flags
, false);
2835 fprintf (dump_file
, " determined: ");
2836 dump_eaf_flags (dump_file
, flags
, true);
2838 flags
|= attr_flags
;
2840 /* Eliminate useless flags so we do not end up storing unnecessary
2843 flags
= remove_useless_eaf_flags
2845 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))));
2846 if (past_flags
.length () > parm_index
)
2848 int past
= past_flags
[parm_index
];
2849 past
= remove_useless_eaf_flags
2851 VOID_TYPE_P (TREE_TYPE
2852 (TREE_TYPE (current_function_decl
))));
2853 if (dump_file
&& (flags
| past
) != flags
&& !(flags
& EAF_UNUSED
))
2856 " Flags for param %i combined with IPA pass:",
2858 dump_eaf_flags (dump_file
, past
, false);
2859 fprintf (dump_file
, " determined: ");
2860 dump_eaf_flags (dump_file
, flags
, true);
2862 if (!(flags
& EAF_UNUSED
))
2870 if (parm_index
>= summary
->arg_flags
.length ())
2871 summary
->arg_flags
.safe_grow_cleared (count
, true);
2872 summary
->arg_flags
[parm_index
] = flags
;
2874 else if (summary_lto
)
2876 if (parm_index
>= summary_lto
->arg_flags
.length ())
2877 summary_lto
->arg_flags
.safe_grow_cleared (count
, true);
2878 summary_lto
->arg_flags
[parm_index
] = flags
;
2880 eaf_analysis
.record_escape_points (name
, parm_index
, flags
);
2885 int flags
= eaf_analysis
.get_ssa_name_flags (retslot
);
2886 int past
= past_retslot_flags
;
2888 flags
= remove_useless_eaf_flags (flags
, ecf_flags
, false);
2889 past
= remove_useless_eaf_flags
2891 VOID_TYPE_P (TREE_TYPE
2892 (TREE_TYPE (current_function_decl
))));
2893 if (dump_file
&& (flags
| past
) != flags
&& !(flags
& EAF_UNUSED
))
2896 " Retslot flags combined with IPA pass:");
2897 dump_eaf_flags (dump_file
, past
, false);
2898 fprintf (dump_file
, " determined: ");
2899 dump_eaf_flags (dump_file
, flags
, true);
2901 if (!(flags
& EAF_UNUSED
))
2906 summary
->retslot_flags
= flags
;
2908 summary_lto
->retslot_flags
= flags
;
2909 eaf_analysis
.record_escape_points (retslot
,
2910 MODREF_RETSLOT_PARM
, flags
);
2915 int flags
= eaf_analysis
.get_ssa_name_flags (static_chain
);
2916 int past
= past_static_chain_flags
;
2918 flags
= remove_useless_eaf_flags (flags
, ecf_flags
, false);
2919 past
= remove_useless_eaf_flags
2921 VOID_TYPE_P (TREE_TYPE
2922 (TREE_TYPE (current_function_decl
))));
2923 if (dump_file
&& (flags
| past
) != flags
&& !(flags
& EAF_UNUSED
))
2926 " Static chain flags combined with IPA pass:");
2927 dump_eaf_flags (dump_file
, past
, false);
2928 fprintf (dump_file
, " determined: ");
2929 dump_eaf_flags (dump_file
, flags
, true);
2931 if (!(flags
& EAF_UNUSED
))
2936 summary
->static_chain_flags
= flags
;
2938 summary_lto
->static_chain_flags
= flags
;
2939 eaf_analysis
.record_escape_points (static_chain
,
2940 MODREF_STATIC_CHAIN_PARM
,
2946 /* Analyze function. IPA indicates whether we're running in local mode
2947 (false) or the IPA mode (true).
2948 Return true if fixup cfg is needed after the pass. */
2951 analyze_function (bool ipa
)
2953 bool fixup_cfg
= false;
2955 fprintf (dump_file
, "\n\nmodref analyzing '%s' (ipa=%i)%s%s\n",
2956 cgraph_node::get (current_function_decl
)->dump_name (), ipa
,
2957 TREE_READONLY (current_function_decl
) ? " (const)" : "",
2958 DECL_PURE_P (current_function_decl
) ? " (pure)" : "");
2960 /* Don't analyze this function if it's compiled with -fno-strict-aliasing. */
2961 if (!flag_ipa_modref
2962 || lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl
)))
2965 /* Compute no-LTO summaries when local optimization is going to happen. */
2966 bool nolto
= (!ipa
|| ((!flag_lto
|| flag_fat_lto_objects
) && !in_lto_p
)
2967 || (in_lto_p
&& !flag_wpa
2968 && flag_incremental_link
!= INCREMENTAL_LINK_LTO
));
2969 /* Compute LTO when LTO streaming is going to happen. */
2970 bool lto
= ipa
&& ((flag_lto
&& !in_lto_p
)
2972 || flag_incremental_link
== INCREMENTAL_LINK_LTO
);
2973 cgraph_node
*fnode
= cgraph_node::get (current_function_decl
);
2975 modref_summary
*summary
= NULL
;
2976 modref_summary_lto
*summary_lto
= NULL
;
2978 bool past_flags_known
= false;
2979 auto_vec
<eaf_flags_t
> past_flags
;
2980 int past_retslot_flags
= 0;
2981 int past_static_chain_flags
= 0;
2983 /* Initialize the summary.
2984 If we run in local mode there is possibly pre-existing summary from
2985 IPA pass. Dump it so it is easy to compare if mod-ref info has
2989 if (!optimization_summaries
)
2990 optimization_summaries
= modref_summaries::create_ggc (symtab
);
2991 else /* Remove existing summary if we are re-running the pass. */
2993 summary
= optimization_summaries
->get (fnode
);
2999 fprintf (dump_file
, "Past summary:\n");
3000 optimization_summaries
->get (fnode
)->dump (dump_file
);
3002 past_flags
.reserve_exact (summary
->arg_flags
.length ());
3003 past_flags
.splice (summary
->arg_flags
);
3004 past_retslot_flags
= summary
->retslot_flags
;
3005 past_static_chain_flags
= summary
->static_chain_flags
;
3006 past_flags_known
= true;
3008 optimization_summaries
->remove (fnode
);
3010 summary
= optimization_summaries
->get_create (fnode
);
3011 gcc_checking_assert (nolto
&& !lto
);
3013 /* In IPA mode we analyze every function precisely once. Assert that. */
3019 summaries
= modref_summaries::create_ggc (symtab
);
3021 summaries
->remove (fnode
);
3022 summary
= summaries
->get_create (fnode
);
3027 summaries_lto
= modref_summaries_lto::create_ggc (symtab
);
3029 summaries_lto
->remove (fnode
);
3030 summary_lto
= summaries_lto
->get_create (fnode
);
3032 if (!fnspec_summaries
)
3033 fnspec_summaries
= new fnspec_summaries_t (symtab
);
3034 if (!escape_summaries
)
3035 escape_summaries
= new escape_summaries_t (symtab
);
3039 /* Create and initialize summary for F.
3040 Note that summaries may be already allocated from previous
3044 gcc_assert (!summary
->loads
);
3045 summary
->loads
= modref_records::create_ggc ();
3046 gcc_assert (!summary
->stores
);
3047 summary
->stores
= modref_records::create_ggc ();
3048 summary
->writes_errno
= false;
3049 summary
->side_effects
= false;
3050 summary
->nondeterministic
= false;
3051 summary
->calls_interposable
= false;
3055 gcc_assert (!summary_lto
->loads
);
3056 summary_lto
->loads
= modref_records_lto::create_ggc ();
3057 gcc_assert (!summary_lto
->stores
);
3058 summary_lto
->stores
= modref_records_lto::create_ggc ();
3059 summary_lto
->writes_errno
= false;
3060 summary_lto
->side_effects
= false;
3061 summary_lto
->nondeterministic
= false;
3062 summary_lto
->calls_interposable
= false;
3065 analyze_parms (summary
, summary_lto
, ipa
,
3066 past_flags
, past_retslot_flags
, past_static_chain_flags
);
3069 modref_access_analysis
analyzer (ipa
, summary
, summary_lto
);
3070 analyzer
.analyze ();
3073 if (!ipa
&& flag_ipa_pure_const
)
3075 if (!summary
->stores
->every_base
&& !summary
->stores
->bases
3076 && !summary
->nondeterministic
)
3078 if (!summary
->loads
->every_base
&& !summary
->loads
->bases
3079 && !summary
->calls_interposable
)
3080 fixup_cfg
= ipa_make_function_const (fnode
,
3081 summary
->side_effects
, true);
3083 fixup_cfg
= ipa_make_function_pure (fnode
,
3084 summary
->side_effects
, true);
3087 int ecf_flags
= flags_from_decl_or_type (current_function_decl
);
3088 if (summary
&& !summary
->useful_p (ecf_flags
))
3091 optimization_summaries
->remove (fnode
);
3093 summaries
->remove (fnode
);
3097 summary
->finalize (current_function_decl
);
3098 if (summary_lto
&& !summary_lto
->useful_p (ecf_flags
))
3100 summaries_lto
->remove (fnode
);
3104 if (ipa
&& !summary
&& !summary_lto
)
3105 remove_modref_edge_summaries (fnode
);
3109 fprintf (dump_file
, " - modref done with result: tracked.\n");
3111 summary
->dump (dump_file
);
3113 summary_lto
->dump (dump_file
);
3114 dump_modref_edge_summaries (dump_file
, fnode
, 2);
3115 /* To simplify debugging, compare IPA and local solutions. */
3116 if (past_flags_known
&& summary
)
3118 size_t len
= summary
->arg_flags
.length ();
3120 if (past_flags
.length () > len
)
3121 len
= past_flags
.length ();
3122 for (size_t i
= 0; i
< len
; i
++)
3124 int old_flags
= i
< past_flags
.length () ? past_flags
[i
] : 0;
3125 int new_flags
= i
< summary
->arg_flags
.length ()
3126 ? summary
->arg_flags
[i
] : 0;
3127 old_flags
= remove_useless_eaf_flags
3128 (old_flags
, flags_from_decl_or_type (current_function_decl
),
3129 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))));
3130 if (old_flags
!= new_flags
)
3132 if ((old_flags
& ~new_flags
) == 0
3133 || (new_flags
& EAF_UNUSED
))
3134 fprintf (dump_file
, " Flags for param %i improved:",
3138 dump_eaf_flags (dump_file
, old_flags
, false);
3139 fprintf (dump_file
, " -> ");
3140 dump_eaf_flags (dump_file
, new_flags
, true);
3143 past_retslot_flags
= remove_useless_eaf_flags
3144 (past_retslot_flags
,
3145 flags_from_decl_or_type (current_function_decl
),
3146 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))));
3147 if (past_retslot_flags
!= summary
->retslot_flags
)
3149 if ((past_retslot_flags
& ~summary
->retslot_flags
) == 0
3150 || (summary
->retslot_flags
& EAF_UNUSED
))
3151 fprintf (dump_file
, " Flags for retslot improved:");
3154 dump_eaf_flags (dump_file
, past_retslot_flags
, false);
3155 fprintf (dump_file
, " -> ");
3156 dump_eaf_flags (dump_file
, summary
->retslot_flags
, true);
3158 past_static_chain_flags
= remove_useless_eaf_flags
3159 (past_static_chain_flags
,
3160 flags_from_decl_or_type (current_function_decl
),
3161 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))));
3162 if (past_static_chain_flags
!= summary
->static_chain_flags
)
3164 if ((past_static_chain_flags
& ~summary
->static_chain_flags
) == 0
3165 || (summary
->static_chain_flags
& EAF_UNUSED
))
3166 fprintf (dump_file
, " Flags for static chain improved:");
3169 dump_eaf_flags (dump_file
, past_static_chain_flags
, false);
3170 fprintf (dump_file
, " -> ");
3171 dump_eaf_flags (dump_file
, summary
->static_chain_flags
, true);
3174 else if (past_flags_known
&& !summary
)
3176 for (size_t i
= 0; i
< past_flags
.length (); i
++)
3178 int old_flags
= past_flags
[i
];
3179 old_flags
= remove_useless_eaf_flags
3180 (old_flags
, flags_from_decl_or_type (current_function_decl
),
3181 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))));
3184 fprintf (dump_file
, " Flags for param %i worsened:",
3186 dump_eaf_flags (dump_file
, old_flags
, false);
3187 fprintf (dump_file
, " -> \n");
3190 past_retslot_flags
= remove_useless_eaf_flags
3191 (past_retslot_flags
,
3192 flags_from_decl_or_type (current_function_decl
),
3193 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))));
3194 if (past_retslot_flags
)
3196 fprintf (dump_file
, " Flags for retslot worsened:");
3197 dump_eaf_flags (dump_file
, past_retslot_flags
, false);
3198 fprintf (dump_file
, " ->\n");
3200 past_static_chain_flags
= remove_useless_eaf_flags
3201 (past_static_chain_flags
,
3202 flags_from_decl_or_type (current_function_decl
),
3203 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))));
3204 if (past_static_chain_flags
)
3206 fprintf (dump_file
, " Flags for static chain worsened:");
3207 dump_eaf_flags (dump_file
, past_static_chain_flags
, false);
3208 fprintf (dump_file
, " ->\n");
3215 /* Callback for generate_summary. */
3218 modref_generate (void)
3220 struct cgraph_node
*node
;
3221 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node
)
3223 function
*f
= DECL_STRUCT_FUNCTION (node
->decl
);
3227 analyze_function (true);
3232 } /* ANON namespace. */
3234 /* Debugging helper. */
3237 debug_eaf_flags (int flags
)
3239 dump_eaf_flags (stderr
, flags
, true);
3242 /* Called when a new function is inserted to callgraph late. */
3245 modref_summaries::insert (struct cgraph_node
*node
, modref_summary
*)
3247 /* Local passes ought to be executed by the pass manager. */
3248 if (this == optimization_summaries
)
3250 optimization_summaries
->remove (node
);
3253 if (!DECL_STRUCT_FUNCTION (node
->decl
)
3254 || !opt_for_fn (node
->decl
, flag_ipa_modref
))
3256 summaries
->remove (node
);
3259 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
3260 analyze_function (true);
3264 /* Called when a new function is inserted to callgraph late. */
3267 modref_summaries_lto::insert (struct cgraph_node
*node
, modref_summary_lto
*)
3269 /* We do not support adding new function when IPA information is already
3270 propagated. This is done only by SIMD cloning that is not very
3272 if (!DECL_STRUCT_FUNCTION (node
->decl
)
3273 || !opt_for_fn (node
->decl
, flag_ipa_modref
)
3276 summaries_lto
->remove (node
);
3279 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
3280 analyze_function (true);
3284 /* Called when new clone is inserted to callgraph late. */
3287 modref_summaries::duplicate (cgraph_node
*, cgraph_node
*dst
,
3288 modref_summary
*src_data
,
3289 modref_summary
*dst_data
)
3291 /* Do not duplicate optimization summaries; we do not handle parameter
3292 transforms on them. */
3293 if (this == optimization_summaries
)
3295 optimization_summaries
->remove (dst
);
3298 dst_data
->stores
= modref_records::create_ggc ();
3299 dst_data
->stores
->copy_from (src_data
->stores
);
3300 dst_data
->loads
= modref_records::create_ggc ();
3301 dst_data
->loads
->copy_from (src_data
->loads
);
3302 dst_data
->kills
.reserve_exact (src_data
->kills
.length ());
3303 dst_data
->kills
.splice (src_data
->kills
);
3304 dst_data
->writes_errno
= src_data
->writes_errno
;
3305 dst_data
->side_effects
= src_data
->side_effects
;
3306 dst_data
->nondeterministic
= src_data
->nondeterministic
;
3307 dst_data
->calls_interposable
= src_data
->calls_interposable
;
3308 if (src_data
->arg_flags
.length ())
3309 dst_data
->arg_flags
= src_data
->arg_flags
.copy ();
3310 dst_data
->retslot_flags
= src_data
->retslot_flags
;
3311 dst_data
->static_chain_flags
= src_data
->static_chain_flags
;
3314 /* Called when new clone is inserted to callgraph late. */
3317 modref_summaries_lto::duplicate (cgraph_node
*, cgraph_node
*,
3318 modref_summary_lto
*src_data
,
3319 modref_summary_lto
*dst_data
)
3321 /* Be sure that no further cloning happens after ipa-modref. If it does
3322 we will need to update signatures for possible param changes. */
3323 gcc_checking_assert (!((modref_summaries_lto
*)summaries_lto
)->propagated
);
3324 dst_data
->stores
= modref_records_lto::create_ggc ();
3325 dst_data
->stores
->copy_from (src_data
->stores
);
3326 dst_data
->loads
= modref_records_lto::create_ggc ();
3327 dst_data
->loads
->copy_from (src_data
->loads
);
3328 dst_data
->kills
.reserve_exact (src_data
->kills
.length ());
3329 dst_data
->kills
.splice (src_data
->kills
);
3330 dst_data
->writes_errno
= src_data
->writes_errno
;
3331 dst_data
->side_effects
= src_data
->side_effects
;
3332 dst_data
->nondeterministic
= src_data
->nondeterministic
;
3333 dst_data
->calls_interposable
= src_data
->calls_interposable
;
3334 if (src_data
->arg_flags
.length ())
3335 dst_data
->arg_flags
= src_data
->arg_flags
.copy ();
3336 dst_data
->retslot_flags
= src_data
->retslot_flags
;
3337 dst_data
->static_chain_flags
= src_data
->static_chain_flags
;
3342 /* Definition of the modref pass on GIMPLE. */
3343 const pass_data pass_data_modref
= {
3348 (PROP_cfg
| PROP_ssa
),
3355 class pass_modref
: public gimple_opt_pass
3358 pass_modref (gcc::context
*ctxt
)
3359 : gimple_opt_pass (pass_data_modref
, ctxt
) {}
3361 /* opt_pass methods: */
3364 return new pass_modref (m_ctxt
);
3366 virtual bool gate (function
*)
3368 return flag_ipa_modref
;
3370 virtual unsigned int execute (function
*);
3373 /* Encode TT to the output block OB using the summary streaming API. */
3376 write_modref_records (modref_records_lto
*tt
, struct output_block
*ob
)
3378 streamer_write_uhwi (ob
, tt
->every_base
);
3379 streamer_write_uhwi (ob
, vec_safe_length (tt
->bases
));
3380 for (auto base_node
: tt
->bases
)
3382 stream_write_tree (ob
, base_node
->base
, true);
3384 streamer_write_uhwi (ob
, base_node
->every_ref
);
3385 streamer_write_uhwi (ob
, vec_safe_length (base_node
->refs
));
3387 for (auto ref_node
: base_node
->refs
)
3389 stream_write_tree (ob
, ref_node
->ref
, true);
3390 streamer_write_uhwi (ob
, ref_node
->every_access
);
3391 streamer_write_uhwi (ob
, vec_safe_length (ref_node
->accesses
));
3393 for (auto access_node
: ref_node
->accesses
)
3394 access_node
.stream_out (ob
);
3399 /* Read a modref_tree from the input block IB using the data from DATA_IN.
3400 This assumes that the tree was encoded using write_modref_tree.
3401 Either nolto_ret or lto_ret is initialized by the tree depending whether
3402 LTO streaming is expected or not. */
3405 read_modref_records (tree decl
,
3406 lto_input_block
*ib
, struct data_in
*data_in
,
3407 modref_records
**nolto_ret
,
3408 modref_records_lto
**lto_ret
)
3410 size_t max_bases
= opt_for_fn (decl
, param_modref_max_bases
);
3411 size_t max_refs
= opt_for_fn (decl
, param_modref_max_refs
);
3412 size_t max_accesses
= opt_for_fn (decl
, param_modref_max_accesses
);
3415 *lto_ret
= modref_records_lto::create_ggc ();
3417 *nolto_ret
= modref_records::create_ggc ();
3418 gcc_checking_assert (lto_ret
|| nolto_ret
);
3420 size_t every_base
= streamer_read_uhwi (ib
);
3421 size_t nbase
= streamer_read_uhwi (ib
);
3423 gcc_assert (!every_base
|| nbase
== 0);
3427 (*nolto_ret
)->collapse ();
3429 (*lto_ret
)->collapse ();
3431 for (size_t i
= 0; i
< nbase
; i
++)
3433 tree base_tree
= stream_read_tree (ib
, data_in
);
3434 modref_base_node
<alias_set_type
> *nolto_base_node
= NULL
;
3435 modref_base_node
<tree
> *lto_base_node
= NULL
;
3437 /* At stream in time we have LTO alias info. Check if we streamed in
3438 something obviously unnecessary. Do not glob types by alias sets;
3439 it is not 100% clear that ltrans types will get merged same way.
3440 Types may get refined based on ODR type conflicts. */
3441 if (base_tree
&& !get_alias_set (base_tree
))
3445 fprintf (dump_file
, "Streamed in alias set 0 type ");
3446 print_generic_expr (dump_file
, base_tree
);
3447 fprintf (dump_file
, "\n");
3453 nolto_base_node
= (*nolto_ret
)->insert_base (base_tree
3454 ? get_alias_set (base_tree
)
3457 lto_base_node
= (*lto_ret
)->insert_base (base_tree
, 0, max_bases
);
3458 size_t every_ref
= streamer_read_uhwi (ib
);
3459 size_t nref
= streamer_read_uhwi (ib
);
3461 gcc_assert (!every_ref
|| nref
== 0);
3464 if (nolto_base_node
)
3465 nolto_base_node
->collapse ();
3467 lto_base_node
->collapse ();
3469 for (size_t j
= 0; j
< nref
; j
++)
3471 tree ref_tree
= stream_read_tree (ib
, data_in
);
3473 if (ref_tree
&& !get_alias_set (ref_tree
))
3477 fprintf (dump_file
, "Streamed in alias set 0 type ");
3478 print_generic_expr (dump_file
, ref_tree
);
3479 fprintf (dump_file
, "\n");
3484 modref_ref_node
<alias_set_type
> *nolto_ref_node
= NULL
;
3485 modref_ref_node
<tree
> *lto_ref_node
= NULL
;
3487 if (nolto_base_node
)
3489 = nolto_base_node
->insert_ref (ref_tree
3490 ? get_alias_set (ref_tree
) : 0,
3493 lto_ref_node
= lto_base_node
->insert_ref (ref_tree
, max_refs
);
3495 size_t every_access
= streamer_read_uhwi (ib
);
3496 size_t naccesses
= streamer_read_uhwi (ib
);
3498 if (nolto_ref_node
&& every_access
)
3499 nolto_ref_node
->collapse ();
3500 if (lto_ref_node
&& every_access
)
3501 lto_ref_node
->collapse ();
3503 for (size_t k
= 0; k
< naccesses
; k
++)
3505 modref_access_node a
= modref_access_node::stream_in (ib
);
3507 nolto_ref_node
->insert_access (a
, max_accesses
, false);
3509 lto_ref_node
->insert_access (a
, max_accesses
, false);
3514 (*lto_ret
)->cleanup ();
3516 (*nolto_ret
)->cleanup ();
3519 /* Write ESUM to BP. */
3522 modref_write_escape_summary (struct bitpack_d
*bp
, escape_summary
*esum
)
3526 bp_pack_var_len_unsigned (bp
, 0);
3529 bp_pack_var_len_unsigned (bp
, esum
->esc
.length ());
3532 FOR_EACH_VEC_ELT (esum
->esc
, i
, ee
)
3534 bp_pack_var_len_int (bp
, ee
->parm_index
);
3535 bp_pack_var_len_unsigned (bp
, ee
->arg
);
3536 bp_pack_var_len_unsigned (bp
, ee
->min_flags
);
3537 bp_pack_value (bp
, ee
->direct
, 1);
3541 /* Read escape summary for E from BP. */
3544 modref_read_escape_summary (struct bitpack_d
*bp
, cgraph_edge
*e
)
3546 unsigned int n
= bp_unpack_var_len_unsigned (bp
);
3549 escape_summary
*esum
= escape_summaries
->get_create (e
);
3550 esum
->esc
.reserve_exact (n
);
3551 for (unsigned int i
= 0; i
< n
; i
++)
3554 ee
.parm_index
= bp_unpack_var_len_int (bp
);
3555 ee
.arg
= bp_unpack_var_len_unsigned (bp
);
3556 ee
.min_flags
= bp_unpack_var_len_unsigned (bp
);
3557 ee
.direct
= bp_unpack_value (bp
, 1);
3558 esum
->esc
.quick_push (ee
);
3562 /* Callback for write_summary. */
3567 struct output_block
*ob
= create_output_block (LTO_section_ipa_modref
);
3568 lto_symtab_encoder_t encoder
= ob
->decl_state
->symtab_node_encoder
;
3569 unsigned int count
= 0;
3574 streamer_write_uhwi (ob
, 0);
3575 streamer_write_char_stream (ob
->main_stream
, 0);
3576 produce_asm (ob
, NULL
);
3577 destroy_output_block (ob
);
3581 for (i
= 0; i
< lto_symtab_encoder_size (encoder
); i
++)
3583 symtab_node
*snode
= lto_symtab_encoder_deref (encoder
, i
);
3584 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (snode
);
3585 modref_summary_lto
*r
;
3587 if (cnode
&& cnode
->definition
&& !cnode
->alias
3588 && (r
= summaries_lto
->get (cnode
))
3589 && r
->useful_p (flags_from_decl_or_type (cnode
->decl
)))
3592 streamer_write_uhwi (ob
, count
);
3594 for (i
= 0; i
< lto_symtab_encoder_size (encoder
); i
++)
3596 symtab_node
*snode
= lto_symtab_encoder_deref (encoder
, i
);
3597 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (snode
);
3599 if (cnode
&& cnode
->definition
&& !cnode
->alias
)
3601 modref_summary_lto
*r
= summaries_lto
->get (cnode
);
3603 if (!r
|| !r
->useful_p (flags_from_decl_or_type (cnode
->decl
)))
3606 streamer_write_uhwi (ob
, lto_symtab_encoder_encode (encoder
, cnode
));
3608 streamer_write_uhwi (ob
, r
->arg_flags
.length ());
3609 for (unsigned int i
= 0; i
< r
->arg_flags
.length (); i
++)
3610 streamer_write_uhwi (ob
, r
->arg_flags
[i
]);
3611 streamer_write_uhwi (ob
, r
->retslot_flags
);
3612 streamer_write_uhwi (ob
, r
->static_chain_flags
);
3614 write_modref_records (r
->loads
, ob
);
3615 write_modref_records (r
->stores
, ob
);
3616 streamer_write_uhwi (ob
, r
->kills
.length ());
3617 for (auto kill
: r
->kills
)
3618 kill
.stream_out (ob
);
3620 struct bitpack_d bp
= bitpack_create (ob
->main_stream
);
3621 bp_pack_value (&bp
, r
->writes_errno
, 1);
3622 bp_pack_value (&bp
, r
->side_effects
, 1);
3623 bp_pack_value (&bp
, r
->nondeterministic
, 1);
3624 bp_pack_value (&bp
, r
->calls_interposable
, 1);
3627 for (cgraph_edge
*e
= cnode
->indirect_calls
;
3628 e
; e
= e
->next_callee
)
3630 class fnspec_summary
*sum
= fnspec_summaries
->get (e
);
3631 bp_pack_value (&bp
, sum
!= NULL
, 1);
3633 bp_pack_string (ob
, &bp
, sum
->fnspec
, true);
3634 class escape_summary
*esum
= escape_summaries
->get (e
);
3635 modref_write_escape_summary (&bp
,esum
);
3637 for (cgraph_edge
*e
= cnode
->callees
; e
; e
= e
->next_callee
)
3639 class fnspec_summary
*sum
= fnspec_summaries
->get (e
);
3640 bp_pack_value (&bp
, sum
!= NULL
, 1);
3642 bp_pack_string (ob
, &bp
, sum
->fnspec
, true);
3643 class escape_summary
*esum
= escape_summaries
->get (e
);
3644 modref_write_escape_summary (&bp
,esum
);
3647 streamer_write_bitpack (&bp
);
3650 streamer_write_char_stream (ob
->main_stream
, 0);
3651 produce_asm (ob
, NULL
);
3652 destroy_output_block (ob
);
3656 read_section (struct lto_file_decl_data
*file_data
, const char *data
,
3659 const struct lto_function_header
*header
3660 = (const struct lto_function_header
*) data
;
3661 const int cfg_offset
= sizeof (struct lto_function_header
);
3662 const int main_offset
= cfg_offset
+ header
->cfg_size
;
3663 const int string_offset
= main_offset
+ header
->main_size
;
3664 struct data_in
*data_in
;
3666 unsigned int f_count
;
3668 lto_input_block
ib ((const char *) data
+ main_offset
, header
->main_size
,
3669 file_data
->mode_table
);
3672 = lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
3673 header
->string_size
, vNULL
);
3674 f_count
= streamer_read_uhwi (&ib
);
3675 for (i
= 0; i
< f_count
; i
++)
3677 struct cgraph_node
*node
;
3678 lto_symtab_encoder_t encoder
;
3680 unsigned int index
= streamer_read_uhwi (&ib
);
3681 encoder
= file_data
->symtab_node_encoder
;
3682 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
3685 modref_summary
*modref_sum
= summaries
3686 ? summaries
->get_create (node
) : NULL
;
3687 modref_summary_lto
*modref_sum_lto
= summaries_lto
3688 ? summaries_lto
->get_create (node
)
3690 if (optimization_summaries
)
3691 modref_sum
= optimization_summaries
->get_create (node
);
3695 modref_sum
->writes_errno
= false;
3696 modref_sum
->side_effects
= false;
3697 modref_sum
->nondeterministic
= false;
3698 modref_sum
->calls_interposable
= false;
3702 modref_sum_lto
->writes_errno
= false;
3703 modref_sum_lto
->side_effects
= false;
3704 modref_sum_lto
->nondeterministic
= false;
3705 modref_sum_lto
->calls_interposable
= false;
3708 gcc_assert (!modref_sum
|| (!modref_sum
->loads
3709 && !modref_sum
->stores
));
3710 gcc_assert (!modref_sum_lto
|| (!modref_sum_lto
->loads
3711 && !modref_sum_lto
->stores
));
3712 unsigned int args
= streamer_read_uhwi (&ib
);
3713 if (args
&& modref_sum
)
3714 modref_sum
->arg_flags
.reserve_exact (args
);
3715 if (args
&& modref_sum_lto
)
3716 modref_sum_lto
->arg_flags
.reserve_exact (args
);
3717 for (unsigned int i
= 0; i
< args
; i
++)
3719 eaf_flags_t flags
= streamer_read_uhwi (&ib
);
3721 modref_sum
->arg_flags
.quick_push (flags
);
3723 modref_sum_lto
->arg_flags
.quick_push (flags
);
3725 eaf_flags_t flags
= streamer_read_uhwi (&ib
);
3727 modref_sum
->retslot_flags
= flags
;
3729 modref_sum_lto
->retslot_flags
= flags
;
3731 flags
= streamer_read_uhwi (&ib
);
3733 modref_sum
->static_chain_flags
= flags
;
3735 modref_sum_lto
->static_chain_flags
= flags
;
3737 read_modref_records (node
->decl
, &ib
, data_in
,
3738 modref_sum
? &modref_sum
->loads
: NULL
,
3739 modref_sum_lto
? &modref_sum_lto
->loads
: NULL
);
3740 read_modref_records (node
->decl
, &ib
, data_in
,
3741 modref_sum
? &modref_sum
->stores
: NULL
,
3742 modref_sum_lto
? &modref_sum_lto
->stores
: NULL
);
3743 int j
= streamer_read_uhwi (&ib
);
3744 if (j
&& modref_sum
)
3745 modref_sum
->kills
.reserve_exact (j
);
3746 if (j
&& modref_sum_lto
)
3747 modref_sum_lto
->kills
.reserve_exact (j
);
3748 for (int k
= 0; k
< j
; k
++)
3750 modref_access_node a
= modref_access_node::stream_in (&ib
);
3753 modref_sum
->kills
.quick_push (a
);
3755 modref_sum_lto
->kills
.quick_push (a
);
3757 struct bitpack_d bp
= streamer_read_bitpack (&ib
);
3758 if (bp_unpack_value (&bp
, 1))
3761 modref_sum
->writes_errno
= true;
3763 modref_sum_lto
->writes_errno
= true;
3765 if (bp_unpack_value (&bp
, 1))
3768 modref_sum
->side_effects
= true;
3770 modref_sum_lto
->side_effects
= true;
3772 if (bp_unpack_value (&bp
, 1))
3775 modref_sum
->nondeterministic
= true;
3777 modref_sum_lto
->nondeterministic
= true;
3779 if (bp_unpack_value (&bp
, 1))
3782 modref_sum
->calls_interposable
= true;
3784 modref_sum_lto
->calls_interposable
= true;
3788 for (cgraph_edge
*e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
3790 if (bp_unpack_value (&bp
, 1))
3792 class fnspec_summary
*sum
= fnspec_summaries
->get_create (e
);
3793 sum
->fnspec
= xstrdup (bp_unpack_string (data_in
, &bp
));
3795 modref_read_escape_summary (&bp
, e
);
3797 for (cgraph_edge
*e
= node
->callees
; e
; e
= e
->next_callee
)
3799 if (bp_unpack_value (&bp
, 1))
3801 class fnspec_summary
*sum
= fnspec_summaries
->get_create (e
);
3802 sum
->fnspec
= xstrdup (bp_unpack_string (data_in
, &bp
));
3804 modref_read_escape_summary (&bp
, e
);
3808 modref_sum
->finalize (node
->decl
);
3811 fprintf (dump_file
, "Read modref for %s\n",
3812 node
->dump_name ());
3814 modref_sum
->dump (dump_file
);
3816 modref_sum_lto
->dump (dump_file
);
3817 dump_modref_edge_summaries (dump_file
, node
, 4);
3821 lto_free_section_data (file_data
, LTO_section_ipa_modref
, NULL
, data
,
3823 lto_data_in_delete (data_in
);
3826 /* Callback for read_summary. */
3831 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
3832 struct lto_file_decl_data
*file_data
;
3835 gcc_checking_assert (!optimization_summaries
&& !summaries
&& !summaries_lto
);
3837 optimization_summaries
= modref_summaries::create_ggc (symtab
);
3840 if (flag_wpa
|| flag_incremental_link
== INCREMENTAL_LINK_LTO
)
3841 summaries_lto
= modref_summaries_lto::create_ggc (symtab
);
3843 || (flag_incremental_link
== INCREMENTAL_LINK_LTO
3844 && flag_fat_lto_objects
))
3845 summaries
= modref_summaries::create_ggc (symtab
);
3846 if (!fnspec_summaries
)
3847 fnspec_summaries
= new fnspec_summaries_t (symtab
);
3848 if (!escape_summaries
)
3849 escape_summaries
= new escape_summaries_t (symtab
);
3852 while ((file_data
= file_data_vec
[j
++]))
3855 const char *data
= lto_get_summary_section_data (file_data
,
3856 LTO_section_ipa_modref
,
3859 read_section (file_data
, data
, len
);
3861 /* Fatal error here. We do not want to support compiling ltrans units
3862 with different version of compiler or different flags than the WPA
3863 unit, so this should never happen. */
3864 fatal_error (input_location
,
3865 "IPA modref summary is missing in input file");
3869 /* Recompute arg_flags for param adjustments in INFO. */
3872 remap_arg_flags (auto_vec
<eaf_flags_t
> &arg_flags
, clone_info
*info
)
3874 auto_vec
<eaf_flags_t
> old
= arg_flags
.copy ();
3877 ipa_adjusted_param
*p
;
3879 arg_flags
.release ();
3881 FOR_EACH_VEC_SAFE_ELT (info
->param_adjustments
->m_adj_params
, i
, p
)
3883 int o
= info
->param_adjustments
->get_original_index (i
);
3884 if (o
>= 0 && (int)old
.length () > o
&& old
[o
])
3888 arg_flags
.safe_grow_cleared (max
+ 1, true);
3889 FOR_EACH_VEC_SAFE_ELT (info
->param_adjustments
->m_adj_params
, i
, p
)
3891 int o
= info
->param_adjustments
->get_original_index (i
);
3892 if (o
>= 0 && (int)old
.length () > o
&& old
[o
])
3893 arg_flags
[i
] = old
[o
];
3897 /* Update kills accrdoing to the parm map MAP. */
3900 remap_kills (vec
<modref_access_node
> &kills
, const vec
<int> &map
)
3902 for (size_t i
= 0; i
< kills
.length ();)
3903 if (kills
[i
].parm_index
>= 0)
3905 if (kills
[i
].parm_index
< (int)map
.length ()
3906 && map
[kills
[i
].parm_index
] != MODREF_UNKNOWN_PARM
)
3908 kills
[i
].parm_index
= map
[kills
[i
].parm_index
];
3912 kills
.unordered_remove (i
);
3918 /* If signature changed, update the summary. */
3921 update_signature (struct cgraph_node
*node
)
3923 clone_info
*info
= clone_info::get (node
);
3924 if (!info
|| !info
->param_adjustments
)
3927 modref_summary
*r
= optimization_summaries
3928 ? optimization_summaries
->get (node
) : NULL
;
3929 modref_summary_lto
*r_lto
= summaries_lto
3930 ? summaries_lto
->get (node
) : NULL
;
3935 fprintf (dump_file
, "Updating summary for %s from:\n",
3936 node
->dump_name ());
3938 r
->dump (dump_file
);
3940 r_lto
->dump (dump_file
);
3944 ipa_adjusted_param
*p
;
3946 FOR_EACH_VEC_SAFE_ELT (info
->param_adjustments
->m_adj_params
, i
, p
)
3948 int idx
= info
->param_adjustments
->get_original_index (i
);
3953 auto_vec
<int, 32> map
;
3955 map
.reserve (max
+ 1);
3956 for (i
= 0; i
<= max
; i
++)
3957 map
.quick_push (MODREF_UNKNOWN_PARM
);
3958 FOR_EACH_VEC_SAFE_ELT (info
->param_adjustments
->m_adj_params
, i
, p
)
3960 int idx
= info
->param_adjustments
->get_original_index (i
);
3966 r
->loads
->remap_params (&map
);
3967 r
->stores
->remap_params (&map
);
3968 remap_kills (r
->kills
, map
);
3969 if (r
->arg_flags
.length ())
3970 remap_arg_flags (r
->arg_flags
, info
);
3974 r_lto
->loads
->remap_params (&map
);
3975 r_lto
->stores
->remap_params (&map
);
3976 remap_kills (r_lto
->kills
, map
);
3977 if (r_lto
->arg_flags
.length ())
3978 remap_arg_flags (r_lto
->arg_flags
, info
);
3982 fprintf (dump_file
, "to:\n");
3984 r
->dump (dump_file
);
3986 r_lto
->dump (dump_file
);
3989 r
->finalize (node
->decl
);
3993 /* Definition of the modref IPA pass. */
3994 const pass_data pass_data_ipa_modref
=
3996 IPA_PASS
, /* type */
3997 "modref", /* name */
3998 OPTGROUP_IPA
, /* optinfo_flags */
3999 TV_IPA_MODREF
, /* tv_id */
4000 0, /* properties_required */
4001 0, /* properties_provided */
4002 0, /* properties_destroyed */
4003 0, /* todo_flags_start */
4004 ( TODO_dump_symtab
), /* todo_flags_finish */
4007 class pass_ipa_modref
: public ipa_opt_pass_d
4010 pass_ipa_modref (gcc::context
*ctxt
)
4011 : ipa_opt_pass_d (pass_data_ipa_modref
, ctxt
,
4012 modref_generate
, /* generate_summary */
4013 modref_write
, /* write_summary */
4014 modref_read
, /* read_summary */
4015 modref_write
, /* write_optimization_summary */
4016 modref_read
, /* read_optimization_summary */
4017 NULL
, /* stmt_fixup */
4018 0, /* function_transform_todo_flags_start */
4019 NULL
, /* function_transform */
4020 NULL
) /* variable_transform */
4023 /* opt_pass methods: */
4024 opt_pass
*clone () { return new pass_ipa_modref (m_ctxt
); }
4025 virtual bool gate (function
*)
4029 virtual unsigned int execute (function
*);
4035 unsigned int pass_modref::execute (function
*)
4037 if (analyze_function (false))
4038 return execute_fixup_cfg ();
4043 make_pass_modref (gcc::context
*ctxt
)
4045 return new pass_modref (ctxt
);
4049 make_pass_ipa_modref (gcc::context
*ctxt
)
4051 return new pass_ipa_modref (ctxt
);
4056 /* Skip edges from and to nodes without ipa_pure_const enabled.
4057 Ignore not available symbols. */
4060 ignore_edge (struct cgraph_edge
*e
)
4062 /* We merge summaries of inline clones into summaries of functions they
4063 are inlined to. For that reason the complete function bodies must
4065 if (!e
->inline_failed
)
4067 enum availability avail
;
4068 cgraph_node
*callee
= e
->callee
->function_or_virtual_thunk_symbol
4069 (&avail
, e
->caller
);
4071 return (avail
<= AVAIL_INTERPOSABLE
4072 || ((!optimization_summaries
|| !optimization_summaries
->get (callee
))
4073 && (!summaries_lto
|| !summaries_lto
->get (callee
))));
4076 /* Compute parm_map for CALLEE_EDGE. */
4079 compute_parm_map (cgraph_edge
*callee_edge
, vec
<modref_parm_map
> *parm_map
)
4081 class ipa_edge_args
*args
;
4082 if (ipa_node_params_sum
4083 && !callee_edge
->call_stmt_cannot_inline_p
4084 && (args
= ipa_edge_args_sum
->get (callee_edge
)) != NULL
)
4086 int i
, count
= ipa_get_cs_argument_count (args
);
4087 class ipa_node_params
*caller_parms_info
, *callee_pi
;
4088 class ipa_call_summary
*es
4089 = ipa_call_summaries
->get (callee_edge
);
4091 = callee_edge
->callee
->function_or_virtual_thunk_symbol
4092 (NULL
, callee_edge
->caller
);
4095 = ipa_node_params_sum
->get (callee_edge
->caller
->inlined_to
4096 ? callee_edge
->caller
->inlined_to
4097 : callee_edge
->caller
);
4098 callee_pi
= ipa_node_params_sum
->get (callee
);
4100 (*parm_map
).safe_grow_cleared (count
, true);
4102 for (i
= 0; i
< count
; i
++)
4104 if (es
&& es
->param
[i
].points_to_local_or_readonly_memory
)
4106 (*parm_map
)[i
].parm_index
= MODREF_LOCAL_MEMORY_PARM
;
4110 struct ipa_jump_func
*jf
4111 = ipa_get_ith_jump_func (args
, i
);
4112 if (jf
&& callee_pi
)
4114 tree cst
= ipa_value_from_jfunc (caller_parms_info
,
4118 if (cst
&& points_to_local_or_readonly_memory_p (cst
))
4120 (*parm_map
)[i
].parm_index
= MODREF_LOCAL_MEMORY_PARM
;
4124 if (jf
&& jf
->type
== IPA_JF_PASS_THROUGH
)
4126 (*parm_map
)[i
].parm_index
4127 = ipa_get_jf_pass_through_formal_id (jf
);
4128 if (ipa_get_jf_pass_through_operation (jf
) == NOP_EXPR
)
4130 (*parm_map
)[i
].parm_offset_known
= true;
4131 (*parm_map
)[i
].parm_offset
= 0;
4133 else if (ipa_get_jf_pass_through_operation (jf
)
4134 == POINTER_PLUS_EXPR
4135 && ptrdiff_tree_p (ipa_get_jf_pass_through_operand (jf
),
4136 &(*parm_map
)[i
].parm_offset
))
4137 (*parm_map
)[i
].parm_offset_known
= true;
4139 (*parm_map
)[i
].parm_offset_known
= false;
4142 if (jf
&& jf
->type
== IPA_JF_ANCESTOR
)
4144 (*parm_map
)[i
].parm_index
= ipa_get_jf_ancestor_formal_id (jf
);
4145 (*parm_map
)[i
].parm_offset_known
= true;
4147 (!(ipa_get_jf_ancestor_offset (jf
) & (BITS_PER_UNIT
- 1)));
4148 (*parm_map
)[i
].parm_offset
4149 = ipa_get_jf_ancestor_offset (jf
) >> LOG2_BITS_PER_UNIT
;
4152 (*parm_map
)[i
].parm_index
= -1;
4156 fprintf (dump_file
, " Parm map: ");
4157 for (i
= 0; i
< count
; i
++)
4158 fprintf (dump_file
, " %i", (*parm_map
)[i
].parm_index
);
4159 fprintf (dump_file
, "\n");
4166 /* Map used to translate escape infos. */
4174 /* Update escape map for E. */
4177 update_escape_summary_1 (cgraph_edge
*e
,
4178 vec
<vec
<escape_map
>> &map
,
4181 escape_summary
*sum
= escape_summaries
->get (e
);
4184 auto_vec
<escape_entry
> old
= sum
->esc
.copy ();
4185 sum
->esc
.release ();
4189 FOR_EACH_VEC_ELT (old
, i
, ee
)
4192 struct escape_map
*em
;
4193 /* TODO: We do not have jump functions for return slots, so we
4194 never propagate them to outer function. */
4195 if (ee
->parm_index
>= (int)map
.length ()
4196 || ee
->parm_index
< 0)
4198 FOR_EACH_VEC_ELT (map
[ee
->parm_index
], j
, em
)
4200 int min_flags
= ee
->min_flags
;
4201 if (ee
->direct
&& !em
->direct
)
4202 min_flags
= deref_flags (min_flags
, ignore_stores
);
4203 struct escape_entry entry
= {em
->parm_index
, ee
->arg
,
4205 ee
->direct
& em
->direct
};
4206 sum
->esc
.safe_push (entry
);
4209 if (!sum
->esc
.length ())
4210 escape_summaries
->remove (e
);
4213 /* Update escape map fo NODE. */
4216 update_escape_summary (cgraph_node
*node
,
4217 vec
<vec
<escape_map
>> &map
,
4220 if (!escape_summaries
)
4222 for (cgraph_edge
*e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4223 update_escape_summary_1 (e
, map
, ignore_stores
);
4224 for (cgraph_edge
*e
= node
->callees
; e
; e
= e
->next_callee
)
4226 if (!e
->inline_failed
)
4227 update_escape_summary (e
->callee
, map
, ignore_stores
);
4229 update_escape_summary_1 (e
, map
, ignore_stores
);
4233 /* Get parameter type from DECL. This is only safe for special cases
4234 like builtins we create fnspec for because the type match is checked
4235 at fnspec creation time. */
4238 get_parm_type (tree decl
, unsigned int i
)
4240 tree t
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
4242 for (unsigned int p
= 0; p
< i
; p
++)
4244 return TREE_VALUE (t
);
4247 /* Return access mode for argument I of call E with FNSPEC. */
4249 static modref_access_node
4250 get_access_for_fnspec (cgraph_edge
*e
, attr_fnspec
&fnspec
,
4251 unsigned int i
, modref_parm_map
&map
)
4253 tree size
= NULL_TREE
;
4254 unsigned int size_arg
;
4256 if (!fnspec
.arg_specified_p (i
))
4258 else if (fnspec
.arg_max_access_size_given_by_arg_p (i
, &size_arg
))
4260 cgraph_node
*node
= e
->caller
->inlined_to
4261 ? e
->caller
->inlined_to
: e
->caller
;
4262 ipa_node_params
*caller_parms_info
= ipa_node_params_sum
->get (node
);
4263 ipa_edge_args
*args
= ipa_edge_args_sum
->get (e
);
4264 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, size_arg
);
4267 size
= ipa_value_from_jfunc (caller_parms_info
, jf
,
4268 get_parm_type (e
->callee
->decl
, size_arg
));
4270 else if (fnspec
.arg_access_size_given_by_type_p (i
))
4271 size
= TYPE_SIZE_UNIT (get_parm_type (e
->callee
->decl
, i
));
4272 modref_access_node a
= {0, -1, -1,
4273 map
.parm_offset
, map
.parm_index
,
4274 map
.parm_offset_known
, 0};
4275 poly_int64 size_hwi
;
4277 && poly_int_tree_p (size
, &size_hwi
)
4278 && coeffs_in_range_p (size_hwi
, 0,
4279 HOST_WIDE_INT_MAX
/ BITS_PER_UNIT
))
4282 a
.max_size
= size_hwi
<< LOG2_BITS_PER_UNIT
;
4287 /* Collapse loads and return true if something changed. */
4289 collapse_loads (modref_summary
*cur_summary
,
4290 modref_summary_lto
*cur_summary_lto
)
4292 bool changed
= false;
4294 if (cur_summary
&& !cur_summary
->loads
->every_base
)
4296 cur_summary
->loads
->collapse ();
4300 && !cur_summary_lto
->loads
->every_base
)
4302 cur_summary_lto
->loads
->collapse ();
4308 /* Collapse loads and return true if something changed. */
4311 collapse_stores (modref_summary
*cur_summary
,
4312 modref_summary_lto
*cur_summary_lto
)
4314 bool changed
= false;
4316 if (cur_summary
&& !cur_summary
->stores
->every_base
)
4318 cur_summary
->stores
->collapse ();
4322 && !cur_summary_lto
->stores
->every_base
)
4324 cur_summary_lto
->stores
->collapse ();
4330 /* Call E in NODE with ECF_FLAGS has no summary; update MODREF_SUMMARY and
4331 CUR_SUMMARY_LTO accordingly. Return true if something changed. */
4334 propagate_unknown_call (cgraph_node
*node
,
4335 cgraph_edge
*e
, int ecf_flags
,
4336 modref_summary
*cur_summary
,
4337 modref_summary_lto
*cur_summary_lto
,
4338 bool nontrivial_scc
)
4340 bool changed
= false;
4341 class fnspec_summary
*fnspec_sum
= fnspec_summaries
->get (e
);
4342 auto_vec
<modref_parm_map
, 32> parm_map
;
4346 && builtin_safe_for_const_function_p (&looping
, e
->callee
->decl
))
4348 if (looping
&& cur_summary
&& !cur_summary
->side_effects
)
4350 cur_summary
->side_effects
= true;
4353 if (looping
&& cur_summary_lto
&& !cur_summary_lto
->side_effects
)
4355 cur_summary_lto
->side_effects
= true;
4361 if (!(ecf_flags
& (ECF_CONST
| ECF_NOVOPS
| ECF_PURE
))
4362 || (ecf_flags
& ECF_LOOPING_CONST_OR_PURE
)
4365 if (cur_summary
&& !cur_summary
->side_effects
)
4367 cur_summary
->side_effects
= true;
4370 if (cur_summary_lto
&& !cur_summary_lto
->side_effects
)
4372 cur_summary_lto
->side_effects
= true;
4375 if (cur_summary
&& !cur_summary
->nondeterministic
4376 && !ignore_nondeterminism_p (node
->decl
, ecf_flags
))
4378 cur_summary
->nondeterministic
= true;
4381 if (cur_summary_lto
&& !cur_summary_lto
->nondeterministic
4382 && !ignore_nondeterminism_p (node
->decl
, ecf_flags
))
4384 cur_summary_lto
->nondeterministic
= true;
4388 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
4392 && compute_parm_map (e
, &parm_map
))
4394 attr_fnspec
fnspec (fnspec_sum
->fnspec
);
4396 gcc_checking_assert (fnspec
.known_p ());
4397 if (fnspec
.global_memory_read_p ())
4398 collapse_loads (cur_summary
, cur_summary_lto
);
4401 tree t
= TYPE_ARG_TYPES (TREE_TYPE (e
->callee
->decl
));
4402 for (unsigned i
= 0; i
< parm_map
.length () && t
;
4403 i
++, t
= TREE_CHAIN (t
))
4404 if (!POINTER_TYPE_P (TREE_VALUE (t
)))
4406 else if (!fnspec
.arg_specified_p (i
)
4407 || fnspec
.arg_maybe_read_p (i
))
4409 modref_parm_map map
= parm_map
[i
];
4410 if (map
.parm_index
== MODREF_LOCAL_MEMORY_PARM
)
4412 if (map
.parm_index
== MODREF_UNKNOWN_PARM
)
4414 collapse_loads (cur_summary
, cur_summary_lto
);
4418 changed
|= cur_summary
->loads
->insert
4420 get_access_for_fnspec (e
, fnspec
, i
, map
), false);
4421 if (cur_summary_lto
)
4422 changed
|= cur_summary_lto
->loads
->insert
4424 get_access_for_fnspec (e
, fnspec
, i
, map
), false);
4427 if (ignore_stores_p (node
->decl
, ecf_flags
))
4429 else if (fnspec
.global_memory_written_p ())
4430 collapse_stores (cur_summary
, cur_summary_lto
);
4433 tree t
= TYPE_ARG_TYPES (TREE_TYPE (e
->callee
->decl
));
4434 for (unsigned i
= 0; i
< parm_map
.length () && t
;
4435 i
++, t
= TREE_CHAIN (t
))
4436 if (!POINTER_TYPE_P (TREE_VALUE (t
)))
4438 else if (!fnspec
.arg_specified_p (i
)
4439 || fnspec
.arg_maybe_written_p (i
))
4441 modref_parm_map map
= parm_map
[i
];
4442 if (map
.parm_index
== MODREF_LOCAL_MEMORY_PARM
)
4444 if (map
.parm_index
== MODREF_UNKNOWN_PARM
)
4446 collapse_stores (cur_summary
, cur_summary_lto
);
4450 changed
|= cur_summary
->stores
->insert
4452 get_access_for_fnspec (e
, fnspec
, i
, map
), false);
4453 if (cur_summary_lto
)
4454 changed
|= cur_summary_lto
->stores
->insert
4456 get_access_for_fnspec (e
, fnspec
, i
, map
), false);
4459 if (fnspec
.errno_maybe_written_p () && flag_errno_math
)
4461 if (cur_summary
&& !cur_summary
->writes_errno
)
4463 cur_summary
->writes_errno
= true;
4466 if (cur_summary_lto
&& !cur_summary_lto
->writes_errno
)
4468 cur_summary_lto
->writes_errno
= true;
4475 fprintf (dump_file
, " collapsing loads\n");
4476 changed
|= collapse_loads (cur_summary
, cur_summary_lto
);
4477 if (!ignore_stores_p (node
->decl
, ecf_flags
))
4480 fprintf (dump_file
, " collapsing stores\n");
4481 changed
|= collapse_stores (cur_summary
, cur_summary_lto
);
4486 /* Maybe remove summaies of NODE pointed to by CUR_SUMMARY_PTR
4487 and CUR_SUMMARY_LTO_PTR if they are useless according to ECF_FLAGS. */
4490 remove_useless_summaries (cgraph_node
*node
,
4491 modref_summary
**cur_summary_ptr
,
4492 modref_summary_lto
**cur_summary_lto_ptr
,
4495 if (*cur_summary_ptr
&& !(*cur_summary_ptr
)->useful_p (ecf_flags
, false))
4497 optimization_summaries
->remove (node
);
4498 *cur_summary_ptr
= NULL
;
4500 if (*cur_summary_lto_ptr
4501 && !(*cur_summary_lto_ptr
)->useful_p (ecf_flags
, false))
4503 summaries_lto
->remove (node
);
4504 *cur_summary_lto_ptr
= NULL
;
4508 /* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
4509 and propagate loads/stores. */
4512 modref_propagate_in_scc (cgraph_node
*component_node
)
4514 bool changed
= true;
4521 = ((struct ipa_dfs_info
*) component_node
->aux
)->next_cycle
;
4523 for (struct cgraph_node
*cur
= component_node
; cur
;
4524 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
4526 cgraph_node
*node
= cur
->inlined_to
? cur
->inlined_to
: cur
;
4527 modref_summary
*cur_summary
= optimization_summaries
4528 ? optimization_summaries
->get (node
)
4530 modref_summary_lto
*cur_summary_lto
= summaries_lto
4531 ? summaries_lto
->get (node
)
4534 if (!cur_summary
&& !cur_summary_lto
)
4537 int cur_ecf_flags
= flags_from_decl_or_type (node
->decl
);
4540 fprintf (dump_file
, " Processing %s%s%s\n",
4542 TREE_READONLY (cur
->decl
) ? " (const)" : "",
4543 DECL_PURE_P (cur
->decl
) ? " (pure)" : "");
4545 for (cgraph_edge
*e
= cur
->indirect_calls
; e
; e
= e
->next_callee
)
4548 fprintf (dump_file
, " Indirect call\n");
4549 if (propagate_unknown_call
4550 (node
, e
, e
->indirect_info
->ecf_flags
,
4551 cur_summary
, cur_summary_lto
,
4555 remove_useless_summaries (node
, &cur_summary
,
4558 if (!cur_summary
&& !cur_summary_lto
)
4563 if (!cur_summary
&& !cur_summary_lto
)
4566 for (cgraph_edge
*callee_edge
= cur
->callees
; callee_edge
;
4567 callee_edge
= callee_edge
->next_callee
)
4569 int flags
= flags_from_decl_or_type (callee_edge
->callee
->decl
);
4570 modref_summary
*callee_summary
= NULL
;
4571 modref_summary_lto
*callee_summary_lto
= NULL
;
4572 struct cgraph_node
*callee
;
4574 if (!callee_edge
->inline_failed
4575 || ((flags
& (ECF_CONST
| ECF_NOVOPS
))
4576 && !(flags
& ECF_LOOPING_CONST_OR_PURE
)))
4579 /* Get the callee and its summary. */
4580 enum availability avail
;
4581 callee
= callee_edge
->callee
->function_or_virtual_thunk_symbol
4584 /* It is not necessary to re-process calls outside of the
4588 || ((struct ipa_dfs_info
*)cur
->aux
)->scc_no
4589 != ((struct ipa_dfs_info
*)callee
->aux
)->scc_no
))
4593 fprintf (dump_file
, " Call to %s\n",
4594 callee_edge
->callee
->dump_name ());
4596 bool ignore_stores
= ignore_stores_p (cur
->decl
, flags
);
4598 if (avail
<= AVAIL_INTERPOSABLE
)
4601 fprintf (dump_file
, " Call target interposable"
4602 " or not available\n");
4603 changed
|= propagate_unknown_call
4604 (node
, callee_edge
, flags
,
4605 cur_summary
, cur_summary_lto
,
4607 if (!cur_summary
&& !cur_summary_lto
)
4612 /* We don't know anything about CALLEE, hence we cannot tell
4613 anything about the entire component. */
4616 && !(callee_summary
= optimization_summaries
->get (callee
)))
4619 fprintf (dump_file
, " No call target summary\n");
4620 changed
|= propagate_unknown_call
4621 (node
, callee_edge
, flags
,
4626 && !(callee_summary_lto
= summaries_lto
->get (callee
)))
4629 fprintf (dump_file
, " No call target summary\n");
4630 changed
|= propagate_unknown_call
4631 (node
, callee_edge
, flags
,
4632 NULL
, cur_summary_lto
,
4636 if (callee_summary
&& !cur_summary
->side_effects
4637 && (callee_summary
->side_effects
4638 || callee_edge
->recursive_p ()))
4640 cur_summary
->side_effects
= true;
4643 if (callee_summary_lto
&& !cur_summary_lto
->side_effects
4644 && (callee_summary_lto
->side_effects
4645 || callee_edge
->recursive_p ()))
4647 cur_summary_lto
->side_effects
= true;
4650 if (callee_summary
&& !cur_summary
->nondeterministic
4651 && callee_summary
->nondeterministic
4652 && !ignore_nondeterminism_p (cur
->decl
, flags
))
4654 cur_summary
->nondeterministic
= true;
4657 if (callee_summary_lto
&& !cur_summary_lto
->nondeterministic
4658 && callee_summary_lto
->nondeterministic
4659 && !ignore_nondeterminism_p (cur
->decl
, flags
))
4661 cur_summary_lto
->nondeterministic
= true;
4664 if (flags
& (ECF_CONST
| ECF_NOVOPS
))
4667 /* We can not safely optimize based on summary of callee if it
4668 does not always bind to current def: it is possible that
4669 memory load was optimized out earlier which may not happen in
4670 the interposed variant. */
4671 if (!callee_edge
->binds_to_current_def_p ())
4673 if (cur_summary
&& !cur_summary
->calls_interposable
)
4675 cur_summary
->calls_interposable
= true;
4678 if (cur_summary_lto
&& !cur_summary_lto
->calls_interposable
)
4680 cur_summary_lto
->calls_interposable
= true;
4684 fprintf (dump_file
, " May not bind local;"
4685 " collapsing loads\n");
4689 auto_vec
<modref_parm_map
, 32> parm_map
;
4690 modref_parm_map chain_map
;
4691 /* TODO: Once we get jump functions for static chains we could
4693 chain_map
.parm_index
= MODREF_UNKNOWN_PARM
;
4695 compute_parm_map (callee_edge
, &parm_map
);
4697 /* Merge in callee's information. */
4700 changed
|= cur_summary
->loads
->merge
4701 (node
->decl
, callee_summary
->loads
,
4702 &parm_map
, &chain_map
, !first
);
4705 changed
|= cur_summary
->stores
->merge
4706 (node
->decl
, callee_summary
->stores
,
4707 &parm_map
, &chain_map
, !first
);
4708 if (!cur_summary
->writes_errno
4709 && callee_summary
->writes_errno
)
4711 cur_summary
->writes_errno
= true;
4716 if (callee_summary_lto
)
4718 changed
|= cur_summary_lto
->loads
->merge
4719 (node
->decl
, callee_summary_lto
->loads
,
4720 &parm_map
, &chain_map
, !first
);
4723 changed
|= cur_summary_lto
->stores
->merge
4724 (node
->decl
, callee_summary_lto
->stores
,
4725 &parm_map
, &chain_map
, !first
);
4726 if (!cur_summary_lto
->writes_errno
4727 && callee_summary_lto
->writes_errno
)
4729 cur_summary_lto
->writes_errno
= true;
4735 remove_useless_summaries (node
, &cur_summary
,
4738 if (!cur_summary
&& !cur_summary_lto
)
4740 if (dump_file
&& changed
)
4743 cur_summary
->dump (dump_file
);
4744 if (cur_summary_lto
)
4745 cur_summary_lto
->dump (dump_file
);
4746 dump_modref_edge_summaries (dump_file
, node
, 4);
4755 "Propagation finished in %i iterations\n", iteration
);
4756 bool pureconst
= false;
4757 for (struct cgraph_node
*cur
= component_node
; cur
;
4758 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
4759 if (!cur
->inlined_to
&& opt_for_fn (cur
->decl
, flag_ipa_pure_const
))
4761 modref_summary
*summary
= optimization_summaries
4762 ? optimization_summaries
->get (cur
)
4764 modref_summary_lto
*summary_lto
= summaries_lto
4765 ? summaries_lto
->get (cur
)
4767 if (summary
&& !summary
->stores
->every_base
&& !summary
->stores
->bases
4768 && !summary
->nondeterministic
)
4770 if (!summary
->loads
->every_base
&& !summary
->loads
->bases
4771 && !summary
->calls_interposable
)
4772 pureconst
|= ipa_make_function_const
4773 (cur
, summary
->side_effects
, false);
4775 pureconst
|= ipa_make_function_pure
4776 (cur
, summary
->side_effects
, false);
4778 if (summary_lto
&& !summary_lto
->stores
->every_base
4779 && !summary_lto
->stores
->bases
&& !summary_lto
->nondeterministic
)
4781 if (!summary_lto
->loads
->every_base
&& !summary_lto
->loads
->bases
4782 && !summary_lto
->calls_interposable
)
4783 pureconst
|= ipa_make_function_const
4784 (cur
, summary_lto
->side_effects
, false);
4786 pureconst
|= ipa_make_function_pure
4787 (cur
, summary_lto
->side_effects
, false);
4793 /* Dump results of propagation in SCC rooted in COMPONENT_NODE. */
4796 modref_propagate_dump_scc (cgraph_node
*component_node
)
4798 for (struct cgraph_node
*cur
= component_node
; cur
;
4799 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
4800 if (!cur
->inlined_to
)
4802 modref_summary
*cur_summary
= optimization_summaries
4803 ? optimization_summaries
->get (cur
)
4805 modref_summary_lto
*cur_summary_lto
= summaries_lto
4806 ? summaries_lto
->get (cur
)
4809 fprintf (dump_file
, "Propagated modref for %s%s%s\n",
4811 TREE_READONLY (cur
->decl
) ? " (const)" : "",
4812 DECL_PURE_P (cur
->decl
) ? " (pure)" : "");
4813 if (optimization_summaries
)
4816 cur_summary
->dump (dump_file
);
4818 fprintf (dump_file
, " Not tracked\n");
4822 if (cur_summary_lto
)
4823 cur_summary_lto
->dump (dump_file
);
4825 fprintf (dump_file
, " Not tracked (lto)\n");
4830 /* Determine EAF flags know for call E with CALLEE_ECF_FLAGS and ARG. */
4833 implicit_eaf_flags_for_edge_and_arg (cgraph_edge
*e
, int callee_ecf_flags
,
4834 bool ignore_stores
, int arg
)
4836 /* Returning the value is already accounted to at local propagation. */
4837 int implicit_flags
= EAF_NOT_RETURNED_DIRECTLY
4838 | EAF_NOT_RETURNED_INDIRECTLY
;
4840 implicit_flags
|= ignore_stores_eaf_flags
;
4841 if (callee_ecf_flags
& ECF_PURE
)
4842 implicit_flags
|= implicit_pure_eaf_flags
;
4843 if (callee_ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
4844 implicit_flags
|= implicit_const_eaf_flags
;
4845 class fnspec_summary
*fnspec_sum
= fnspec_summaries
->get (e
);
4848 attr_fnspec
fnspec (fnspec_sum
->fnspec
);
4849 implicit_flags
|= fnspec
.arg_eaf_flags (arg
);
4851 return implicit_flags
;
4854 /* Process escapes in SUM and merge SUMMARY to CUR_SUMMARY
4855 and SUMMARY_LTO to CUR_SUMMARY_LTO.
4856 Return true if something changed. */
4859 modref_merge_call_site_flags (escape_summary
*sum
,
4860 modref_summary
*cur_summary
,
4861 modref_summary_lto
*cur_summary_lto
,
4862 modref_summary
*summary
,
4863 modref_summary_lto
*summary_lto
,
4866 int caller_ecf_flags
,
4867 int callee_ecf_flags
,
4868 bool binds_to_current_def
)
4872 bool changed
= false;
4873 bool ignore_stores
= ignore_stores_p (caller
, callee_ecf_flags
);
4875 /* If we have no useful info to propagate. */
4876 if ((!cur_summary
|| !cur_summary
->arg_flags
.length ())
4877 && (!cur_summary_lto
|| !cur_summary_lto
->arg_flags
.length ()))
4880 FOR_EACH_VEC_ELT (sum
->esc
, i
, ee
)
4884 int implicit_flags
= implicit_eaf_flags_for_edge_and_arg
4885 (e
, callee_ecf_flags
, ignore_stores
, ee
->arg
);
4887 if (summary
&& ee
->arg
< summary
->arg_flags
.length ())
4888 flags
= summary
->arg_flags
[ee
->arg
];
4890 && ee
->arg
< summary_lto
->arg_flags
.length ())
4891 flags_lto
= summary_lto
->arg_flags
[ee
->arg
];
4894 flags
= deref_flags (flags
, ignore_stores
);
4895 flags_lto
= deref_flags (flags_lto
, ignore_stores
);
4898 implicit_flags
|= ignore_stores_eaf_flags
;
4899 if (callee_ecf_flags
& ECF_PURE
)
4900 implicit_flags
|= implicit_pure_eaf_flags
;
4901 if (callee_ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
4902 implicit_flags
|= implicit_const_eaf_flags
;
4903 class fnspec_summary
*fnspec_sum
= fnspec_summaries
->get (e
);
4906 attr_fnspec
fnspec (fnspec_sum
->fnspec
);
4907 implicit_flags
|= fnspec
.arg_eaf_flags (ee
->arg
);
4910 implicit_flags
= deref_flags (implicit_flags
, ignore_stores
);
4911 flags
|= implicit_flags
;
4912 flags_lto
|= implicit_flags
;
4913 if (!binds_to_current_def
&& (flags
|| flags_lto
))
4915 flags
= interposable_eaf_flags (flags
, implicit_flags
);
4916 flags_lto
= interposable_eaf_flags (flags_lto
, implicit_flags
);
4918 if (!(flags
& EAF_UNUSED
)
4919 && cur_summary
&& ee
->parm_index
< (int)cur_summary
->arg_flags
.length ())
4921 eaf_flags_t
&f
= ee
->parm_index
== MODREF_RETSLOT_PARM
4922 ? cur_summary
->retslot_flags
4923 : ee
->parm_index
== MODREF_STATIC_CHAIN_PARM
4924 ? cur_summary
->static_chain_flags
4925 : cur_summary
->arg_flags
[ee
->parm_index
];
4926 if ((f
& flags
) != f
)
4928 f
= remove_useless_eaf_flags
4929 (f
& flags
, caller_ecf_flags
,
4930 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller
))));
4934 if (!(flags_lto
& EAF_UNUSED
)
4936 && ee
->parm_index
< (int)cur_summary_lto
->arg_flags
.length ())
4938 eaf_flags_t
&f
= ee
->parm_index
== MODREF_RETSLOT_PARM
4939 ? cur_summary_lto
->retslot_flags
4940 : ee
->parm_index
== MODREF_STATIC_CHAIN_PARM
4941 ? cur_summary_lto
->static_chain_flags
4942 : cur_summary_lto
->arg_flags
[ee
->parm_index
];
4943 if ((f
& flags_lto
) != f
)
4945 f
= remove_useless_eaf_flags
4946 (f
& flags_lto
, caller_ecf_flags
,
4947 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller
))));
4955 /* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
4956 and propagate arg flags. */
4959 modref_propagate_flags_in_scc (cgraph_node
*component_node
)
4961 bool changed
= true;
4967 for (struct cgraph_node
*cur
= component_node
; cur
;
4968 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
4970 cgraph_node
*node
= cur
->inlined_to
? cur
->inlined_to
: cur
;
4971 modref_summary
*cur_summary
= optimization_summaries
4972 ? optimization_summaries
->get (node
)
4974 modref_summary_lto
*cur_summary_lto
= summaries_lto
4975 ? summaries_lto
->get (node
)
4978 if (!cur_summary
&& !cur_summary_lto
)
4980 int caller_ecf_flags
= flags_from_decl_or_type (cur
->decl
);
4983 fprintf (dump_file
, " Processing %s%s%s\n",
4985 TREE_READONLY (cur
->decl
) ? " (const)" : "",
4986 DECL_PURE_P (cur
->decl
) ? " (pure)" : "");
4988 for (cgraph_edge
*e
= cur
->indirect_calls
; e
; e
= e
->next_callee
)
4990 escape_summary
*sum
= escape_summaries
->get (e
);
4992 if (!sum
|| (e
->indirect_info
->ecf_flags
4993 & (ECF_CONST
| ECF_NOVOPS
)))
4996 changed
|= modref_merge_call_site_flags
4997 (sum
, cur_summary
, cur_summary_lto
,
5002 e
->indirect_info
->ecf_flags
,
5006 if (!cur_summary
&& !cur_summary_lto
)
5009 for (cgraph_edge
*callee_edge
= cur
->callees
; callee_edge
;
5010 callee_edge
= callee_edge
->next_callee
)
5012 int ecf_flags
= flags_from_decl_or_type
5013 (callee_edge
->callee
->decl
);
5014 modref_summary
*callee_summary
= NULL
;
5015 modref_summary_lto
*callee_summary_lto
= NULL
;
5016 struct cgraph_node
*callee
;
5018 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
)
5019 || !callee_edge
->inline_failed
)
5022 /* Get the callee and its summary. */
5023 enum availability avail
;
5024 callee
= callee_edge
->callee
->function_or_virtual_thunk_symbol
5027 /* It is not necessary to re-process calls outside of the
5031 || ((struct ipa_dfs_info
*)cur
->aux
)->scc_no
5032 != ((struct ipa_dfs_info
*)callee
->aux
)->scc_no
))
5035 escape_summary
*sum
= escape_summaries
->get (callee_edge
);
5040 fprintf (dump_file
, " Call to %s\n",
5041 callee_edge
->callee
->dump_name ());
5043 if (avail
<= AVAIL_INTERPOSABLE
5044 || callee_edge
->call_stmt_cannot_inline_p
)
5049 callee_summary
= optimization_summaries
->get (callee
);
5050 if (cur_summary_lto
)
5051 callee_summary_lto
= summaries_lto
->get (callee
);
5053 changed
|= modref_merge_call_site_flags
5054 (sum
, cur_summary
, cur_summary_lto
,
5055 callee_summary
, callee_summary_lto
,
5060 callee
->binds_to_current_def_p ());
5061 if (dump_file
&& changed
)
5064 cur_summary
->dump (dump_file
);
5065 if (cur_summary_lto
)
5066 cur_summary_lto
->dump (dump_file
);
5074 "Propagation of flags finished in %i iterations\n", iteration
);
5077 } /* ANON namespace. */
5079 /* Call EDGE was inlined; merge summary from callee to the caller. */
5082 ipa_merge_modref_summary_after_inlining (cgraph_edge
*edge
)
5084 if (!summaries
&& !summaries_lto
)
5087 struct cgraph_node
*to
= (edge
->caller
->inlined_to
5088 ? edge
->caller
->inlined_to
: edge
->caller
);
5089 class modref_summary
*to_info
= summaries
? summaries
->get (to
) : NULL
;
5090 class modref_summary_lto
*to_info_lto
= summaries_lto
5091 ? summaries_lto
->get (to
) : NULL
;
5093 if (!to_info
&& !to_info_lto
)
5096 summaries
->remove (edge
->callee
);
5098 summaries_lto
->remove (edge
->callee
);
5099 remove_modref_edge_summaries (edge
->callee
);
5103 class modref_summary
*callee_info
= summaries
? summaries
->get (edge
->callee
)
5105 class modref_summary_lto
*callee_info_lto
5106 = summaries_lto
? summaries_lto
->get (edge
->callee
) : NULL
;
5107 int flags
= flags_from_decl_or_type (edge
->callee
->decl
);
5108 /* Combine in outer flags. */
5110 for (n
= edge
->caller
; n
->inlined_to
; n
= n
->callers
->caller
)
5111 flags
|= flags_from_decl_or_type (n
->decl
);
5112 flags
|= flags_from_decl_or_type (n
->decl
);
5113 bool ignore_stores
= ignore_stores_p (edge
->caller
->decl
, flags
);
5115 if (!callee_info
&& to_info
)
5117 if (!(flags
& (ECF_CONST
| ECF_NOVOPS
)))
5118 to_info
->loads
->collapse ();
5120 to_info
->stores
->collapse ();
5122 if (!callee_info_lto
&& to_info_lto
)
5124 if (!(flags
& (ECF_CONST
| ECF_NOVOPS
)))
5125 to_info_lto
->loads
->collapse ();
5127 to_info_lto
->stores
->collapse ();
5129 if (callee_info
|| callee_info_lto
)
5131 auto_vec
<modref_parm_map
, 32> parm_map
;
5132 modref_parm_map chain_map
;
5133 /* TODO: Once we get jump functions for static chains we could
5134 compute parm_index. */
5136 compute_parm_map (edge
, &parm_map
);
5140 if (to_info
&& callee_info
)
5141 to_info
->stores
->merge (to
->decl
, callee_info
->stores
, &parm_map
,
5143 if (to_info_lto
&& callee_info_lto
)
5144 to_info_lto
->stores
->merge (to
->decl
, callee_info_lto
->stores
,
5145 &parm_map
, &chain_map
, false);
5147 if (!(flags
& (ECF_CONST
| ECF_NOVOPS
)))
5149 if (to_info
&& callee_info
)
5150 to_info
->loads
->merge (to
->decl
, callee_info
->loads
, &parm_map
,
5152 if (to_info_lto
&& callee_info_lto
)
5153 to_info_lto
->loads
->merge (to
->decl
, callee_info_lto
->loads
,
5154 &parm_map
, &chain_map
, false);
5158 /* Now merge escape summaries.
5159 For every escape to the callee we need to merge calle flags
5160 and remap calees escapes. */
5161 class escape_summary
*sum
= escape_summaries
->get (edge
);
5162 int max_escape
= -1;
5166 if (sum
&& !(flags
& (ECF_CONST
| ECF_NOVOPS
)))
5167 FOR_EACH_VEC_ELT (sum
->esc
, i
, ee
)
5168 if ((int)ee
->arg
> max_escape
)
5169 max_escape
= ee
->arg
;
5171 auto_vec
<vec
<struct escape_map
>, 32> emap (max_escape
+ 1);
5172 emap
.safe_grow (max_escape
+ 1, true);
5173 for (i
= 0; (int)i
< max_escape
+ 1; i
++)
5176 if (sum
&& !(flags
& (ECF_CONST
| ECF_NOVOPS
)))
5177 FOR_EACH_VEC_ELT (sum
->esc
, i
, ee
)
5179 bool needed
= false;
5180 int implicit_flags
= implicit_eaf_flags_for_edge_and_arg
5181 (edge
, flags
, ignore_stores
,
5184 implicit_flags
= deref_flags (implicit_flags
, ignore_stores
);
5185 if (to_info
&& (int)to_info
->arg_flags
.length () > ee
->parm_index
)
5187 int flags
= callee_info
5188 && callee_info
->arg_flags
.length () > ee
->arg
5189 ? callee_info
->arg_flags
[ee
->arg
] : 0;
5191 flags
= deref_flags (flags
, ignore_stores
);
5192 flags
|= ee
->min_flags
| implicit_flags
;
5193 eaf_flags_t
&f
= ee
->parm_index
== MODREF_RETSLOT_PARM
5194 ? to_info
->retslot_flags
5195 : ee
->parm_index
== MODREF_STATIC_CHAIN_PARM
5196 ? to_info
->static_chain_flags
5197 : to_info
->arg_flags
[ee
->parm_index
];
5203 && (int)to_info_lto
->arg_flags
.length () > ee
->parm_index
)
5205 int flags
= callee_info_lto
5206 && callee_info_lto
->arg_flags
.length () > ee
->arg
5207 ? callee_info_lto
->arg_flags
[ee
->arg
] : 0;
5209 flags
= deref_flags (flags
, ignore_stores
);
5210 flags
|= ee
->min_flags
| implicit_flags
;
5211 eaf_flags_t
&f
= ee
->parm_index
== MODREF_RETSLOT_PARM
5212 ? to_info_lto
->retslot_flags
5213 : ee
->parm_index
== MODREF_STATIC_CHAIN_PARM
5214 ? to_info_lto
->static_chain_flags
5215 : to_info_lto
->arg_flags
[ee
->parm_index
];
5220 struct escape_map entry
= {ee
->parm_index
, ee
->direct
};
5222 emap
[ee
->arg
].safe_push (entry
);
5224 update_escape_summary (edge
->callee
, emap
, ignore_stores
);
5225 for (i
= 0; (int)i
< max_escape
+ 1; i
++)
5228 escape_summaries
->remove (edge
);
5232 if (to_info
&& !to_info
->useful_p (flags
))
5235 fprintf (dump_file
, "Removed mod-ref summary for %s\n",
5237 summaries
->remove (to
);
5240 else if (to_info
&& dump_file
)
5243 fprintf (dump_file
, "Updated mod-ref summary for %s\n",
5245 to_info
->dump (dump_file
);
5248 summaries
->remove (edge
->callee
);
5252 if (to_info_lto
&& !to_info_lto
->useful_p (flags
))
5255 fprintf (dump_file
, "Removed mod-ref summary for %s\n",
5257 summaries_lto
->remove (to
);
5260 else if (to_info_lto
&& dump_file
)
5263 fprintf (dump_file
, "Updated mod-ref summary for %s\n",
5265 to_info_lto
->dump (dump_file
);
5267 if (callee_info_lto
)
5268 summaries_lto
->remove (edge
->callee
);
5270 if (!to_info
&& !to_info_lto
)
5271 remove_modref_edge_summaries (to
);
5275 /* Run the IPA pass. This will take a function's summaries and calls and
5276 construct new summaries which represent a transitive closure. So that
5277 summary of an analyzed function contains information about the loads and
5278 stores that the function or any function that it calls does. */
5281 pass_ipa_modref::execute (function
*)
5283 if (!summaries
&& !summaries_lto
)
5285 bool pureconst
= false;
5287 if (optimization_summaries
)
5288 ggc_delete (optimization_summaries
);
5289 optimization_summaries
= summaries
;
5292 struct cgraph_node
**order
= XCNEWVEC (struct cgraph_node
*,
5293 symtab
->cgraph_count
);
5295 order_pos
= ipa_reduced_postorder (order
, true, ignore_edge
);
5298 /* Iterate over all strongly connected components in post-order. */
5299 for (i
= 0; i
< order_pos
; i
++)
5301 /* Get the component's representative. That's just any node in the
5302 component from which we can traverse the entire component. */
5303 struct cgraph_node
*component_node
= order
[i
];
5306 fprintf (dump_file
, "\n\nStart of SCC component\n");
5308 pureconst
|= modref_propagate_in_scc (component_node
);
5309 modref_propagate_flags_in_scc (component_node
);
5310 if (optimization_summaries
)
5311 for (struct cgraph_node
*cur
= component_node
; cur
;
5312 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
5313 if (modref_summary
*sum
= optimization_summaries
->get (cur
))
5314 sum
->finalize (cur
->decl
);
5316 modref_propagate_dump_scc (component_node
);
5319 FOR_EACH_FUNCTION (node
)
5320 update_signature (node
);
5322 ((modref_summaries_lto
*)summaries_lto
)->propagated
= true;
5323 ipa_free_postorder_info ();
5325 delete fnspec_summaries
;
5326 fnspec_summaries
= NULL
;
5327 delete escape_summaries
;
5328 escape_summaries
= NULL
;
5330 /* If we posibly made constructors const/pure we may need to remove
5332 return pureconst
? TODO_remove_functions
: 0;
5335 /* Summaries must stay alive until end of compilation. */
5338 ipa_modref_c_finalize ()
5340 if (optimization_summaries
)
5341 ggc_delete (optimization_summaries
);
5342 optimization_summaries
= NULL
;
5344 ggc_delete (summaries_lto
);
5345 summaries_lto
= NULL
;
5346 if (fnspec_summaries
)
5347 delete fnspec_summaries
;
5348 fnspec_summaries
= NULL
;
5349 if (escape_summaries
)
5350 delete escape_summaries
;
5351 escape_summaries
= NULL
;
5354 #include "gt-ipa-modref.h"