1 /* Search for references that a functions loads or stores.
2 Copyright (C) 2020-2023 Free Software Foundation, Inc.
3 Contributed by David Cepelik and Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* Mod/ref pass records summary about loads and stores performed by the
22 function. This is later used by alias analysis to disambiguate memory
23 accesses across function calls.
25 This file contains a tree pass and an IPA pass. Both performs the same
26 analysis however tree pass is executed during early and late optimization
27 passes to propagate info downwards in the compilation order. IPA pass
28 propagates across the callgraph and is able to handle recursion and works on
29 whole program during link-time analysis.
31 LTO mode differs from the local mode by not recording alias sets but types
32 that are translated to alias sets later. This is necessary in order stream
33 the information because the alias sets are rebuild at stream-in time and may
34 not correspond to ones seen during analysis. For this reason part of
35 analysis is duplicated.
37 The following information is computed
38 1) load/store access tree described in ipa-modref-tree.h
39 This is used by tree-ssa-alias to disambiguate load/stores
40 2) EAF flags used by points-to analysis (in tree-ssa-structalias).
41 and defined in tree-core.h.
42 and stored to optimization_summaries.
44 There are multiple summaries computed and used during the propagation:
45 - summaries holds summaries from analysis to IPA propagation
47 - summaries_lto is same as summaries but holds them in a format
48 that can be streamed (as described above).
49 - fnspec_summary holds fnspec strings for call. This is
50 necessary because gimple_call_fnspec performs additional
51 analysis except for looking callee fndecl.
52 - escape_summary holds escape points for given call edge.
53 That is a vector recording what function parameters
54 may escape to a function call (and with what parameter index). */
58 #include "coretypes.h"
62 #include "alloc-pool.h"
63 #include "tree-pass.h"
64 #include "gimple-iterator.h"
67 #include "ipa-utils.h"
68 #include "symbol-summary.h"
69 #include "gimple-pretty-print.h"
70 #include "gimple-walk.h"
71 #include "print-tree.h"
72 #include "tree-streamer.h"
75 #include "ipa-modref-tree.h"
76 #include "ipa-modref.h"
77 #include "value-range.h"
79 #include "ipa-fnsummary.h"
80 #include "attr-fnspec.h"
81 #include "symtab-clones.h"
82 #include "gimple-ssa.h"
83 #include "tree-phinodes.h"
84 #include "tree-ssa-operands.h"
85 #include "ssa-iterators.h"
86 #include "stringpool.h"
87 #include "tree-ssanames.h"
95 /* We record fnspec specifiers for call edges since they depends on actual
114 /* Summary holding fnspec string for a given call. */
116 class fnspec_summaries_t
: public call_summary
<fnspec_summary
*>
119 fnspec_summaries_t (symbol_table
*symtab
)
120 : call_summary
<fnspec_summary
*> (symtab
) {}
121 /* Hook that is called by summary when an edge is duplicated. */
122 void duplicate (cgraph_edge
*,
125 fnspec_summary
*dst
) final override
127 dst
->fnspec
= xstrdup (src
->fnspec
);
131 static fnspec_summaries_t
*fnspec_summaries
= NULL
;
133 /* Escape summary holds a vector of param indexes that escape to
137 /* Parameter that escapes at a given call. */
139 /* Argument it escapes to. */
141 /* Minimal flags known about the argument. */
142 eaf_flags_t min_flags
;
143 /* Does it escape directly or indirectly? */
147 /* Dump EAF flags. */
150 dump_eaf_flags (FILE *out
, int flags
, bool newline
= true)
152 if (flags
& EAF_UNUSED
)
153 fprintf (out
, " unused");
154 if (flags
& EAF_NO_DIRECT_CLOBBER
)
155 fprintf (out
, " no_direct_clobber");
156 if (flags
& EAF_NO_INDIRECT_CLOBBER
)
157 fprintf (out
, " no_indirect_clobber");
158 if (flags
& EAF_NO_DIRECT_ESCAPE
)
159 fprintf (out
, " no_direct_escape");
160 if (flags
& EAF_NO_INDIRECT_ESCAPE
)
161 fprintf (out
, " no_indirect_escape");
162 if (flags
& EAF_NOT_RETURNED_DIRECTLY
)
163 fprintf (out
, " not_returned_directly");
164 if (flags
& EAF_NOT_RETURNED_INDIRECTLY
)
165 fprintf (out
, " not_returned_indirectly");
166 if (flags
& EAF_NO_DIRECT_READ
)
167 fprintf (out
, " no_direct_read");
168 if (flags
& EAF_NO_INDIRECT_READ
)
169 fprintf (out
, " no_indirect_read");
174 struct escape_summary
176 auto_vec
<escape_entry
> esc
;
177 void dump (FILE *out
)
179 for (unsigned int i
= 0; i
< esc
.length (); i
++)
181 fprintf (out
, " parm %i arg %i %s min:",
184 esc
[i
].direct
? "(direct)" : "(indirect)");
185 dump_eaf_flags (out
, esc
[i
].min_flags
, false);
191 class escape_summaries_t
: public call_summary
<escape_summary
*>
194 escape_summaries_t (symbol_table
*symtab
)
195 : call_summary
<escape_summary
*> (symtab
) {}
196 /* Hook that is called by summary when an edge is duplicated. */
197 void duplicate (cgraph_edge
*,
200 escape_summary
*dst
) final override
202 dst
->esc
= src
->esc
.copy ();
206 static escape_summaries_t
*escape_summaries
= NULL
;
208 } /* ANON namespace: GTY annotated summaries can not be anonymous. */
211 /* Class (from which there is one global instance) that holds modref summaries
212 for all analyzed functions. */
214 class GTY((user
)) modref_summaries
215 : public fast_function_summary
<modref_summary
*, va_gc
>
218 modref_summaries (symbol_table
*symtab
)
219 : fast_function_summary
<modref_summary
*, va_gc
> (symtab
) {}
220 void insert (cgraph_node
*, modref_summary
*state
) final override
;
221 void duplicate (cgraph_node
*src_node
,
222 cgraph_node
*dst_node
,
223 modref_summary
*src_data
,
224 modref_summary
*dst_data
) final override
;
225 static modref_summaries
*create_ggc (symbol_table
*symtab
)
227 return new (ggc_alloc_no_dtor
<modref_summaries
> ())
228 modref_summaries (symtab
);
232 class modref_summary_lto
;
234 /* Class (from which there is one global instance) that holds modref summaries
235 for all analyzed functions. */
237 class GTY((user
)) modref_summaries_lto
238 : public fast_function_summary
<modref_summary_lto
*, va_gc
>
241 modref_summaries_lto (symbol_table
*symtab
)
242 : fast_function_summary
<modref_summary_lto
*, va_gc
> (symtab
),
243 propagated (false) {}
244 void insert (cgraph_node
*, modref_summary_lto
*state
) final override
;
245 void duplicate (cgraph_node
*src_node
,
246 cgraph_node
*dst_node
,
247 modref_summary_lto
*src_data
,
248 modref_summary_lto
*dst_data
) final override
;
249 static modref_summaries_lto
*create_ggc (symbol_table
*symtab
)
251 return new (ggc_alloc_no_dtor
<modref_summaries_lto
> ())
252 modref_summaries_lto (symtab
);
257 /* Global variable holding all modref summaries
258 (from analysis to IPA propagation time). */
260 static GTY(()) fast_function_summary
<modref_summary
*, va_gc
>
263 /* Global variable holding all modref optimization summaries
264 (from IPA propagation time or used by local optimization pass). */
266 static GTY(()) fast_function_summary
<modref_summary
*, va_gc
>
267 *optimization_summaries
;
269 /* LTO summaries hold info from analysis to LTO streaming or from LTO
270 stream-in through propagation to LTO stream-out. */
272 static GTY(()) fast_function_summary
<modref_summary_lto
*, va_gc
>
275 /* Summary for a single function which this pass produces. */
277 modref_summary::modref_summary ()
278 : loads (NULL
), stores (NULL
), retslot_flags (0), static_chain_flags (0),
279 writes_errno (false), side_effects (false), nondeterministic (false),
280 calls_interposable (false), global_memory_read (false),
281 global_memory_written (false), try_dse (false)
285 modref_summary::~modref_summary ()
293 /* Remove all flags from EAF_FLAGS that are implied by ECF_FLAGS and not
294 useful to track. If returns_void is true moreover clear
297 remove_useless_eaf_flags (int eaf_flags
, int ecf_flags
, bool returns_void
)
299 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
300 eaf_flags
&= ~implicit_const_eaf_flags
;
301 else if (ecf_flags
& ECF_PURE
)
302 eaf_flags
&= ~implicit_pure_eaf_flags
;
303 else if ((ecf_flags
& ECF_NORETURN
) || returns_void
)
304 eaf_flags
&= ~(EAF_NOT_RETURNED_DIRECTLY
| EAF_NOT_RETURNED_INDIRECTLY
);
308 /* Return true if FLAGS holds some useful information. */
311 eaf_flags_useful_p (vec
<eaf_flags_t
> &flags
, int ecf_flags
)
313 for (unsigned i
= 0; i
< flags
.length (); i
++)
314 if (remove_useless_eaf_flags (flags
[i
], ecf_flags
, false))
319 /* Return true if summary is potentially useful for optimization.
320 If CHECK_FLAGS is false assume that arg_flags are useful. */
323 modref_summary::useful_p (int ecf_flags
, bool check_flags
)
325 if (arg_flags
.length () && !check_flags
)
327 if (check_flags
&& eaf_flags_useful_p (arg_flags
, ecf_flags
))
329 arg_flags
.release ();
330 if (check_flags
&& remove_useless_eaf_flags (retslot_flags
, ecf_flags
, false))
333 && remove_useless_eaf_flags (static_chain_flags
, ecf_flags
, false))
335 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
336 return ((!side_effects
|| !nondeterministic
)
337 && (ecf_flags
& ECF_LOOPING_CONST_OR_PURE
));
338 if (loads
&& !loads
->every_base
)
342 if (ecf_flags
& ECF_PURE
)
343 return ((!side_effects
|| !nondeterministic
)
344 && (ecf_flags
& ECF_LOOPING_CONST_OR_PURE
));
345 return stores
&& !stores
->every_base
;
348 /* Single function summary used for LTO. */
350 typedef modref_tree
<tree
> modref_records_lto
;
351 struct GTY(()) modref_summary_lto
353 /* Load and stores in functions using types rather then alias sets.
355 This is necessary to make the information streamable for LTO but is also
356 more verbose and thus more likely to hit the limits. */
357 modref_records_lto
*loads
;
358 modref_records_lto
*stores
;
359 auto_vec
<modref_access_node
> GTY((skip
)) kills
;
360 auto_vec
<eaf_flags_t
> GTY((skip
)) arg_flags
;
361 eaf_flags_t retslot_flags
;
362 eaf_flags_t static_chain_flags
;
363 unsigned writes_errno
: 1;
364 unsigned side_effects
: 1;
365 unsigned nondeterministic
: 1;
366 unsigned calls_interposable
: 1;
368 modref_summary_lto ();
369 ~modref_summary_lto ();
371 bool useful_p (int ecf_flags
, bool check_flags
= true);
374 /* Summary for a single function which this pass produces. */
376 modref_summary_lto::modref_summary_lto ()
377 : loads (NULL
), stores (NULL
), retslot_flags (0), static_chain_flags (0),
378 writes_errno (false), side_effects (false), nondeterministic (false),
379 calls_interposable (false)
383 modref_summary_lto::~modref_summary_lto ()
392 /* Return true if lto summary is potentially useful for optimization.
393 If CHECK_FLAGS is false assume that arg_flags are useful. */
396 modref_summary_lto::useful_p (int ecf_flags
, bool check_flags
)
398 if (arg_flags
.length () && !check_flags
)
400 if (check_flags
&& eaf_flags_useful_p (arg_flags
, ecf_flags
))
402 arg_flags
.release ();
403 if (check_flags
&& remove_useless_eaf_flags (retslot_flags
, ecf_flags
, false))
406 && remove_useless_eaf_flags (static_chain_flags
, ecf_flags
, false))
408 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
409 return ((!side_effects
|| !nondeterministic
)
410 && (ecf_flags
& ECF_LOOPING_CONST_OR_PURE
));
411 if (loads
&& !loads
->every_base
)
415 if (ecf_flags
& ECF_PURE
)
416 return ((!side_effects
|| !nondeterministic
)
417 && (ecf_flags
& ECF_LOOPING_CONST_OR_PURE
));
418 return stores
&& !stores
->every_base
;
421 /* Dump records TT to OUT. */
424 dump_records (modref_records
*tt
, FILE *out
)
428 fprintf (out
, " Every base\n");
432 modref_base_node
<alias_set_type
> *n
;
433 FOR_EACH_VEC_SAFE_ELT (tt
->bases
, i
, n
)
435 fprintf (out
, " Base %i: alias set %i\n", (int)i
, n
->base
);
438 fprintf (out
, " Every ref\n");
442 modref_ref_node
<alias_set_type
> *r
;
443 FOR_EACH_VEC_SAFE_ELT (n
->refs
, j
, r
)
445 fprintf (out
, " Ref %i: alias set %i\n", (int)j
, r
->ref
);
448 fprintf (out
, " Every access\n");
452 modref_access_node
*a
;
453 FOR_EACH_VEC_SAFE_ELT (r
->accesses
, k
, a
)
455 fprintf (out
, " access:");
462 /* Dump records TT to OUT. */
465 dump_lto_records (modref_records_lto
*tt
, FILE *out
)
469 fprintf (out
, " Every base\n");
473 modref_base_node
<tree
> *n
;
474 FOR_EACH_VEC_SAFE_ELT (tt
->bases
, i
, n
)
476 fprintf (out
, " Base %i:", (int)i
);
477 print_generic_expr (dump_file
, n
->base
);
478 fprintf (out
, " (alias set %i)\n",
479 n
->base
? get_alias_set (n
->base
) : 0);
482 fprintf (out
, " Every ref\n");
486 modref_ref_node
<tree
> *r
;
487 FOR_EACH_VEC_SAFE_ELT (n
->refs
, j
, r
)
489 fprintf (out
, " Ref %i:", (int)j
);
490 print_generic_expr (dump_file
, r
->ref
);
491 fprintf (out
, " (alias set %i)\n",
492 r
->ref
? get_alias_set (r
->ref
) : 0);
495 fprintf (out
, " Every access\n");
499 modref_access_node
*a
;
500 FOR_EACH_VEC_SAFE_ELT (r
->accesses
, k
, a
)
502 fprintf (out
, " access:");
509 /* Dump all escape points of NODE to OUT. */
512 dump_modref_edge_summaries (FILE *out
, cgraph_node
*node
, int depth
)
515 if (!escape_summaries
)
517 for (cgraph_edge
*e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
519 class escape_summary
*sum
= escape_summaries
->get (e
);
522 fprintf (out
, "%*sIndirect call %i in %s escapes:",
523 depth
, "", i
, node
->dump_name ());
528 for (cgraph_edge
*e
= node
->callees
; e
; e
= e
->next_callee
)
530 if (!e
->inline_failed
)
531 dump_modref_edge_summaries (out
, e
->callee
, depth
+ 1);
532 class escape_summary
*sum
= escape_summaries
->get (e
);
535 fprintf (out
, "%*sCall %s->%s escapes:", depth
, "",
536 node
->dump_name (), e
->callee
->dump_name ());
539 class fnspec_summary
*fsum
= fnspec_summaries
->get (e
);
542 fprintf (out
, "%*sCall %s->%s fnspec: %s\n", depth
, "",
543 node
->dump_name (), e
->callee
->dump_name (),
549 /* Remove all call edge summaries associated with NODE. */
552 remove_modref_edge_summaries (cgraph_node
*node
)
554 if (!escape_summaries
)
556 for (cgraph_edge
*e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
557 escape_summaries
->remove (e
);
558 for (cgraph_edge
*e
= node
->callees
; e
; e
= e
->next_callee
)
560 if (!e
->inline_failed
)
561 remove_modref_edge_summaries (e
->callee
);
562 escape_summaries
->remove (e
);
563 fnspec_summaries
->remove (e
);
570 modref_summary::dump (FILE *out
)
574 fprintf (out
, " loads:\n");
575 dump_records (loads
, out
);
579 fprintf (out
, " stores:\n");
580 dump_records (stores
, out
);
584 fprintf (out
, " kills:\n");
585 for (auto kill
: kills
)
592 fprintf (out
, " Writes errno\n");
594 fprintf (out
, " Side effects\n");
595 if (nondeterministic
)
596 fprintf (out
, " Nondeterministic\n");
597 if (calls_interposable
)
598 fprintf (out
, " Calls interposable\n");
599 if (global_memory_read
)
600 fprintf (out
, " Global memory read\n");
601 if (global_memory_written
)
602 fprintf (out
, " Global memory written\n");
604 fprintf (out
, " Try dse\n");
605 if (arg_flags
.length ())
607 for (unsigned int i
= 0; i
< arg_flags
.length (); i
++)
610 fprintf (out
, " parm %i flags:", i
);
611 dump_eaf_flags (out
, arg_flags
[i
]);
616 fprintf (out
, " Retslot flags:");
617 dump_eaf_flags (out
, retslot_flags
);
619 if (static_chain_flags
)
621 fprintf (out
, " Static chain flags:");
622 dump_eaf_flags (out
, static_chain_flags
);
629 modref_summary_lto::dump (FILE *out
)
631 fprintf (out
, " loads:\n");
632 dump_lto_records (loads
, out
);
633 fprintf (out
, " stores:\n");
634 dump_lto_records (stores
, out
);
637 fprintf (out
, " kills:\n");
638 for (auto kill
: kills
)
645 fprintf (out
, " Writes errno\n");
647 fprintf (out
, " Side effects\n");
648 if (nondeterministic
)
649 fprintf (out
, " Nondeterministic\n");
650 if (calls_interposable
)
651 fprintf (out
, " Calls interposable\n");
652 if (arg_flags
.length ())
654 for (unsigned int i
= 0; i
< arg_flags
.length (); i
++)
657 fprintf (out
, " parm %i flags:", i
);
658 dump_eaf_flags (out
, arg_flags
[i
]);
663 fprintf (out
, " Retslot flags:");
664 dump_eaf_flags (out
, retslot_flags
);
666 if (static_chain_flags
)
668 fprintf (out
, " Static chain flags:");
669 dump_eaf_flags (out
, static_chain_flags
);
673 /* Called after summary is produced and before it is used by local analysis.
674 Can be called multiple times in case summary needs to update signature.
675 FUN is decl of function summary is attached to. */
677 modref_summary::finalize (tree fun
)
679 global_memory_read
= !loads
|| loads
->global_access_p ();
680 global_memory_written
= !stores
|| stores
->global_access_p ();
682 /* We can do DSE if we know function has no side effects and
683 we can analyze all stores. Disable dse if there are too many
685 if (side_effects
|| global_memory_written
|| writes_errno
)
691 int num_tests
= 0, max_tests
692 = opt_for_fn (fun
, param_modref_max_tests
);
693 modref_base_node
<alias_set_type
> *base_node
;
694 modref_ref_node
<alias_set_type
> *ref_node
;
695 modref_access_node
*access_node
;
696 FOR_EACH_VEC_SAFE_ELT (stores
->bases
, i
, base_node
)
698 if (base_node
->every_ref
)
703 FOR_EACH_VEC_SAFE_ELT (base_node
->refs
, j
, ref_node
)
705 if (base_node
->every_ref
)
710 FOR_EACH_VEC_SAFE_ELT (ref_node
->accesses
, k
, access_node
)
711 if (num_tests
++ > max_tests
712 || !access_node
->parm_offset_known
)
724 if (loads
->every_base
)
729 for (auto base_node
: loads
->bases
)
731 if (base_node
->every_ref
)
734 for (auto ref_node
: base_node
->refs
)
735 if (ref_node
->every_access
)
738 load_accesses
+= ref_node
->accesses
->length ();
743 /* Get function summary for FUNC if it exists, return NULL otherwise. */
746 get_modref_function_summary (cgraph_node
*func
)
748 /* Avoid creation of the summary too early (e.g. when front-end calls us). */
749 if (!optimization_summaries
)
752 /* A single function body may be represented by multiple symbols with
753 different visibility. For example, if FUNC is an interposable alias,
754 we don't want to return anything, even if we have summary for the target
756 enum availability avail
;
757 func
= func
->ultimate_alias_target
758 (&avail
, current_function_decl
?
759 cgraph_node::get (current_function_decl
) : NULL
);
760 if (avail
<= AVAIL_INTERPOSABLE
)
763 modref_summary
*r
= optimization_summaries
->get (func
);
767 /* Get function summary for CALL if it exists, return NULL otherwise.
768 If non-null set interposed to indicate whether function may not
769 bind to current def. In this case sometimes loads from function
770 needs to be ignored. */
773 get_modref_function_summary (gcall
*call
, bool *interposed
)
775 tree callee
= gimple_call_fndecl (call
);
778 struct cgraph_node
*node
= cgraph_node::get (callee
);
781 modref_summary
*r
= get_modref_function_summary (node
);
783 *interposed
= r
->calls_interposable
784 || !node
->binds_to_current_def_p ();
791 /* Return true if ECF flags says that nondeterminism can be ignored. */
794 ignore_nondeterminism_p (tree caller
, int flags
)
796 if (flags
& (ECF_CONST
| ECF_PURE
))
798 if ((flags
& (ECF_NORETURN
| ECF_NOTHROW
)) == (ECF_NORETURN
| ECF_NOTHROW
)
799 || (!opt_for_fn (caller
, flag_exceptions
) && (flags
& ECF_NORETURN
)))
804 /* Return true if ECF flags says that return value can be ignored. */
807 ignore_retval_p (tree caller
, int flags
)
809 if ((flags
& (ECF_NORETURN
| ECF_NOTHROW
)) == (ECF_NORETURN
| ECF_NOTHROW
)
810 || (!opt_for_fn (caller
, flag_exceptions
) && (flags
& ECF_NORETURN
)))
815 /* Return true if ECF flags says that stores can be ignored. */
818 ignore_stores_p (tree caller
, int flags
)
820 if (flags
& (ECF_PURE
| ECF_CONST
| ECF_NOVOPS
))
822 if ((flags
& (ECF_NORETURN
| ECF_NOTHROW
)) == (ECF_NORETURN
| ECF_NOTHROW
)
823 || (!opt_for_fn (caller
, flag_exceptions
) && (flags
& ECF_NORETURN
)))
828 /* Determine parm_map for PTR which is supposed to be a pointer. */
831 parm_map_for_ptr (tree op
)
835 struct modref_parm_map parm_map
;
838 parm_map
.parm_offset_known
= false;
839 parm_map
.parm_offset
= 0;
841 offset_known
= unadjusted_ptr_and_unit_offset (op
, &op
, &offset
);
842 if (TREE_CODE (op
) == SSA_NAME
843 && SSA_NAME_IS_DEFAULT_DEF (op
)
844 && TREE_CODE (SSA_NAME_VAR (op
)) == PARM_DECL
)
848 if (cfun
->static_chain_decl
849 && op
== ssa_default_def (cfun
, cfun
->static_chain_decl
))
850 index
= MODREF_STATIC_CHAIN_PARM
;
852 for (tree t
= DECL_ARGUMENTS (current_function_decl
);
853 t
!= SSA_NAME_VAR (op
); t
= DECL_CHAIN (t
))
855 parm_map
.parm_index
= index
;
856 parm_map
.parm_offset_known
= offset_known
;
857 parm_map
.parm_offset
= offset
;
859 else if (points_to_local_or_readonly_memory_p (op
))
860 parm_map
.parm_index
= MODREF_LOCAL_MEMORY_PARM
;
861 /* Memory allocated in the function is not visible to caller before the
862 call and thus we do not need to record it as load/stores/kills. */
863 else if (TREE_CODE (op
) == SSA_NAME
864 && (call
= dyn_cast
<gcall
*>(SSA_NAME_DEF_STMT (op
))) != NULL
865 && gimple_call_flags (call
) & ECF_MALLOC
)
866 parm_map
.parm_index
= MODREF_LOCAL_MEMORY_PARM
;
868 parm_map
.parm_index
= MODREF_UNKNOWN_PARM
;
872 /* Return true if ARG with EAF flags FLAGS can not make any caller's parameter
873 used (if LOAD is true we check loads, otherwise stores). */
876 verify_arg (tree arg
, int flags
, bool load
)
878 if (flags
& EAF_UNUSED
)
880 if (load
&& (flags
& EAF_NO_DIRECT_READ
))
883 && (flags
& (EAF_NO_DIRECT_CLOBBER
| EAF_NO_INDIRECT_CLOBBER
))
884 == (EAF_NO_DIRECT_CLOBBER
| EAF_NO_INDIRECT_CLOBBER
))
886 if (is_gimple_constant (arg
))
888 if (DECL_P (arg
) && TREE_READONLY (arg
))
890 if (TREE_CODE (arg
) == ADDR_EXPR
)
892 tree t
= get_base_address (TREE_OPERAND (arg
, 0));
893 if (is_gimple_constant (t
))
896 && (TREE_READONLY (t
) || TREE_CODE (t
) == FUNCTION_DECL
))
902 /* Return true if STMT may access memory that is pointed to by parameters
903 of caller and which is not seen as an escape by PTA.
904 CALLEE_ECF_FLAGS are ECF flags of callee. If LOAD is true then by access
905 we mean load, otherwise we mean store. */
908 may_access_nonescaping_parm_p (gcall
*call
, int callee_ecf_flags
, bool load
)
910 int implicit_flags
= 0;
912 if (ignore_stores_p (current_function_decl
, callee_ecf_flags
))
913 implicit_flags
|= ignore_stores_eaf_flags
;
914 if (callee_ecf_flags
& ECF_PURE
)
915 implicit_flags
|= implicit_pure_eaf_flags
;
916 if (callee_ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
917 implicit_flags
|= implicit_const_eaf_flags
;
918 if (gimple_call_chain (call
)
919 && !verify_arg (gimple_call_chain (call
),
920 gimple_call_static_chain_flags (call
) | implicit_flags
,
923 for (unsigned int i
= 0; i
< gimple_call_num_args (call
); i
++)
924 if (!verify_arg (gimple_call_arg (call
, i
),
925 gimple_call_arg_flags (call
, i
) | implicit_flags
,
932 /* Analyze memory accesses (loads, stores and kills) performed
933 by the function. Set also side_effects, calls_interposable
934 and nondeterminism flags. */
936 class modref_access_analysis
939 modref_access_analysis (bool ipa
, modref_summary
*summary
,
940 modref_summary_lto
*summary_lto
)
941 : m_summary (summary
), m_summary_lto (summary_lto
), m_ipa (ipa
)
946 bool set_side_effects ();
947 bool set_nondeterministic ();
948 static modref_access_node
get_access (ao_ref
*ref
);
949 static void record_access (modref_records
*, ao_ref
*, modref_access_node
&);
950 static void record_access_lto (modref_records_lto
*, ao_ref
*,
951 modref_access_node
&a
);
952 bool record_access_p (tree
);
953 bool record_unknown_load ();
954 bool record_unknown_store ();
955 bool record_global_memory_load ();
956 bool record_global_memory_store ();
957 bool merge_call_side_effects (gimple
*, modref_summary
*,
958 cgraph_node
*, bool);
959 modref_access_node
get_access_for_fnspec (gcall
*, attr_fnspec
&,
960 unsigned int, modref_parm_map
&);
961 void process_fnspec (gcall
*);
962 void analyze_call (gcall
*);
963 static bool analyze_load (gimple
*, tree
, tree
, void *);
964 static bool analyze_store (gimple
*, tree
, tree
, void *);
965 void analyze_stmt (gimple
*, bool);
968 /* Summary being computed.
969 We work either with m_summary or m_summary_lto. Never on both. */
970 modref_summary
*m_summary
;
971 modref_summary_lto
*m_summary_lto
;
972 /* Recursive calls needs simplistic dataflow after analysis finished.
973 Collect all calls into this vector during analysis and later process
974 them in propagate. */
975 auto_vec
<gimple
*, 32> m_recursive_calls
;
976 /* ECF flags of function being analyzed. */
978 /* True if IPA propagation will be done later. */
980 /* Set true if statement currently analyze is known to be
981 executed each time function is called. */
982 bool m_always_executed
;
985 /* Set side_effects flag and return if something changed. */
988 modref_access_analysis::set_side_effects ()
990 bool changed
= false;
992 if (m_summary
&& !m_summary
->side_effects
)
994 m_summary
->side_effects
= true;
997 if (m_summary_lto
&& !m_summary_lto
->side_effects
)
999 m_summary_lto
->side_effects
= true;
1005 /* Set nondeterministic flag and return if something changed. */
1008 modref_access_analysis::set_nondeterministic ()
1010 bool changed
= false;
1012 if (m_summary
&& !m_summary
->nondeterministic
)
1014 m_summary
->side_effects
= m_summary
->nondeterministic
= true;
1017 if (m_summary_lto
&& !m_summary_lto
->nondeterministic
)
1019 m_summary_lto
->side_effects
= m_summary_lto
->nondeterministic
= true;
1025 /* Construct modref_access_node from REF. */
1028 modref_access_analysis::get_access (ao_ref
*ref
)
1032 base
= ao_ref_base (ref
);
1033 modref_access_node a
= {ref
->offset
, ref
->size
, ref
->max_size
,
1034 0, MODREF_UNKNOWN_PARM
, false, 0};
1035 if (TREE_CODE (base
) == MEM_REF
|| TREE_CODE (base
) == TARGET_MEM_REF
)
1038 modref_parm_map m
= parm_map_for_ptr (TREE_OPERAND (base
, 0));
1040 a
.parm_index
= m
.parm_index
;
1041 if (a
.parm_index
!= MODREF_UNKNOWN_PARM
&& TREE_CODE (memref
) == MEM_REF
)
1044 = wi::to_poly_wide (TREE_OPERAND
1045 (memref
, 1)).to_shwi (&a
.parm_offset
);
1046 if (a
.parm_offset_known
&& m
.parm_offset_known
)
1047 a
.parm_offset
+= m
.parm_offset
;
1049 a
.parm_offset_known
= false;
1053 a
.parm_index
= MODREF_UNKNOWN_PARM
;
1057 /* Record access into the modref_records data structure. */
1060 modref_access_analysis::record_access (modref_records
*tt
,
1062 modref_access_node
&a
)
1064 alias_set_type base_set
= !flag_strict_aliasing
1065 || !flag_ipa_strict_aliasing
? 0
1066 : ao_ref_base_alias_set (ref
);
1067 alias_set_type ref_set
= !flag_strict_aliasing
1068 || !flag_ipa_strict_aliasing
? 0
1069 : (ao_ref_alias_set (ref
));
1072 fprintf (dump_file
, " - Recording base_set=%i ref_set=%i ",
1076 tt
->insert (current_function_decl
, base_set
, ref_set
, a
, false);
1079 /* IPA version of record_access_tree. */
1082 modref_access_analysis::record_access_lto (modref_records_lto
*tt
, ao_ref
*ref
,
1083 modref_access_node
&a
)
1085 /* get_alias_set sometimes use different type to compute the alias set
1086 than TREE_TYPE (base). Do same adjustments. */
1087 tree base_type
= NULL_TREE
, ref_type
= NULL_TREE
;
1088 if (flag_strict_aliasing
&& flag_ipa_strict_aliasing
)
1093 while (handled_component_p (base
))
1094 base
= TREE_OPERAND (base
, 0);
1096 base_type
= reference_alias_ptr_type_1 (&base
);
1099 base_type
= TREE_TYPE (base
);
1101 base_type
= TYPE_REF_CAN_ALIAS_ALL (base_type
)
1102 ? NULL_TREE
: TREE_TYPE (base_type
);
1104 tree ref_expr
= ref
->ref
;
1105 ref_type
= reference_alias_ptr_type_1 (&ref_expr
);
1108 ref_type
= TREE_TYPE (ref_expr
);
1110 ref_type
= TYPE_REF_CAN_ALIAS_ALL (ref_type
)
1111 ? NULL_TREE
: TREE_TYPE (ref_type
);
1113 /* Sanity check that we are in sync with what get_alias_set does. */
1114 gcc_checking_assert ((!base_type
&& !ao_ref_base_alias_set (ref
))
1115 || get_alias_set (base_type
)
1116 == ao_ref_base_alias_set (ref
));
1117 gcc_checking_assert ((!ref_type
&& !ao_ref_alias_set (ref
))
1118 || get_alias_set (ref_type
)
1119 == ao_ref_alias_set (ref
));
1121 /* Do not bother to record types that have no meaningful alias set.
1122 Also skip variably modified types since these go to local streams. */
1123 if (base_type
&& (!get_alias_set (base_type
)
1124 || variably_modified_type_p (base_type
, NULL_TREE
)))
1125 base_type
= NULL_TREE
;
1126 if (ref_type
&& (!get_alias_set (ref_type
)
1127 || variably_modified_type_p (ref_type
, NULL_TREE
)))
1128 ref_type
= NULL_TREE
;
1132 fprintf (dump_file
, " - Recording base type:");
1133 print_generic_expr (dump_file
, base_type
);
1134 fprintf (dump_file
, " (alias set %i) ref type:",
1135 base_type
? get_alias_set (base_type
) : 0);
1136 print_generic_expr (dump_file
, ref_type
);
1137 fprintf (dump_file
, " (alias set %i) ",
1138 ref_type
? get_alias_set (ref_type
) : 0);
1142 tt
->insert (current_function_decl
, base_type
, ref_type
, a
, false);
1145 /* Returns true if and only if we should store the access to EXPR.
1146 Some accesses, e.g. loads from automatic variables, are not interesting. */
1149 modref_access_analysis::record_access_p (tree expr
)
1151 if (TREE_THIS_VOLATILE (expr
))
1154 fprintf (dump_file
, " (volatile; marking nondeterministic) ");
1155 set_nondeterministic ();
1157 if (cfun
->can_throw_non_call_exceptions
1158 && tree_could_throw_p (expr
))
1161 fprintf (dump_file
, " (can throw; marking side effects) ");
1162 set_side_effects ();
1165 if (refs_local_or_readonly_memory_p (expr
))
1168 fprintf (dump_file
, " - Read-only or local, ignoring.\n");
1174 /* Collapse loads and return true if something changed. */
1177 modref_access_analysis::record_unknown_load ()
1179 bool changed
= false;
1181 if (m_summary
&& !m_summary
->loads
->every_base
)
1183 m_summary
->loads
->collapse ();
1186 if (m_summary_lto
&& !m_summary_lto
->loads
->every_base
)
1188 m_summary_lto
->loads
->collapse ();
1194 /* Collapse loads and return true if something changed. */
1197 modref_access_analysis::record_unknown_store ()
1199 bool changed
= false;
1201 if (m_summary
&& !m_summary
->stores
->every_base
)
1203 m_summary
->stores
->collapse ();
1206 if (m_summary_lto
&& !m_summary_lto
->stores
->every_base
)
1208 m_summary_lto
->stores
->collapse ();
1214 /* Record unknown load from global memory. */
1217 modref_access_analysis::record_global_memory_load ()
1219 bool changed
= false;
1220 modref_access_node a
= {0, -1, -1,
1221 0, MODREF_GLOBAL_MEMORY_PARM
, false, 0};
1223 if (m_summary
&& !m_summary
->loads
->every_base
)
1224 changed
|= m_summary
->loads
->insert (current_function_decl
, 0, 0, a
, false);
1225 if (m_summary_lto
&& !m_summary_lto
->loads
->every_base
)
1226 changed
|= m_summary_lto
->loads
->insert (current_function_decl
,
1231 /* Record unknown store from global memory. */
1234 modref_access_analysis::record_global_memory_store ()
1236 bool changed
= false;
1237 modref_access_node a
= {0, -1, -1,
1238 0, MODREF_GLOBAL_MEMORY_PARM
, false, 0};
1240 if (m_summary
&& !m_summary
->stores
->every_base
)
1241 changed
|= m_summary
->stores
->insert (current_function_decl
,
1243 if (m_summary_lto
&& !m_summary_lto
->stores
->every_base
)
1244 changed
|= m_summary_lto
->stores
->insert (current_function_decl
,
1249 /* Merge side effects of call STMT to function with CALLEE_SUMMARY.
1250 Return true if something changed.
1251 If IGNORE_STORES is true, do not merge stores.
1252 If RECORD_ADJUSTMENTS is true cap number of adjustments to
1253 a given access to make dataflow finite. */
1256 modref_access_analysis::merge_call_side_effects
1257 (gimple
*stmt
, modref_summary
*callee_summary
,
1258 cgraph_node
*callee_node
, bool record_adjustments
)
1260 gcall
*call
= as_a
<gcall
*> (stmt
);
1261 int flags
= gimple_call_flags (call
);
1263 /* Nothing to do for non-looping const functions. */
1264 if ((flags
& (ECF_CONST
| ECF_NOVOPS
))
1265 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
1268 bool changed
= false;
1271 fprintf (dump_file
, " - Merging side effects of %s\n",
1272 callee_node
->dump_name ());
1274 /* Merge side effects and non-determinism.
1275 PURE/CONST flags makes functions deterministic and if there is
1276 no LOOPING_CONST_OR_PURE they also have no side effects. */
1277 if (!(flags
& (ECF_CONST
| ECF_NOVOPS
| ECF_PURE
))
1278 || (flags
& ECF_LOOPING_CONST_OR_PURE
))
1280 if (!m_summary
->side_effects
&& callee_summary
->side_effects
)
1283 fprintf (dump_file
, " - merging side effects.\n");
1284 m_summary
->side_effects
= true;
1287 if (!m_summary
->nondeterministic
&& callee_summary
->nondeterministic
1288 && !ignore_nondeterminism_p (current_function_decl
, flags
))
1291 fprintf (dump_file
, " - merging nondeterministic.\n");
1292 m_summary
->nondeterministic
= true;
1297 /* For const functions we are done. */
1298 if (flags
& (ECF_CONST
| ECF_NOVOPS
))
1301 /* Merge calls_interposable flags. */
1302 if (!m_summary
->calls_interposable
&& callee_summary
->calls_interposable
)
1305 fprintf (dump_file
, " - merging calls interposable.\n");
1306 m_summary
->calls_interposable
= true;
1310 if (!callee_node
->binds_to_current_def_p () && !m_summary
->calls_interposable
)
1313 fprintf (dump_file
, " - May be interposed.\n");
1314 m_summary
->calls_interposable
= true;
1318 /* Now merge the actual load, store and kill vectors. For this we need
1319 to compute map translating new parameters to old. */
1321 fprintf (dump_file
, " Parm map:");
1323 auto_vec
<modref_parm_map
, 32> parm_map
;
1324 parm_map
.safe_grow_cleared (gimple_call_num_args (call
), true);
1325 for (unsigned i
= 0; i
< gimple_call_num_args (call
); i
++)
1327 parm_map
[i
] = parm_map_for_ptr (gimple_call_arg (call
, i
));
1330 fprintf (dump_file
, " %i", parm_map
[i
].parm_index
);
1331 if (parm_map
[i
].parm_offset_known
)
1333 fprintf (dump_file
, " offset:");
1334 print_dec ((poly_int64_pod
)parm_map
[i
].parm_offset
,
1340 modref_parm_map chain_map
;
1341 if (gimple_call_chain (call
))
1343 chain_map
= parm_map_for_ptr (gimple_call_chain (call
));
1346 fprintf (dump_file
, "static chain %i", chain_map
.parm_index
);
1347 if (chain_map
.parm_offset_known
)
1349 fprintf (dump_file
, " offset:");
1350 print_dec ((poly_int64_pod
)chain_map
.parm_offset
,
1356 fprintf (dump_file
, "\n");
1358 /* Kills can be merged in only if we know the function is going to be
1360 if (m_always_executed
1361 && callee_summary
->kills
.length ()
1362 && (!cfun
->can_throw_non_call_exceptions
1363 || !stmt_could_throw_p (cfun
, call
)))
1365 /* Watch for self recursive updates. */
1366 auto_vec
<modref_access_node
, 32> saved_kills
;
1368 saved_kills
.reserve_exact (callee_summary
->kills
.length ());
1369 saved_kills
.splice (callee_summary
->kills
);
1370 for (auto kill
: saved_kills
)
1372 if (kill
.parm_index
>= (int)parm_map
.length ())
1375 = kill
.parm_index
== MODREF_STATIC_CHAIN_PARM
1377 : parm_map
[kill
.parm_index
];
1378 if (m
.parm_index
== MODREF_LOCAL_MEMORY_PARM
1379 || m
.parm_index
== MODREF_UNKNOWN_PARM
1380 || m
.parm_index
== MODREF_RETSLOT_PARM
1381 || !m
.parm_offset_known
)
1383 modref_access_node n
= kill
;
1384 n
.parm_index
= m
.parm_index
;
1385 n
.parm_offset
+= m
.parm_offset
;
1386 if (modref_access_node::insert_kill (m_summary
->kills
, n
,
1387 record_adjustments
))
1392 /* Merge in loads. */
1393 changed
|= m_summary
->loads
->merge (current_function_decl
,
1394 callee_summary
->loads
,
1395 &parm_map
, &chain_map
,
1397 !may_access_nonescaping_parm_p
1398 (call
, flags
, true));
1399 /* Merge in stores. */
1400 if (!ignore_stores_p (current_function_decl
, flags
))
1402 changed
|= m_summary
->stores
->merge (current_function_decl
,
1403 callee_summary
->stores
,
1404 &parm_map
, &chain_map
,
1406 !may_access_nonescaping_parm_p
1407 (call
, flags
, false));
1408 if (!m_summary
->writes_errno
1409 && callee_summary
->writes_errno
)
1411 m_summary
->writes_errno
= true;
1418 /* Return access mode for argument I of call STMT with FNSPEC. */
1421 modref_access_analysis::get_access_for_fnspec (gcall
*call
, attr_fnspec
&fnspec
,
1423 modref_parm_map
&map
)
1425 tree size
= NULL_TREE
;
1426 unsigned int size_arg
;
1428 if (!fnspec
.arg_specified_p (i
))
1430 else if (fnspec
.arg_max_access_size_given_by_arg_p (i
, &size_arg
))
1431 size
= gimple_call_arg (call
, size_arg
);
1432 else if (fnspec
.arg_access_size_given_by_type_p (i
))
1434 tree callee
= gimple_call_fndecl (call
);
1435 tree t
= TYPE_ARG_TYPES (TREE_TYPE (callee
));
1437 for (unsigned int p
= 0; p
< i
; p
++)
1439 size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t
)));
1441 modref_access_node a
= {0, -1, -1,
1442 map
.parm_offset
, map
.parm_index
,
1443 map
.parm_offset_known
, 0};
1444 poly_int64 size_hwi
;
1446 && poly_int_tree_p (size
, &size_hwi
)
1447 && coeffs_in_range_p (size_hwi
, 0,
1448 HOST_WIDE_INT_MAX
/ BITS_PER_UNIT
))
1451 a
.max_size
= size_hwi
<< LOG2_BITS_PER_UNIT
;
1455 /* Apply side effects of call STMT to CUR_SUMMARY using FNSPEC.
1456 If IGNORE_STORES is true ignore them.
1457 Return false if no useful summary can be produced. */
1460 modref_access_analysis::process_fnspec (gcall
*call
)
1462 int flags
= gimple_call_flags (call
);
1464 /* PURE/CONST flags makes functions deterministic and if there is
1465 no LOOPING_CONST_OR_PURE they also have no side effects. */
1466 if (!(flags
& (ECF_CONST
| ECF_NOVOPS
| ECF_PURE
))
1467 || (flags
& ECF_LOOPING_CONST_OR_PURE
)
1468 || (cfun
->can_throw_non_call_exceptions
1469 && stmt_could_throw_p (cfun
, call
)))
1471 set_side_effects ();
1472 if (!ignore_nondeterminism_p (current_function_decl
, flags
))
1473 set_nondeterministic ();
1476 /* For const functions we are done. */
1477 if (flags
& (ECF_CONST
| ECF_NOVOPS
))
1480 attr_fnspec fnspec
= gimple_call_fnspec (call
);
1481 /* If there is no fnspec we know nothing about loads & stores. */
1482 if (!fnspec
.known_p ())
1484 if (dump_file
&& gimple_call_builtin_p (call
, BUILT_IN_NORMAL
))
1485 fprintf (dump_file
, " Builtin with no fnspec: %s\n",
1486 IDENTIFIER_POINTER (DECL_NAME (gimple_call_fndecl (call
))));
1487 if (!ignore_stores_p (current_function_decl
, flags
))
1489 if (!may_access_nonescaping_parm_p (call
, flags
, false))
1490 record_global_memory_store ();
1492 record_unknown_store ();
1493 if (!may_access_nonescaping_parm_p (call
, flags
, true))
1494 record_global_memory_load ();
1496 record_unknown_load ();
1500 if (!may_access_nonescaping_parm_p (call
, flags
, true))
1501 record_global_memory_load ();
1503 record_unknown_load ();
1507 /* Process fnspec. */
1508 if (fnspec
.global_memory_read_p ())
1510 if (may_access_nonescaping_parm_p (call
, flags
, true))
1511 record_unknown_load ();
1513 record_global_memory_load ();
1517 for (unsigned int i
= 0; i
< gimple_call_num_args (call
); i
++)
1518 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call
, i
))))
1520 else if (!fnspec
.arg_specified_p (i
)
1521 || fnspec
.arg_maybe_read_p (i
))
1523 modref_parm_map map
= parm_map_for_ptr
1524 (gimple_call_arg (call
, i
));
1526 if (map
.parm_index
== MODREF_LOCAL_MEMORY_PARM
)
1528 if (map
.parm_index
== MODREF_UNKNOWN_PARM
)
1530 record_unknown_load ();
1533 modref_access_node a
= get_access_for_fnspec (call
, fnspec
, i
, map
);
1534 if (a
.parm_index
== MODREF_LOCAL_MEMORY_PARM
)
1537 m_summary
->loads
->insert (current_function_decl
, 0, 0, a
, false);
1539 m_summary_lto
->loads
->insert (current_function_decl
, 0, 0, a
,
1543 if (ignore_stores_p (current_function_decl
, flags
))
1545 if (fnspec
.global_memory_written_p ())
1547 if (may_access_nonescaping_parm_p (call
, flags
, false))
1548 record_unknown_store ();
1550 record_global_memory_store ();
1554 for (unsigned int i
= 0; i
< gimple_call_num_args (call
); i
++)
1555 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call
, i
))))
1557 else if (!fnspec
.arg_specified_p (i
)
1558 || fnspec
.arg_maybe_written_p (i
))
1560 modref_parm_map map
= parm_map_for_ptr
1561 (gimple_call_arg (call
, i
));
1563 if (map
.parm_index
== MODREF_LOCAL_MEMORY_PARM
)
1565 if (map
.parm_index
== MODREF_UNKNOWN_PARM
)
1567 record_unknown_store ();
1570 modref_access_node a
= get_access_for_fnspec (call
, fnspec
, i
, map
);
1571 if (a
.parm_index
== MODREF_LOCAL_MEMORY_PARM
)
1574 m_summary
->stores
->insert (current_function_decl
, 0, 0, a
, false);
1576 m_summary_lto
->stores
->insert (current_function_decl
,
1579 if (fnspec
.errno_maybe_written_p () && flag_errno_math
)
1582 m_summary
->writes_errno
= true;
1584 m_summary_lto
->writes_errno
= true;
1589 /* Analyze function call STMT in function F.
1590 Remember recursive calls in RECURSIVE_CALLS. */
1593 modref_access_analysis::analyze_call (gcall
*stmt
)
1595 /* Check flags on the function call. In certain cases, analysis can be
1597 int flags
= gimple_call_flags (stmt
);
1601 fprintf (dump_file
, " - Analyzing call:");
1602 print_gimple_stmt (dump_file
, stmt
, 0);
1605 if ((flags
& (ECF_CONST
| ECF_NOVOPS
))
1606 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
1610 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
1611 "except for args.\n");
1615 /* Next, we try to get the callee's function declaration. The goal is to
1616 merge their summary with ours. */
1617 tree callee
= gimple_call_fndecl (stmt
);
1619 /* Check if this is an indirect call. */
1623 fprintf (dump_file
, gimple_call_internal_p (stmt
)
1624 ? " - Internal call" : " - Indirect call.\n");
1625 process_fnspec (stmt
);
1628 /* We only need to handle internal calls in IPA mode. */
1629 gcc_checking_assert (!m_summary_lto
&& !m_ipa
);
1631 struct cgraph_node
*callee_node
= cgraph_node::get_create (callee
);
1633 /* If this is a recursive call, the target summary is the same as ours, so
1634 there's nothing to do. */
1635 if (recursive_call_p (current_function_decl
, callee
))
1637 m_recursive_calls
.safe_push (stmt
);
1638 set_side_effects ();
1640 fprintf (dump_file
, " - Skipping recursive call.\n");
1644 gcc_assert (callee_node
!= NULL
);
1646 /* Get the function symbol and its availability. */
1647 enum availability avail
;
1648 callee_node
= callee_node
->function_symbol (&avail
);
1650 if (builtin_safe_for_const_function_p (&looping
, callee
))
1653 set_side_effects ();
1655 fprintf (dump_file
, " - Builtin is safe for const.\n");
1658 if (avail
<= AVAIL_INTERPOSABLE
)
1662 " - Function availability <= AVAIL_INTERPOSABLE.\n");
1663 process_fnspec (stmt
);
1667 /* Get callee's modref summary. As above, if there's no summary, we either
1668 have to give up or, if stores are ignored, we can just purge loads. */
1669 modref_summary
*callee_summary
= optimization_summaries
->get (callee_node
);
1670 if (!callee_summary
)
1673 fprintf (dump_file
, " - No modref summary available for callee.\n");
1674 process_fnspec (stmt
);
1678 merge_call_side_effects (stmt
, callee_summary
, callee_node
, false);
1683 /* Helper for analyze_stmt. */
/* Callback for walk_stmt_load_store_ops: record a load of OP into the
   summaries held by DATA (a modref_access_analysis *).
   NOTE(review): lossy extraction -- dump_file guards, braces, early
   returns and the declaration of R are not visible here.  */
1686 modref_access_analysis::analyze_load (gimple
*, tree
, tree op
, void *data
)
/* Recover the analysis object passed through the void* cookie.  */
1688 modref_access_analysis
*t
= (modref_access_analysis
*)data
;
1692 fprintf (dump_file
, " - Analyzing load: ");
1693 print_generic_expr (dump_file
, op
);
1694 fprintf (dump_file
, "\n");
/* Skip operands record_access_p rejects.  */
1697 if (!t
->record_access_p (op
))
/* Build an ao_ref for OP and translate it to a modref access node.  */
1701 ao_ref_init (&r
, op
);
1702 modref_access_node a
= get_access (&r
);
/* Accesses to local memory are filtered out -- presumably not visible
   to callers; confirm against get_access.  */
1703 if (a
.parm_index
== MODREF_LOCAL_MEMORY_PARM
)
/* Record into the normal and, when present, the LTO load trees.  */
1707 t
->record_access (t
->m_summary
->loads
, &r
, a
);
1708 if (t
->m_summary_lto
)
1709 t
->record_access_lto (t
->m_summary_lto
->loads
, &r
, a
);
1713 /* Helper for analyze_stmt. */
/* Callback for walk_stmt_load_store_ops: record a store of OP into the
   summaries held by DATA, and possibly record a kill.
   NOTE(review): lossy extraction -- dump_file guards, braces, early
   returns and the declaration of R are not visible here.  */
1716 modref_access_analysis::analyze_store (gimple
*stmt
, tree
, tree op
, void *data
)
/* Recover the analysis object passed through the void* cookie.  */
1718 modref_access_analysis
*t
= (modref_access_analysis
*)data
;
1722 fprintf (dump_file
, " - Analyzing store: ");
1723 print_generic_expr (dump_file
, op
);
1724 fprintf (dump_file
, "\n");
/* Skip operands record_access_p rejects.  */
1727 if (!t
->record_access_p (op
))
/* Build an ao_ref for OP and translate it to a modref access node.  */
1731 ao_ref_init (&r
, op
);
1732 modref_access_node a
= get_access (&r
);
/* Stores to local memory are filtered out.  */
1733 if (a
.parm_index
== MODREF_LOCAL_MEMORY_PARM
)
/* Record into the normal and, when present, the LTO store trees.  */
1737 t
->record_access (t
->m_summary
->stores
, &r
, a
);
1738 if (t
->m_summary_lto
)
1739 t
->record_access_lto (t
->m_summary_lto
->stores
, &r
, a
);
/* A store that is always executed and cannot throw (or we do not care
   about non-call EH) can also be recorded as a kill.  */
1740 if (t
->m_always_executed
1741 && a
.useful_for_kill_p ()
1742 && (!cfun
->can_throw_non_call_exceptions
1743 || !stmt_could_throw_p (cfun
, stmt
)))
1746 fprintf (dump_file
, " - Recording kill\n");
1748 modref_access_node::insert_kill (t
->m_summary
->kills
, a
, false);
1749 if (t
->m_summary_lto
)
1750 modref_access_node::insert_kill (t
->m_summary_lto
->kills
, a
, false);
1755 /* Analyze statement STMT of function F.
1756 If IPA is true do not merge in side effects of calls. */
1759 modref_access_analysis::analyze_stmt (gimple
*stmt
, bool always_executed
)
1761 m_always_executed
= always_executed
;
1762 /* In general we can not ignore clobbers because they are barriers for code
1763 motion, however after inlining it is safe to do because local optimization
1764 passes do not consider clobbers from other functions.
1765 Similar logic is in ipa-pure-const.cc. */
1766 if ((m_ipa
|| cfun
->after_inlining
) && gimple_clobber_p (stmt
))
1768 if (always_executed
&& record_access_p (gimple_assign_lhs (stmt
)))
1771 ao_ref_init (&r
, gimple_assign_lhs (stmt
));
1772 modref_access_node a
= get_access (&r
);
1773 if (a
.useful_for_kill_p ())
1776 fprintf (dump_file
, " - Recording kill\n");
1778 modref_access_node::insert_kill (m_summary
->kills
, a
, false);
1780 modref_access_node::insert_kill (m_summary_lto
->kills
,
1787 /* Analyze all loads and stores in STMT. */
1788 walk_stmt_load_store_ops (stmt
, this,
1789 analyze_load
, analyze_store
);
1791 switch (gimple_code (stmt
))
1794 if (gimple_asm_volatile_p (as_a
<gasm
*> (stmt
)))
1795 set_nondeterministic ();
1796 if (cfun
->can_throw_non_call_exceptions
1797 && stmt_could_throw_p (cfun
, stmt
))
1798 set_side_effects ();
1799 /* If the ASM statement does not read nor write memory, there's nothing
1800 to do. Otherwise just give up. */
1801 if (!gimple_asm_clobbers_memory_p (as_a
<gasm
*> (stmt
)))
1804 fprintf (dump_file
, " - Function contains GIMPLE_ASM statement "
1805 "which clobbers memory.\n");
1806 record_unknown_load ();
1807 record_unknown_store ();
1810 if (!m_ipa
|| gimple_call_internal_p (stmt
))
1811 analyze_call (as_a
<gcall
*> (stmt
));
1814 attr_fnspec fnspec
= gimple_call_fnspec (as_a
<gcall
*>(stmt
));
1816 if (fnspec
.known_p ()
1817 && (!fnspec
.global_memory_read_p ()
1818 || !fnspec
.global_memory_written_p ()))
1820 cgraph_edge
*e
= cgraph_node::get
1821 (current_function_decl
)->get_edge (stmt
);
1824 fnspec_summaries
->get_create (e
)->fnspec
1825 = xstrdup (fnspec
.get_str ());
1827 fprintf (dump_file
, " Recorded fnspec %s\n",
1834 if (cfun
->can_throw_non_call_exceptions
1835 && stmt_could_throw_p (cfun
, stmt
))
1836 set_side_effects ();
1841 /* Propagate load/stores across recursive calls. */
1844 modref_access_analysis::propagate ()
1846 if (m_ipa
&& m_summary
)
1849 bool changed
= true;
1851 cgraph_node
*fnode
= cgraph_node::get (current_function_decl
);
1853 m_always_executed
= false;
1854 while (changed
&& m_summary
->useful_p (m_ecf_flags
, false))
1857 for (unsigned i
= 0; i
< m_recursive_calls
.length (); i
++)
1859 changed
|= merge_call_side_effects (m_recursive_calls
[i
], m_summary
,
1866 /* Analyze function. */
1869 modref_access_analysis::analyze ()
1871 m_ecf_flags
= flags_from_decl_or_type (current_function_decl
);
1872 bool summary_useful
= true;
1874 /* Analyze each statement in each basic block of the function. If the
1875 statement cannot be analyzed (for any reason), the entire function cannot
1876 be analyzed by modref. */
1878 bitmap always_executed_bbs
= find_always_executed_bbs (cfun
, true);
1879 FOR_EACH_BB_FN (bb
, cfun
)
1881 gimple_stmt_iterator si
;
1882 bool always_executed
= bitmap_bit_p (always_executed_bbs
, bb
->index
);
1884 for (si
= gsi_start_nondebug_after_labels_bb (bb
);
1885 !gsi_end_p (si
); gsi_next_nondebug (&si
))
1887 /* NULL memory accesses terminates BB. These accesses are known
1888 to trip undefined behavior. gimple-ssa-isolate-paths turns them
1889 to volatile accesses and adds builtin_trap call which would
1890 confuse us otherwise. */
1891 if (infer_nonnull_range_by_dereference (gsi_stmt (si
),
1895 fprintf (dump_file
, " - NULL memory access; terminating BB\n");
1896 if (flag_non_call_exceptions
)
1897 set_side_effects ();
1900 analyze_stmt (gsi_stmt (si
), always_executed
);
1902 /* Avoid doing useless work. */
1903 if ((!m_summary
|| !m_summary
->useful_p (m_ecf_flags
, false))
1905 || !m_summary_lto
->useful_p (m_ecf_flags
, false)))
1907 summary_useful
= false;
1911 && stmt_can_throw_external (cfun
, gsi_stmt (si
)))
1912 always_executed
= false;
1914 if (!summary_useful
)
1917 /* In non-IPA mode we need to perform iterative dataflow on recursive calls.
1918 This needs to be done after all other side effects are computed. */
1923 if (m_summary
&& !m_summary
->side_effects
&& !finite_function_p ())
1924 m_summary
->side_effects
= true;
1925 if (m_summary_lto
&& !m_summary_lto
->side_effects
1926 && !finite_function_p ())
1927 m_summary_lto
->side_effects
= true;
1929 BITMAP_FREE (always_executed_bbs
);
1932 /* Return true if OP accesses memory pointed to by SSA_NAME. */
/* NOTE(review): lossy extraction -- the return type/"static" line and
   some guard/early-return lines are not visible here.  */
1935 memory_access_to (tree op
, tree ssa_name
)
/* Strip component/array refs down to the base of the access.  */
1937 tree base
= get_base_address (op
);
/* Only (TARGET_)MEM_REF bases dereference a pointer at all.  */
1940 if (TREE_CODE (base
) != MEM_REF
&& TREE_CODE (base
) != TARGET_MEM_REF
)
/* The dereferenced pointer is operand 0 of the MEM_REF.  */
1942 return TREE_OPERAND (base
, 0) == ssa_name
;
1945 /* Consider statement val = *arg.
1946 return EAF flags of ARG that can be determined from EAF flags of VAL
1947 (which are known to be FLAGS). If IGNORE_STORES is true we can ignore
1948 all stores to VAL, i.e. when handling noreturn function. */
/* NOTE(review): lossy extraction -- the return type, braces, the
   alternate conditions completing the unbalanced "(((" groups below and
   the final "return ret" are not visible here.  */
1951 deref_flags (int flags
, bool ignore_stores
)
1953 /* Dereference is also a direct read but dereferenced value does not
1954 yield any other direct use. */
1955 int ret
= EAF_NO_DIRECT_CLOBBER
| EAF_NO_DIRECT_ESCAPE
1956 | EAF_NOT_RETURNED_DIRECTLY
;
1957 /* If argument is unused just account for
1958 the read involved in dereference. */
1959 if (flags
& EAF_UNUSED
)
1960 ret
|= EAF_NO_INDIRECT_READ
| EAF_NO_INDIRECT_CLOBBER
1961 | EAF_NO_INDIRECT_ESCAPE
;
1964 /* Direct or indirect accesses lead to indirect accesses. */
1965 if (((flags
& EAF_NO_DIRECT_CLOBBER
)
1966 && (flags
& EAF_NO_INDIRECT_CLOBBER
))
1968 ret
|= EAF_NO_INDIRECT_CLOBBER
;
1969 if (((flags
& EAF_NO_DIRECT_ESCAPE
)
1970 && (flags
& EAF_NO_INDIRECT_ESCAPE
))
1972 ret
|= EAF_NO_INDIRECT_ESCAPE
;
1973 if ((flags
& EAF_NO_DIRECT_READ
)
1974 && (flags
& EAF_NO_INDIRECT_READ
))
1975 ret
|= EAF_NO_INDIRECT_READ
;
1976 if ((flags
& EAF_NOT_RETURNED_DIRECTLY
)
1977 && (flags
& EAF_NOT_RETURNED_INDIRECTLY
))
1978 ret
|= EAF_NOT_RETURNED_INDIRECTLY
;
1984 /* Description of an escape point: a call which affects flags of a given
1989 /* Value escapes to this call. */
1991 /* Argument it escapes to. */
1993 /* Flags already known about the argument (this can save us from recording
1994 escape points if local analysis did good job already). */
1995 eaf_flags_t min_flags
;
1996 /* Does value escape directly or indirectly? */
2000 /* Lattice used during the eaf flags analysis dataflow. For a given SSA name
2001 we aim to compute its flags and escape points. We also use the lattice
2002 to dynamically build dataflow graph to propagate on. */
2004 class modref_lattice
2007 /* EAF flags of the SSA name. */
2009 /* Used during DFS walk to mark names where final value was determined
2010 without need for dataflow. */
2012 /* Used during DFS walk to mark open vertices (for cycle detection). */
2014 /* Set during DFS walk for names that needs dataflow propagation. */
2016 /* Used during the iterative dataflow. */
2019 /* When doing IPA analysis we can not merge in callee escape points;
2020 Only remember them and do the merging at IPA propagation time. */
2021 vec
<escape_point
, va_heap
, vl_ptr
> escape_points
;
2023 /* Representation of a graph for dataflow. This graph is built on-demand
2024 using modref_eaf_analysis::analyze_ssa and later solved by
2025 modref_eaf_analysis::propagate.
2026 Each edge represents the fact that flags of current lattice should be
2027 propagated to lattice of SSA_NAME. */
2028 struct propagate_edge
2033 vec
<propagate_edge
, va_heap
, vl_ptr
> propagate_to
;
2037 bool merge (const modref_lattice
&with
);
2038 bool merge (int flags
);
2039 bool merge_deref (const modref_lattice
&with
, bool ignore_stores
);
2040 bool merge_direct_load ();
2041 bool merge_direct_store ();
2042 bool add_escape_point (gcall
*call
, int arg
, int min_flags
, bool diret
);
2043 void dump (FILE *out
, int indent
= 0) const;
2046 /* Lattices are saved to vectors, so keep them PODs. */
/* Initialize the lattice to the optimistic "all flags possible" state.
   NOTE(review): lossy extraction -- the assignment of F into the flags
   member and the clearing of the bookkeeping members are not visible
   here.  */
2048 modref_lattice::init ()
2050 /* All flags we track. */
2051 int f
= EAF_NO_DIRECT_CLOBBER
| EAF_NO_INDIRECT_CLOBBER
2052 | EAF_NO_DIRECT_ESCAPE
| EAF_NO_INDIRECT_ESCAPE
2053 | EAF_NO_DIRECT_READ
| EAF_NO_INDIRECT_READ
2054 | EAF_NOT_RETURNED_DIRECTLY
| EAF_NOT_RETURNED_INDIRECTLY
2057 /* Check that eaf_flags_t is wide enough to hold all flags. */
2058 gcc_checking_assert (f
== flags
);
2063 /* Release memory. */
/* Free the heap-allocated vectors; the lattice itself is a POD (see
   the note above init) so no other cleanup is done here.  */
2065 modref_lattice::release ()
2067 escape_points
.release ();
2068 propagate_to
.release ();
2071 /* Dump lattice to OUT; indent with INDENT spaces. */
2074 modref_lattice::dump (FILE *out
, int indent
) const
2076 dump_eaf_flags (out
, flags
);
2077 if (escape_points
.length ())
2079 fprintf (out
, "%*sEscapes:\n", indent
, "");
2080 for (unsigned int i
= 0; i
< escape_points
.length (); i
++)
2082 fprintf (out
, "%*s Arg %i (%s) min flags", indent
, "",
2083 escape_points
[i
].arg
,
2084 escape_points
[i
].direct
? "direct" : "indirect");
2085 dump_eaf_flags (out
, escape_points
[i
].min_flags
, false);
2086 fprintf (out
, " in call ");
2087 print_gimple_stmt (out
, escape_points
[i
].call
, 0);
2092 /* Add escape point CALL, ARG, MIN_FLAGS, DIRECT. Return false if such escape
2096 modref_lattice::add_escape_point (gcall
*call
, int arg
, int min_flags
,
2102 /* If we already determined flags to be bad enough,
2103 we do not need to record. */
2104 if ((flags
& min_flags
) == flags
|| (min_flags
& EAF_UNUSED
))
2107 FOR_EACH_VEC_ELT (escape_points
, i
, ep
)
2108 if (ep
->call
== call
&& ep
->arg
== arg
&& ep
->direct
== direct
)
2110 if ((ep
->min_flags
& min_flags
) == min_flags
)
2112 ep
->min_flags
&= min_flags
;
2115 /* Give up if max escape points is met. */
2116 if ((int)escape_points
.length () > param_modref_max_escape_points
)
2119 fprintf (dump_file
, "--param modref-max-escape-points limit reached\n");
2123 escape_point new_ep
= {call
, arg
, min_flags
, direct
};
2124 escape_points
.safe_push (new_ep
);
2128 /* Merge in flags from F. */
2130 modref_lattice::merge (int f
)
2134 /* Check that flags seems sane: if function does not read the parameter
2135 it can not access it indirectly. */
2136 gcc_checking_assert (!(f
& EAF_NO_DIRECT_READ
)
2137 || ((f
& EAF_NO_INDIRECT_READ
)
2138 && (f
& EAF_NO_INDIRECT_CLOBBER
)
2139 && (f
& EAF_NO_INDIRECT_ESCAPE
)
2140 && (f
& EAF_NOT_RETURNED_INDIRECTLY
)));
2141 if ((flags
& f
) != flags
)
2144 /* Prune obviously useless flags;
2145 We do not have ECF_FLAGS handy which is not big problem since
2146 we will do final flags cleanup before producing summary.
2147 Merging should be fast so it can work well with dataflow. */
2148 flags
= remove_useless_eaf_flags (flags
, 0, false);
2150 escape_points
.release ();
2156 /* Merge in WITH. Return true if anything changed. */
2159 modref_lattice::merge (const modref_lattice
&with
)
2164 bool changed
= merge (with
.flags
);
2168 for (unsigned int i
= 0; i
< with
.escape_points
.length (); i
++)
2169 changed
|= add_escape_point (with
.escape_points
[i
].call
,
2170 with
.escape_points
[i
].arg
,
2171 with
.escape_points
[i
].min_flags
,
2172 with
.escape_points
[i
].direct
);
2176 /* Merge in deref of WITH. If IGNORE_STORES is true do not consider
2177 stores. Return true if anything changed. */
2180 modref_lattice::merge_deref (const modref_lattice
&with
, bool ignore_stores
)
2185 bool changed
= merge (deref_flags (with
.flags
, ignore_stores
));
2189 for (unsigned int i
= 0; i
< with
.escape_points
.length (); i
++)
2191 int min_flags
= with
.escape_points
[i
].min_flags
;
2193 if (with
.escape_points
[i
].direct
)
2194 min_flags
= deref_flags (min_flags
, ignore_stores
);
2195 else if (ignore_stores
)
2196 min_flags
|= ignore_stores_eaf_flags
;
2197 changed
|= add_escape_point (with
.escape_points
[i
].call
,
2198 with
.escape_points
[i
].arg
,
2205 /* Merge in flags for direct load. */
2208 modref_lattice::merge_direct_load ()
/* A direct load means the value is used and directly read: clear
   EAF_UNUSED and EAF_NO_DIRECT_READ, keep all other flags.  */
2210 return merge (~(EAF_UNUSED
| EAF_NO_DIRECT_READ
));
2213 /* Merge in flags for direct store. */
/* A direct store means the value is used and directly clobbered: clear
   EAF_UNUSED and EAF_NO_DIRECT_CLOBBER, keep all other flags.  */
2216 modref_lattice::merge_direct_store ()
2218 return merge (~(EAF_UNUSED
| EAF_NO_DIRECT_CLOBBER
));
2221 /* Analyzer of EAF flags.
2222 This is generally dataflow problem over the SSA graph, however we only
2223 care about flags of few selected ssa names (arguments, return slot and
2224 static chain). So we first call analyze_ssa_name on all relevant names
2225 and perform a DFS walk to discover SSA names where flags needs to be
2226 determined. For acyclic graphs we try to determine final flags during
2227 this walk. Once cycles or recursion depth is met we enlist SSA names
2228 for dataflow which is done by propagate call.
2230 After propagation the flags can be obtained using get_ssa_name_flags. */
2232 class modref_eaf_analysis
2235 /* Mark NAME as relevant for analysis. */
2236 void analyze_ssa_name (tree name
, bool deferred
= false);
2237 /* Dataflow solver. */
2239 /* Return flags computed earlier for NAME. */
2240 int get_ssa_name_flags (tree name
)
2242 int version
= SSA_NAME_VERSION (name
);
2243 gcc_checking_assert (m_lattice
[version
].known
);
2244 return m_lattice
[version
].flags
;
2246 /* In IPA mode this will record all escape points
2247 determined for NAME to PARM_IDNEX. Flags are minimal
2249 void record_escape_points (tree name
, int parm_index
, int flags
);
2250 modref_eaf_analysis (bool ipa
)
2254 m_lattice
.safe_grow_cleared (num_ssa_names
, true);
2256 ~modref_eaf_analysis ()
2258 gcc_checking_assert (!m_depth
);
2259 if (m_ipa
|| m_names_to_propagate
.length ())
2260 for (unsigned int i
= 0; i
< num_ssa_names
; i
++)
2261 m_lattice
[i
].release ();
2264 /* If true, we produce analysis for IPA mode. In this case escape points are
2267 /* Depth of recursion of analyze_ssa_name. */
2269 /* Propagation lattice for individual ssa names. */
2270 auto_vec
<modref_lattice
> m_lattice
;
2271 auto_vec
<tree
> m_deferred_names
;
2272 auto_vec
<int> m_names_to_propagate
;
2274 void merge_with_ssa_name (tree dest
, tree src
, bool deref
);
2275 void merge_call_lhs_flags (gcall
*call
, int arg
, tree name
, bool direct
,
2280 /* Call statements may return their parameters. Consider argument number
2281 ARG of USE_STMT and determine flags that need to be cleared
2282 in case pointer possibly indirectly references from ARG I is returned.
2283 If DIRECT is true consider direct returns and if INDIRECT consider
2285 LATTICE, DEPTH and ipa are same as in analyze_ssa_name.
2286 ARG is set to -1 for static chain. */
2289 modref_eaf_analysis::merge_call_lhs_flags (gcall
*call
, int arg
,
2290 tree name
, bool direct
,
2293 int index
= SSA_NAME_VERSION (name
);
2294 bool returned_directly
= false;
2296 /* If there is no return value, no flags are affected. */
2297 if (!gimple_call_lhs (call
))
2300 /* If we know that function returns given argument and it is not ARG
2301 we can still be happy. */
2304 int flags
= gimple_call_return_flags (call
);
2305 if (flags
& ERF_RETURNS_ARG
)
2307 if ((flags
& ERF_RETURN_ARG_MASK
) == arg
)
2308 returned_directly
= true;
2313 /* Make ERF_RETURNS_ARG overwrite EAF_UNUSED. */
2314 if (returned_directly
)
2319 /* If value is not returned at all, do nothing. */
2320 else if (!direct
&& !indirect
)
2323 /* If return value is SSA name determine its flags. */
2324 if (TREE_CODE (gimple_call_lhs (call
)) == SSA_NAME
)
2326 tree lhs
= gimple_call_lhs (call
);
2328 merge_with_ssa_name (name
, lhs
, false);
2330 merge_with_ssa_name (name
, lhs
, true);
2332 /* In the case of memory store we can do nothing. */
2334 m_lattice
[index
].merge (deref_flags (0, false));
2336 m_lattice
[index
].merge (0);
2339 /* CALL_FLAGS are EAF_FLAGS of the argument. Turn them
2340 into flags for caller, update LATTICE of corresponding
2341 argument if needed. */
/* NOTE(review): lossy extraction -- the return type, braces, the merge
   done for the direct-escape case and the final return of CALL_FLAGS
   are not visible here.  */
2344 callee_to_caller_flags (int call_flags
, bool ignore_stores
,
2345 modref_lattice
&lattice
)
2347 /* call_flags is about callee returning a value
2348 that is not the same as caller returning it. */
2349 call_flags
|= EAF_NOT_RETURNED_DIRECTLY
2350 | EAF_NOT_RETURNED_INDIRECTLY
;
/* Only relevant when stores matter and the value is actually used.  */
2351 if (!ignore_stores
&& !(call_flags
& EAF_UNUSED
))
2353 /* If value escapes we are no longer able to track what happens
2354 with it because we can read it from the escaped location
2356 if (!(call_flags
& EAF_NO_DIRECT_ESCAPE
))
2358 else if (!(call_flags
& EAF_NO_INDIRECT_ESCAPE
))
2359 lattice
.merge (~(EAF_NOT_RETURNED_INDIRECTLY
2360 | EAF_NO_DIRECT_READ
2361 | EAF_NO_INDIRECT_READ
2362 | EAF_NO_INDIRECT_CLOBBER
2366 call_flags
|= ignore_stores_eaf_flags
;
2370 /* Analyze EAF flags for SSA name NAME and store result to LATTICE.
2371 LATTICE is an array of modref_lattices.
2372 DEPTH is a recursion depth used to make debug output prettier.
2373 If IPA is true we analyze for IPA propagation (and thus call escape points
2374 are processed later) */
2377 modref_eaf_analysis::analyze_ssa_name (tree name
, bool deferred
)
2379 imm_use_iterator ui
;
2381 int index
= SSA_NAME_VERSION (name
);
2385 /* See if value is already computed. */
2386 if (m_lattice
[index
].known
|| m_lattice
[index
].do_dataflow
)
2388 if (m_lattice
[index
].open
)
2392 "%*sCycle in SSA graph\n",
2396 /* Recursion guard. */
2397 m_lattice
[index
].init ();
2398 if (m_depth
== param_modref_max_depth
)
2402 "%*sMax recursion depth reached; postponing\n",
2404 m_deferred_names
.safe_push (name
);
2412 "%*sAnalyzing flags of ssa name: ", m_depth
* 4, "");
2413 print_generic_expr (dump_file
, name
);
2414 fprintf (dump_file
, "\n");
2417 FOR_EACH_IMM_USE_STMT (use_stmt
, ui
, name
)
2419 if (m_lattice
[index
].flags
== 0)
2421 if (is_gimple_debug (use_stmt
))
2425 fprintf (dump_file
, "%*s Analyzing stmt: ", m_depth
* 4, "");
2426 print_gimple_stmt (dump_file
, use_stmt
, 0);
2428 /* If we see a direct non-debug use, clear unused bit.
2429 All dereferences should be accounted below using deref_flags. */
2430 m_lattice
[index
].merge (~EAF_UNUSED
);
2432 /* Gimple return may load the return value.
2433 Returning name counts as a use by tree-ssa-structalias.cc */
2434 if (greturn
*ret
= dyn_cast
<greturn
*> (use_stmt
))
2436 /* Returning through return slot is seen as memory write earlier. */
2437 if (DECL_RESULT (current_function_decl
)
2438 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
)))
2440 else if (gimple_return_retval (ret
) == name
)
2441 m_lattice
[index
].merge (~(EAF_UNUSED
| EAF_NOT_RETURNED_DIRECTLY
2442 | EAF_NOT_RETURNED_DIRECTLY
));
2443 else if (memory_access_to (gimple_return_retval (ret
), name
))
2445 m_lattice
[index
].merge_direct_load ();
2446 m_lattice
[index
].merge (~(EAF_UNUSED
2447 | EAF_NOT_RETURNED_INDIRECTLY
));
2450 /* Account for LHS store, arg loads and flags from callee function. */
2451 else if (gcall
*call
= dyn_cast
<gcall
*> (use_stmt
))
2453 tree callee
= gimple_call_fndecl (call
);
2455 /* IPA PTA internally it treats calling a function as "writing" to
2456 the argument space of all functions the function pointer points to
2457 (PR101949). We can not drop EAF_NOCLOBBER only when ipa-pta
2458 is on since that would allow propagation of this from -fno-ipa-pta
2459 to -fipa-pta functions. */
2460 if (gimple_call_fn (use_stmt
) == name
)
2461 m_lattice
[index
].merge (~(EAF_NO_DIRECT_CLOBBER
| EAF_UNUSED
));
2463 /* Recursion would require bit of propagation; give up for now. */
2464 if (callee
&& !m_ipa
&& recursive_call_p (current_function_decl
,
2466 m_lattice
[index
].merge (0);
2469 int ecf_flags
= gimple_call_flags (call
);
2470 bool ignore_stores
= ignore_stores_p (current_function_decl
,
2472 bool ignore_retval
= ignore_retval_p (current_function_decl
,
2475 /* Handle *name = func (...). */
2476 if (gimple_call_lhs (call
)
2477 && memory_access_to (gimple_call_lhs (call
), name
))
2479 m_lattice
[index
].merge_direct_store ();
2480 /* Return slot optimization passes address of
2481 LHS to callee via hidden parameter and this
2482 may make LHS to escape. See PR 98499. */
2483 if (gimple_call_return_slot_opt_p (call
)
2484 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call
))))
2486 int call_flags
= gimple_call_retslot_flags (call
);
2487 bool isretslot
= false;
2489 if (DECL_RESULT (current_function_decl
)
2490 && DECL_BY_REFERENCE
2491 (DECL_RESULT (current_function_decl
)))
2492 isretslot
= ssa_default_def
2494 DECL_RESULT (current_function_decl
))
2497 /* Passing returnslot to return slot is special because
2498 not_returned and escape has same meaning.
2499 However passing arg to return slot is different. If
2500 the callee's return slot is returned it means that
2501 arg is written to itself which is an escape.
2502 Since we do not track the memory it is written to we
2503 need to give up on analyzing it. */
2506 if (!(call_flags
& (EAF_NOT_RETURNED_DIRECTLY
2508 m_lattice
[index
].merge (0);
2509 else gcc_checking_assert
2510 (call_flags
& (EAF_NOT_RETURNED_INDIRECTLY
2512 call_flags
= callee_to_caller_flags
2516 m_lattice
[index
].merge (call_flags
);
2520 if (gimple_call_chain (call
)
2521 && (gimple_call_chain (call
) == name
))
2523 int call_flags
= gimple_call_static_chain_flags (call
);
2524 if (!ignore_retval
&& !(call_flags
& EAF_UNUSED
))
2525 merge_call_lhs_flags
2527 !(call_flags
& EAF_NOT_RETURNED_DIRECTLY
),
2528 !(call_flags
& EAF_NOT_RETURNED_INDIRECTLY
));
2529 call_flags
= callee_to_caller_flags
2530 (call_flags
, ignore_stores
,
2532 if (!(ecf_flags
& (ECF_CONST
| ECF_NOVOPS
)))
2533 m_lattice
[index
].merge (call_flags
);
2536 /* Process internal functions and right away. */
2537 bool record_ipa
= m_ipa
&& !gimple_call_internal_p (call
);
2539 /* Handle all function parameters. */
2540 for (unsigned i
= 0;
2541 i
< gimple_call_num_args (call
)
2542 && m_lattice
[index
].flags
; i
++)
2543 /* Name is directly passed to the callee. */
2544 if (gimple_call_arg (call
, i
) == name
)
2546 int call_flags
= gimple_call_arg_flags (call
, i
);
2548 merge_call_lhs_flags
2550 !(call_flags
& (EAF_NOT_RETURNED_DIRECTLY
2552 !(call_flags
& (EAF_NOT_RETURNED_INDIRECTLY
2554 if (!(ecf_flags
& (ECF_CONST
| ECF_NOVOPS
)))
2556 call_flags
= callee_to_caller_flags
2557 (call_flags
, ignore_stores
,
2560 m_lattice
[index
].merge (call_flags
);
2562 m_lattice
[index
].add_escape_point (call
, i
,
2566 /* Name is dereferenced and passed to a callee. */
2567 else if (memory_access_to (gimple_call_arg (call
, i
), name
))
2569 int call_flags
= deref_flags
2570 (gimple_call_arg_flags (call
, i
), ignore_stores
);
2571 if (!ignore_retval
&& !(call_flags
& EAF_UNUSED
)
2572 && !(call_flags
& EAF_NOT_RETURNED_DIRECTLY
)
2573 && !(call_flags
& EAF_NOT_RETURNED_INDIRECTLY
))
2574 merge_call_lhs_flags (call
, i
, name
, false, true);
2575 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
2576 m_lattice
[index
].merge_direct_load ();
2579 call_flags
= callee_to_caller_flags
2580 (call_flags
, ignore_stores
,
2583 m_lattice
[index
].merge (call_flags
);
2585 m_lattice
[index
].add_escape_point (call
, i
,
2591 else if (gimple_assign_load_p (use_stmt
))
2593 gassign
*assign
= as_a
<gassign
*> (use_stmt
);
2594 /* Memory to memory copy. */
2595 if (gimple_store_p (assign
))
2597 /* Handle *lhs = *name.
2599 We do not track memory locations, so assume that value
2600 is used arbitrarily. */
2601 if (memory_access_to (gimple_assign_rhs1 (assign
), name
))
2602 m_lattice
[index
].merge (deref_flags (0, false));
2603 /* Handle *name = *exp. */
2604 else if (memory_access_to (gimple_assign_lhs (assign
), name
))
2605 m_lattice
[index
].merge_direct_store ();
2607 /* Handle lhs = *name. */
2608 else if (memory_access_to (gimple_assign_rhs1 (assign
), name
))
2610 tree lhs
= gimple_assign_lhs (assign
);
2611 merge_with_ssa_name (name
, lhs
, true);
2614 else if (gimple_store_p (use_stmt
))
2616 gassign
*assign
= dyn_cast
<gassign
*> (use_stmt
);
2618 /* Handle *lhs = name. */
2619 if (assign
&& gimple_assign_rhs1 (assign
) == name
)
2622 fprintf (dump_file
, "%*s ssa name saved to memory\n",
2624 m_lattice
[index
].merge (0);
2626 /* Handle *name = exp. */
2628 && memory_access_to (gimple_assign_lhs (assign
), name
))
2630 /* In general we can not ignore clobbers because they are
2631 barriers for code motion, however after inlining it is safe to
2632 do because local optimization passes do not consider clobbers
2633 from other functions.
2634 Similar logic is in ipa-pure-const.cc. */
2635 if (!cfun
->after_inlining
|| !gimple_clobber_p (assign
))
2636 m_lattice
[index
].merge_direct_store ();
2638 /* ASM statements etc. */
2642 fprintf (dump_file
, "%*s Unhandled store\n", m_depth
* 4, "");
2643 m_lattice
[index
].merge (0);
2646 else if (gassign
*assign
= dyn_cast
<gassign
*> (use_stmt
))
2648 enum tree_code code
= gimple_assign_rhs_code (assign
);
2650 /* See if operation is a merge as considered by
2651 tree-ssa-structalias.cc:find_func_aliases. */
2652 if (!truth_value_p (code
)
2653 && code
!= POINTER_DIFF_EXPR
2654 && (code
!= POINTER_PLUS_EXPR
2655 || gimple_assign_rhs1 (assign
) == name
))
2657 tree lhs
= gimple_assign_lhs (assign
);
2658 merge_with_ssa_name (name
, lhs
, false);
2661 else if (gphi
*phi
= dyn_cast
<gphi
*> (use_stmt
))
2663 tree result
= gimple_phi_result (phi
);
2664 merge_with_ssa_name (name
, result
, false);
2666 /* Conditions are not considered escape points
2667 by tree-ssa-structalias. */
2668 else if (gimple_code (use_stmt
) == GIMPLE_COND
)
2673 fprintf (dump_file
, "%*s Unhandled stmt\n", m_depth
* 4, "");
2674 m_lattice
[index
].merge (0);
2679 fprintf (dump_file
, "%*s current flags of ", m_depth
* 4, "");
2680 print_generic_expr (dump_file
, name
);
2681 m_lattice
[index
].dump (dump_file
, m_depth
* 4 + 4);
2686 fprintf (dump_file
, "%*sflags of ssa name ", m_depth
* 4, "");
2687 print_generic_expr (dump_file
, name
);
2688 m_lattice
[index
].dump (dump_file
, m_depth
* 4 + 2);
2690 m_lattice
[index
].open
= false;
2691 if (!m_lattice
[index
].do_dataflow
)
2692 m_lattice
[index
].known
= true;
2695 /* Propagate info from SRC to DEST. If DEREF it true, assume that SRC
2699 modref_eaf_analysis::merge_with_ssa_name (tree dest
, tree src
, bool deref
)
2701 int index
= SSA_NAME_VERSION (dest
);
2702 int src_index
= SSA_NAME_VERSION (src
);
2704 /* Merging lattice with itself is a no-op. */
2705 if (!deref
&& src
== dest
)
2709 analyze_ssa_name (src
);
2712 m_lattice
[index
].merge_deref (m_lattice
[src_index
], false);
2714 m_lattice
[index
].merge (m_lattice
[src_index
]);
2716 /* If we failed to produce final solution add an edge to the dataflow
2718 if (!m_lattice
[src_index
].known
)
2720 modref_lattice::propagate_edge e
= {index
, deref
};
2722 if (!m_lattice
[src_index
].propagate_to
.length ())
2723 m_names_to_propagate
.safe_push (src_index
);
2724 m_lattice
[src_index
].propagate_to
.safe_push (e
);
2725 m_lattice
[src_index
].changed
= true;
2726 m_lattice
[src_index
].do_dataflow
= true;
2729 "%*sWill propgate from ssa_name %i to %i%s\n",
2731 "", src_index
, index
, deref
? " (deref)" : "");
2735 /* In the case we deferred some SSA names, reprocess them. In the case some
2736 dataflow edges were introduced, do the actual iterative dataflow. */
2739 modref_eaf_analysis::propagate ()
2744 bool changed
= true;
2746 while (m_deferred_names
.length ())
2748 tree name
= m_deferred_names
.pop ();
2750 fprintf (dump_file
, "Analyzing deferred SSA name\n");
2751 analyze_ssa_name (name
, true);
2754 if (!m_names_to_propagate
.length ())
2757 fprintf (dump_file
, "Propagating EAF flags\n");
2759 /* Compute reverse postorder. */
2766 auto_vec
<struct stack_entry
> stack
;
2767 int pos
= m_names_to_propagate
.length () - 1;
2769 rpo
.safe_grow (m_names_to_propagate
.length (), true);
2770 stack
.reserve_exact (m_names_to_propagate
.length ());
2772 /* We reuse known flag for RPO DFS walk bookkeeping. */
2774 FOR_EACH_VEC_ELT (m_names_to_propagate
, i
, index
)
2775 gcc_assert (!m_lattice
[index
].known
&& m_lattice
[index
].changed
);
2777 FOR_EACH_VEC_ELT (m_names_to_propagate
, i
, index
)
2779 if (!m_lattice
[index
].known
)
2781 stack_entry e
= {index
, 0};
2783 stack
.quick_push (e
);
2784 m_lattice
[index
].known
= true;
2786 while (stack
.length ())
2789 int index1
= stack
.last ().name
;
2791 while (stack
.last ().pos
< m_lattice
[index1
].propagate_to
.length ())
2793 int index2
= m_lattice
[index1
]
2794 .propagate_to
[stack
.last ().pos
].ssa_name
;
2796 stack
.last ().pos
++;
2797 if (!m_lattice
[index2
].known
2798 && m_lattice
[index2
].propagate_to
.length ())
2800 stack_entry e
= {index2
, 0};
2802 stack
.quick_push (e
);
2803 m_lattice
[index2
].known
= true;
2809 && stack
.last ().pos
== m_lattice
[index1
].propagate_to
.length ())
2811 rpo
[pos
--] = index1
;
2817 /* Perform iterative dataflow. */
2823 fprintf (dump_file
, " iteration %i\n", iterations
);
2824 FOR_EACH_VEC_ELT (rpo
, i
, index
)
2826 if (m_lattice
[index
].changed
)
2830 m_lattice
[index
].changed
= false;
2832 fprintf (dump_file
, " Visiting ssa name %i\n", index
);
2833 for (j
= 0; j
< m_lattice
[index
].propagate_to
.length (); j
++)
2836 int target
= m_lattice
[index
].propagate_to
[j
].ssa_name
;
2837 bool deref
= m_lattice
[index
].propagate_to
[j
].deref
;
2840 fprintf (dump_file
, " Propagating flags of ssa name"
2842 index
, target
, deref
? " (deref)" : "");
2843 m_lattice
[target
].known
= true;
2844 if (!m_lattice
[index
].propagate_to
[j
].deref
)
2845 ch
= m_lattice
[target
].merge (m_lattice
[index
]);
2847 ch
= m_lattice
[target
].merge_deref (m_lattice
[index
],
2853 fprintf (dump_file
, " New lattice: ");
2854 m_lattice
[target
].dump (dump_file
);
2857 m_lattice
[target
].changed
= true;
2863 fprintf (dump_file
, "EAF flags propagated in %i iterations\n", iterations
);
2866 /* Record escape points of PARM_INDEX according to LATTICE. */
2869 modref_eaf_analysis::record_escape_points (tree name
, int parm_index
, int flags
)
2871 modref_lattice
&lattice
= m_lattice
[SSA_NAME_VERSION (name
)];
2873 if (lattice
.escape_points
.length ())
2877 cgraph_node
*node
= cgraph_node::get (current_function_decl
);
2880 FOR_EACH_VEC_ELT (lattice
.escape_points
, ip
, ep
)
2881 if ((ep
->min_flags
& flags
) != flags
)
2883 cgraph_edge
*e
= node
->get_edge (ep
->call
);
2884 struct escape_entry ee
= {parm_index
, ep
->arg
,
2885 ep
->min_flags
, ep
->direct
};
2887 escape_summaries
->get_create (e
)->esc
.safe_push (ee
);
2892 /* Determine EAF flags for function parameters
2893 and fill in SUMMARY/SUMMARY_LTO. If IPA is true work in IPA mode
2894 where we also collect escape points.
2895 PAST_FLAGS, PAST_RETSLOT_FLAGS, PAST_STATIC_CHAIN_FLAGS can be
2896 used to preserve flags from previous (IPA) run for cases where
2897 late optimizations changed code in a way we can no longer analyze
2901 analyze_parms (modref_summary
*summary
, modref_summary_lto
*summary_lto
,
2902 bool ipa
, vec
<eaf_flags_t
> &past_flags
,
2903 int past_retslot_flags
, int past_static_chain_flags
)
2905 unsigned int parm_index
= 0;
2906 unsigned int count
= 0;
2907 int ecf_flags
= flags_from_decl_or_type (current_function_decl
);
2908 tree retslot
= NULL
;
2909 tree static_chain
= NULL
;
2911 /* If there is return slot, look up its SSA name. */
2912 if (DECL_RESULT (current_function_decl
)
2913 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
)))
2914 retslot
= ssa_default_def (cfun
, DECL_RESULT (current_function_decl
));
2915 if (cfun
->static_chain_decl
)
2916 static_chain
= ssa_default_def (cfun
, cfun
->static_chain_decl
);
2918 for (tree parm
= DECL_ARGUMENTS (current_function_decl
); parm
;
2919 parm
= TREE_CHAIN (parm
))
2922 if (!count
&& !retslot
&& !static_chain
)
2925 modref_eaf_analysis
eaf_analysis (ipa
);
2927 /* Determine all SSA names we need to know flags for. */
2928 for (tree parm
= DECL_ARGUMENTS (current_function_decl
); parm
;
2929 parm
= TREE_CHAIN (parm
))
2931 tree name
= ssa_default_def (cfun
, parm
);
2933 eaf_analysis
.analyze_ssa_name (name
);
2936 eaf_analysis
.analyze_ssa_name (retslot
);
2938 eaf_analysis
.analyze_ssa_name (static_chain
);
2940 /* Do the dataflow. */
2941 eaf_analysis
.propagate ();
2943 tree attr
= lookup_attribute ("fn spec",
2945 (TREE_TYPE (current_function_decl
)));
2946 attr_fnspec
fnspec (attr
2947 ? TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
)))
2951 /* Store results to summaries. */
2952 for (tree parm
= DECL_ARGUMENTS (current_function_decl
); parm
; parm_index
++,
2953 parm
= TREE_CHAIN (parm
))
2955 tree name
= ssa_default_def (cfun
, parm
);
2956 if (!name
|| has_zero_uses (name
))
2958 /* We do not track non-SSA parameters,
2959 but we want to track unused gimple_regs. */
2960 if (!is_gimple_reg (parm
))
2964 if (parm_index
>= summary
->arg_flags
.length ())
2965 summary
->arg_flags
.safe_grow_cleared (count
, true);
2966 summary
->arg_flags
[parm_index
] = EAF_UNUSED
;
2968 else if (summary_lto
)
2970 if (parm_index
>= summary_lto
->arg_flags
.length ())
2971 summary_lto
->arg_flags
.safe_grow_cleared (count
, true);
2972 summary_lto
->arg_flags
[parm_index
] = EAF_UNUSED
;
2976 int flags
= eaf_analysis
.get_ssa_name_flags (name
);
2977 int attr_flags
= fnspec
.arg_eaf_flags (parm_index
);
2979 if (dump_file
&& (flags
| attr_flags
) != flags
&& !(flags
& EAF_UNUSED
))
2982 " Flags for param %i combined with fnspec flags:",
2984 dump_eaf_flags (dump_file
, attr_flags
, false);
2985 fprintf (dump_file
, " determined: ");
2986 dump_eaf_flags (dump_file
, flags
, true);
2988 flags
|= attr_flags
;
2990 /* Eliminate useless flags so we do not end up storing unnecessary
2993 flags
= remove_useless_eaf_flags
2995 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))));
2996 if (past_flags
.length () > parm_index
)
2998 int past
= past_flags
[parm_index
];
2999 past
= remove_useless_eaf_flags
3001 VOID_TYPE_P (TREE_TYPE
3002 (TREE_TYPE (current_function_decl
))));
3003 if (dump_file
&& (flags
| past
) != flags
&& !(flags
& EAF_UNUSED
))
3006 " Flags for param %i combined with IPA pass:",
3008 dump_eaf_flags (dump_file
, past
, false);
3009 fprintf (dump_file
, " determined: ");
3010 dump_eaf_flags (dump_file
, flags
, true);
3012 if (!(flags
& EAF_UNUSED
))
3020 if (parm_index
>= summary
->arg_flags
.length ())
3021 summary
->arg_flags
.safe_grow_cleared (count
, true);
3022 summary
->arg_flags
[parm_index
] = flags
;
3024 else if (summary_lto
)
3026 if (parm_index
>= summary_lto
->arg_flags
.length ())
3027 summary_lto
->arg_flags
.safe_grow_cleared (count
, true);
3028 summary_lto
->arg_flags
[parm_index
] = flags
;
3030 eaf_analysis
.record_escape_points (name
, parm_index
, flags
);
3035 int flags
= eaf_analysis
.get_ssa_name_flags (retslot
);
3036 int past
= past_retslot_flags
;
3038 flags
= remove_useless_eaf_flags (flags
, ecf_flags
, false);
3039 past
= remove_useless_eaf_flags
3041 VOID_TYPE_P (TREE_TYPE
3042 (TREE_TYPE (current_function_decl
))));
3043 if (dump_file
&& (flags
| past
) != flags
&& !(flags
& EAF_UNUSED
))
3046 " Retslot flags combined with IPA pass:");
3047 dump_eaf_flags (dump_file
, past
, false);
3048 fprintf (dump_file
, " determined: ");
3049 dump_eaf_flags (dump_file
, flags
, true);
3051 if (!(flags
& EAF_UNUSED
))
3056 summary
->retslot_flags
= flags
;
3058 summary_lto
->retslot_flags
= flags
;
3059 eaf_analysis
.record_escape_points (retslot
,
3060 MODREF_RETSLOT_PARM
, flags
);
3065 int flags
= eaf_analysis
.get_ssa_name_flags (static_chain
);
3066 int past
= past_static_chain_flags
;
3068 flags
= remove_useless_eaf_flags (flags
, ecf_flags
, false);
3069 past
= remove_useless_eaf_flags
3071 VOID_TYPE_P (TREE_TYPE
3072 (TREE_TYPE (current_function_decl
))));
3073 if (dump_file
&& (flags
| past
) != flags
&& !(flags
& EAF_UNUSED
))
3076 " Static chain flags combined with IPA pass:");
3077 dump_eaf_flags (dump_file
, past
, false);
3078 fprintf (dump_file
, " determined: ");
3079 dump_eaf_flags (dump_file
, flags
, true);
3081 if (!(flags
& EAF_UNUSED
))
3086 summary
->static_chain_flags
= flags
;
3088 summary_lto
->static_chain_flags
= flags
;
3089 eaf_analysis
.record_escape_points (static_chain
,
3090 MODREF_STATIC_CHAIN_PARM
,
3096 /* Analyze function. IPA indicates whether we're running in local mode
3097 (false) or the IPA mode (true).
3098 Return true if fixup cfg is needed after the pass. */
3101 analyze_function (bool ipa
)
3103 bool fixup_cfg
= false;
3105 fprintf (dump_file
, "\n\nmodref analyzing '%s' (ipa=%i)%s%s\n",
3106 cgraph_node::get (current_function_decl
)->dump_name (), ipa
,
3107 TREE_READONLY (current_function_decl
) ? " (const)" : "",
3108 DECL_PURE_P (current_function_decl
) ? " (pure)" : "");
3110 /* Don't analyze this function if it's compiled with -fno-strict-aliasing. */
3111 if (!flag_ipa_modref
3112 || lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl
)))
3115 /* Compute no-LTO summaries when local optimization is going to happen. */
3116 bool nolto
= (!ipa
|| ((!flag_lto
|| flag_fat_lto_objects
) && !in_lto_p
)
3117 || (in_lto_p
&& !flag_wpa
3118 && flag_incremental_link
!= INCREMENTAL_LINK_LTO
));
3119 /* Compute LTO when LTO streaming is going to happen. */
3120 bool lto
= ipa
&& ((flag_lto
&& !in_lto_p
)
3122 || flag_incremental_link
== INCREMENTAL_LINK_LTO
);
3123 cgraph_node
*fnode
= cgraph_node::get (current_function_decl
);
3125 modref_summary
*summary
= NULL
;
3126 modref_summary_lto
*summary_lto
= NULL
;
3128 bool past_flags_known
= false;
3129 auto_vec
<eaf_flags_t
> past_flags
;
3130 int past_retslot_flags
= 0;
3131 int past_static_chain_flags
= 0;
3133 /* Initialize the summary.
3134 If we run in local mode there is possibly pre-existing summary from
3135 IPA pass. Dump it so it is easy to compare if mod-ref info has
3139 if (!optimization_summaries
)
3140 optimization_summaries
= modref_summaries::create_ggc (symtab
);
3141 else /* Remove existing summary if we are re-running the pass. */
3143 summary
= optimization_summaries
->get (fnode
);
3149 fprintf (dump_file
, "Past summary:\n");
3150 optimization_summaries
->get (fnode
)->dump (dump_file
);
3152 past_flags
.reserve_exact (summary
->arg_flags
.length ());
3153 past_flags
.splice (summary
->arg_flags
);
3154 past_retslot_flags
= summary
->retslot_flags
;
3155 past_static_chain_flags
= summary
->static_chain_flags
;
3156 past_flags_known
= true;
3158 optimization_summaries
->remove (fnode
);
3160 summary
= optimization_summaries
->get_create (fnode
);
3161 gcc_checking_assert (nolto
&& !lto
);
3163 /* In IPA mode we analyze every function precisely once. Assert that. */
3169 summaries
= modref_summaries::create_ggc (symtab
);
3171 summaries
->remove (fnode
);
3172 summary
= summaries
->get_create (fnode
);
3177 summaries_lto
= modref_summaries_lto::create_ggc (symtab
);
3179 summaries_lto
->remove (fnode
);
3180 summary_lto
= summaries_lto
->get_create (fnode
);
3182 if (!fnspec_summaries
)
3183 fnspec_summaries
= new fnspec_summaries_t (symtab
);
3184 if (!escape_summaries
)
3185 escape_summaries
= new escape_summaries_t (symtab
);
3189 /* Create and initialize summary for F.
3190 Note that summaries may be already allocated from previous
3194 gcc_assert (!summary
->loads
);
3195 summary
->loads
= modref_records::create_ggc ();
3196 gcc_assert (!summary
->stores
);
3197 summary
->stores
= modref_records::create_ggc ();
3198 summary
->writes_errno
= false;
3199 summary
->side_effects
= false;
3200 summary
->nondeterministic
= false;
3201 summary
->calls_interposable
= false;
3205 gcc_assert (!summary_lto
->loads
);
3206 summary_lto
->loads
= modref_records_lto::create_ggc ();
3207 gcc_assert (!summary_lto
->stores
);
3208 summary_lto
->stores
= modref_records_lto::create_ggc ();
3209 summary_lto
->writes_errno
= false;
3210 summary_lto
->side_effects
= false;
3211 summary_lto
->nondeterministic
= false;
3212 summary_lto
->calls_interposable
= false;
3215 analyze_parms (summary
, summary_lto
, ipa
,
3216 past_flags
, past_retslot_flags
, past_static_chain_flags
);
3219 modref_access_analysis
analyzer (ipa
, summary
, summary_lto
);
3220 analyzer
.analyze ();
3223 if (!ipa
&& flag_ipa_pure_const
)
3225 if (!summary
->stores
->every_base
&& !summary
->stores
->bases
3226 && !summary
->nondeterministic
)
3228 if (!summary
->loads
->every_base
&& !summary
->loads
->bases
3229 && !summary
->calls_interposable
)
3230 fixup_cfg
= ipa_make_function_const (fnode
,
3231 summary
->side_effects
, true);
3233 fixup_cfg
= ipa_make_function_pure (fnode
,
3234 summary
->side_effects
, true);
3237 int ecf_flags
= flags_from_decl_or_type (current_function_decl
);
3238 if (summary
&& !summary
->useful_p (ecf_flags
))
3241 optimization_summaries
->remove (fnode
);
3243 summaries
->remove (fnode
);
3247 summary
->finalize (current_function_decl
);
3248 if (summary_lto
&& !summary_lto
->useful_p (ecf_flags
))
3250 summaries_lto
->remove (fnode
);
3254 if (ipa
&& !summary
&& !summary_lto
)
3255 remove_modref_edge_summaries (fnode
);
3259 fprintf (dump_file
, " - modref done with result: tracked.\n");
3261 summary
->dump (dump_file
);
3263 summary_lto
->dump (dump_file
);
3264 dump_modref_edge_summaries (dump_file
, fnode
, 2);
3265 /* To simplify debugging, compare IPA and local solutions. */
3266 if (past_flags_known
&& summary
)
3268 size_t len
= summary
->arg_flags
.length ();
3270 if (past_flags
.length () > len
)
3271 len
= past_flags
.length ();
3272 for (size_t i
= 0; i
< len
; i
++)
3274 int old_flags
= i
< past_flags
.length () ? past_flags
[i
] : 0;
3275 int new_flags
= i
< summary
->arg_flags
.length ()
3276 ? summary
->arg_flags
[i
] : 0;
3277 old_flags
= remove_useless_eaf_flags
3278 (old_flags
, flags_from_decl_or_type (current_function_decl
),
3279 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))));
3280 if (old_flags
!= new_flags
)
3282 if ((old_flags
& ~new_flags
) == 0
3283 || (new_flags
& EAF_UNUSED
))
3284 fprintf (dump_file
, " Flags for param %i improved:",
3288 dump_eaf_flags (dump_file
, old_flags
, false);
3289 fprintf (dump_file
, " -> ");
3290 dump_eaf_flags (dump_file
, new_flags
, true);
3293 past_retslot_flags
= remove_useless_eaf_flags
3294 (past_retslot_flags
,
3295 flags_from_decl_or_type (current_function_decl
),
3296 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))));
3297 if (past_retslot_flags
!= summary
->retslot_flags
)
3299 if ((past_retslot_flags
& ~summary
->retslot_flags
) == 0
3300 || (summary
->retslot_flags
& EAF_UNUSED
))
3301 fprintf (dump_file
, " Flags for retslot improved:");
3304 dump_eaf_flags (dump_file
, past_retslot_flags
, false);
3305 fprintf (dump_file
, " -> ");
3306 dump_eaf_flags (dump_file
, summary
->retslot_flags
, true);
3308 past_static_chain_flags
= remove_useless_eaf_flags
3309 (past_static_chain_flags
,
3310 flags_from_decl_or_type (current_function_decl
),
3311 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))));
3312 if (past_static_chain_flags
!= summary
->static_chain_flags
)
3314 if ((past_static_chain_flags
& ~summary
->static_chain_flags
) == 0
3315 || (summary
->static_chain_flags
& EAF_UNUSED
))
3316 fprintf (dump_file
, " Flags for static chain improved:");
3319 dump_eaf_flags (dump_file
, past_static_chain_flags
, false);
3320 fprintf (dump_file
, " -> ");
3321 dump_eaf_flags (dump_file
, summary
->static_chain_flags
, true);
3324 else if (past_flags_known
&& !summary
)
3326 for (size_t i
= 0; i
< past_flags
.length (); i
++)
3328 int old_flags
= past_flags
[i
];
3329 old_flags
= remove_useless_eaf_flags
3330 (old_flags
, flags_from_decl_or_type (current_function_decl
),
3331 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))));
3334 fprintf (dump_file
, " Flags for param %i worsened:",
3336 dump_eaf_flags (dump_file
, old_flags
, false);
3337 fprintf (dump_file
, " -> \n");
3340 past_retslot_flags
= remove_useless_eaf_flags
3341 (past_retslot_flags
,
3342 flags_from_decl_or_type (current_function_decl
),
3343 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))));
3344 if (past_retslot_flags
)
3346 fprintf (dump_file
, " Flags for retslot worsened:");
3347 dump_eaf_flags (dump_file
, past_retslot_flags
, false);
3348 fprintf (dump_file
, " ->\n");
3350 past_static_chain_flags
= remove_useless_eaf_flags
3351 (past_static_chain_flags
,
3352 flags_from_decl_or_type (current_function_decl
),
3353 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))));
3354 if (past_static_chain_flags
)
3356 fprintf (dump_file
, " Flags for static chain worsened:");
3357 dump_eaf_flags (dump_file
, past_static_chain_flags
, false);
3358 fprintf (dump_file
, " ->\n");
3365 /* Callback for generate_summary. */
3368 modref_generate (void)
3370 struct cgraph_node
*node
;
3371 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node
)
3373 function
*f
= DECL_STRUCT_FUNCTION (node
->decl
);
3377 analyze_function (true);
3382 } /* ANON namespace. */
3384 /* Debugging helper. */
3387 debug_eaf_flags (int flags
)
3389 dump_eaf_flags (stderr
, flags
, true);
3392 /* Called when a new function is inserted to callgraph late. */
3395 modref_summaries::insert (struct cgraph_node
*node
, modref_summary
*)
3397 /* Local passes ought to be executed by the pass manager. */
3398 if (this == optimization_summaries
)
3400 optimization_summaries
->remove (node
);
3403 if (!DECL_STRUCT_FUNCTION (node
->decl
)
3404 || !opt_for_fn (node
->decl
, flag_ipa_modref
))
3406 summaries
->remove (node
);
3409 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
3410 analyze_function (true);
3414 /* Called when a new function is inserted to callgraph late. */
3417 modref_summaries_lto::insert (struct cgraph_node
*node
, modref_summary_lto
*)
3419 /* We do not support adding new function when IPA information is already
3420 propagated. This is done only by SIMD cloning that is not very
3422 if (!DECL_STRUCT_FUNCTION (node
->decl
)
3423 || !opt_for_fn (node
->decl
, flag_ipa_modref
)
3426 summaries_lto
->remove (node
);
3429 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
3430 analyze_function (true);
3434 /* Called when new clone is inserted to callgraph late. */
3437 modref_summaries::duplicate (cgraph_node
*, cgraph_node
*dst
,
3438 modref_summary
*src_data
,
3439 modref_summary
*dst_data
)
3441 /* Do not duplicate optimization summaries; we do not handle parameter
3442 transforms on them. */
3443 if (this == optimization_summaries
)
3445 optimization_summaries
->remove (dst
);
3448 dst_data
->stores
= modref_records::create_ggc ();
3449 dst_data
->stores
->copy_from (src_data
->stores
);
3450 dst_data
->loads
= modref_records::create_ggc ();
3451 dst_data
->loads
->copy_from (src_data
->loads
);
3452 dst_data
->kills
.reserve_exact (src_data
->kills
.length ());
3453 dst_data
->kills
.splice (src_data
->kills
);
3454 dst_data
->writes_errno
= src_data
->writes_errno
;
3455 dst_data
->side_effects
= src_data
->side_effects
;
3456 dst_data
->nondeterministic
= src_data
->nondeterministic
;
3457 dst_data
->calls_interposable
= src_data
->calls_interposable
;
3458 if (src_data
->arg_flags
.length ())
3459 dst_data
->arg_flags
= src_data
->arg_flags
.copy ();
3460 dst_data
->retslot_flags
= src_data
->retslot_flags
;
3461 dst_data
->static_chain_flags
= src_data
->static_chain_flags
;
3464 /* Called when new clone is inserted to callgraph late. */
3467 modref_summaries_lto::duplicate (cgraph_node
*, cgraph_node
*,
3468 modref_summary_lto
*src_data
,
3469 modref_summary_lto
*dst_data
)
3471 /* Be sure that no further cloning happens after ipa-modref. If it does
3472 we will need to update signatures for possible param changes. */
3473 gcc_checking_assert (!((modref_summaries_lto
*)summaries_lto
)->propagated
);
3474 dst_data
->stores
= modref_records_lto::create_ggc ();
3475 dst_data
->stores
->copy_from (src_data
->stores
);
3476 dst_data
->loads
= modref_records_lto::create_ggc ();
3477 dst_data
->loads
->copy_from (src_data
->loads
);
3478 dst_data
->kills
.reserve_exact (src_data
->kills
.length ());
3479 dst_data
->kills
.splice (src_data
->kills
);
3480 dst_data
->writes_errno
= src_data
->writes_errno
;
3481 dst_data
->side_effects
= src_data
->side_effects
;
3482 dst_data
->nondeterministic
= src_data
->nondeterministic
;
3483 dst_data
->calls_interposable
= src_data
->calls_interposable
;
3484 if (src_data
->arg_flags
.length ())
3485 dst_data
->arg_flags
= src_data
->arg_flags
.copy ();
3486 dst_data
->retslot_flags
= src_data
->retslot_flags
;
3487 dst_data
->static_chain_flags
= src_data
->static_chain_flags
;
3492 /* Definition of the modref pass on GIMPLE. */
3493 const pass_data pass_data_modref
= {
3498 (PROP_cfg
| PROP_ssa
),
3505 class pass_modref
: public gimple_opt_pass
3508 pass_modref (gcc::context
*ctxt
)
3509 : gimple_opt_pass (pass_data_modref
, ctxt
) {}
3511 /* opt_pass methods: */
3512 opt_pass
*clone () final override
3514 return new pass_modref (m_ctxt
);
3516 bool gate (function
*) final override
3518 return flag_ipa_modref
;
3520 unsigned int execute (function
*) final override
;
3523 /* Encode TT to the output block OB using the summary streaming API. */
3526 write_modref_records (modref_records_lto
*tt
, struct output_block
*ob
)
3528 streamer_write_uhwi (ob
, tt
->every_base
);
3529 streamer_write_uhwi (ob
, vec_safe_length (tt
->bases
));
3530 for (auto base_node
: tt
->bases
)
3532 stream_write_tree (ob
, base_node
->base
, true);
3534 streamer_write_uhwi (ob
, base_node
->every_ref
);
3535 streamer_write_uhwi (ob
, vec_safe_length (base_node
->refs
));
3537 for (auto ref_node
: base_node
->refs
)
3539 stream_write_tree (ob
, ref_node
->ref
, true);
3540 streamer_write_uhwi (ob
, ref_node
->every_access
);
3541 streamer_write_uhwi (ob
, vec_safe_length (ref_node
->accesses
));
3543 for (auto access_node
: ref_node
->accesses
)
3544 access_node
.stream_out (ob
);
3549 /* Read a modref_tree from the input block IB using the data from DATA_IN.
3550 This assumes that the tree was encoded using write_modref_tree.
3551 Either nolto_ret or lto_ret is initialized by the tree depending whether
3552 LTO streaming is expected or not. */
3555 read_modref_records (tree decl
,
3556 lto_input_block
*ib
, struct data_in
*data_in
,
3557 modref_records
**nolto_ret
,
3558 modref_records_lto
**lto_ret
)
3560 size_t max_bases
= opt_for_fn (decl
, param_modref_max_bases
);
3561 size_t max_refs
= opt_for_fn (decl
, param_modref_max_refs
);
3562 size_t max_accesses
= opt_for_fn (decl
, param_modref_max_accesses
);
3565 *lto_ret
= modref_records_lto::create_ggc ();
3567 *nolto_ret
= modref_records::create_ggc ();
3568 gcc_checking_assert (lto_ret
|| nolto_ret
);
3570 size_t every_base
= streamer_read_uhwi (ib
);
3571 size_t nbase
= streamer_read_uhwi (ib
);
3573 gcc_assert (!every_base
|| nbase
== 0);
3577 (*nolto_ret
)->collapse ();
3579 (*lto_ret
)->collapse ();
3581 for (size_t i
= 0; i
< nbase
; i
++)
3583 tree base_tree
= stream_read_tree (ib
, data_in
);
3584 modref_base_node
<alias_set_type
> *nolto_base_node
= NULL
;
3585 modref_base_node
<tree
> *lto_base_node
= NULL
;
3587 /* At stream in time we have LTO alias info. Check if we streamed in
3588 something obviously unnecessary. Do not glob types by alias sets;
3589 it is not 100% clear that ltrans types will get merged same way.
3590 Types may get refined based on ODR type conflicts. */
3591 if (base_tree
&& !get_alias_set (base_tree
))
3595 fprintf (dump_file
, "Streamed in alias set 0 type ");
3596 print_generic_expr (dump_file
, base_tree
);
3597 fprintf (dump_file
, "\n");
3603 nolto_base_node
= (*nolto_ret
)->insert_base (base_tree
3604 ? get_alias_set (base_tree
)
3607 lto_base_node
= (*lto_ret
)->insert_base (base_tree
, 0, max_bases
);
3608 size_t every_ref
= streamer_read_uhwi (ib
);
3609 size_t nref
= streamer_read_uhwi (ib
);
3611 gcc_assert (!every_ref
|| nref
== 0);
3614 if (nolto_base_node
)
3615 nolto_base_node
->collapse ();
3617 lto_base_node
->collapse ();
3619 for (size_t j
= 0; j
< nref
; j
++)
3621 tree ref_tree
= stream_read_tree (ib
, data_in
);
3623 if (ref_tree
&& !get_alias_set (ref_tree
))
3627 fprintf (dump_file
, "Streamed in alias set 0 type ");
3628 print_generic_expr (dump_file
, ref_tree
);
3629 fprintf (dump_file
, "\n");
3634 modref_ref_node
<alias_set_type
> *nolto_ref_node
= NULL
;
3635 modref_ref_node
<tree
> *lto_ref_node
= NULL
;
3637 if (nolto_base_node
)
3639 = nolto_base_node
->insert_ref (ref_tree
3640 ? get_alias_set (ref_tree
) : 0,
3643 lto_ref_node
= lto_base_node
->insert_ref (ref_tree
, max_refs
);
3645 size_t every_access
= streamer_read_uhwi (ib
);
3646 size_t naccesses
= streamer_read_uhwi (ib
);
3648 if (nolto_ref_node
&& every_access
)
3649 nolto_ref_node
->collapse ();
3650 if (lto_ref_node
&& every_access
)
3651 lto_ref_node
->collapse ();
3653 for (size_t k
= 0; k
< naccesses
; k
++)
3655 modref_access_node a
= modref_access_node::stream_in (ib
);
3657 nolto_ref_node
->insert_access (a
, max_accesses
, false);
3659 lto_ref_node
->insert_access (a
, max_accesses
, false);
3664 (*lto_ret
)->cleanup ();
3666 (*nolto_ret
)->cleanup ();
3669 /* Write ESUM to BP. */
3672 modref_write_escape_summary (struct bitpack_d
*bp
, escape_summary
*esum
)
3676 bp_pack_var_len_unsigned (bp
, 0);
3679 bp_pack_var_len_unsigned (bp
, esum
->esc
.length ());
3682 FOR_EACH_VEC_ELT (esum
->esc
, i
, ee
)
3684 bp_pack_var_len_int (bp
, ee
->parm_index
);
3685 bp_pack_var_len_unsigned (bp
, ee
->arg
);
3686 bp_pack_var_len_unsigned (bp
, ee
->min_flags
);
3687 bp_pack_value (bp
, ee
->direct
, 1);
3691 /* Read escape summary for E from BP. */
3694 modref_read_escape_summary (struct bitpack_d
*bp
, cgraph_edge
*e
)
3696 unsigned int n
= bp_unpack_var_len_unsigned (bp
);
3699 escape_summary
*esum
= escape_summaries
->get_create (e
);
3700 esum
->esc
.reserve_exact (n
);
3701 for (unsigned int i
= 0; i
< n
; i
++)
3704 ee
.parm_index
= bp_unpack_var_len_int (bp
);
3705 ee
.arg
= bp_unpack_var_len_unsigned (bp
);
3706 ee
.min_flags
= bp_unpack_var_len_unsigned (bp
);
3707 ee
.direct
= bp_unpack_value (bp
, 1);
3708 esum
->esc
.quick_push (ee
);
3712 /* Callback for write_summary. */
3717 struct output_block
*ob
= create_output_block (LTO_section_ipa_modref
);
3718 lto_symtab_encoder_t encoder
= ob
->decl_state
->symtab_node_encoder
;
3719 unsigned int count
= 0;
3724 streamer_write_uhwi (ob
, 0);
3725 streamer_write_char_stream (ob
->main_stream
, 0);
3726 produce_asm (ob
, NULL
);
3727 destroy_output_block (ob
);
3731 for (i
= 0; i
< lto_symtab_encoder_size (encoder
); i
++)
3733 symtab_node
*snode
= lto_symtab_encoder_deref (encoder
, i
);
3734 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (snode
);
3735 modref_summary_lto
*r
;
3737 if (cnode
&& cnode
->definition
&& !cnode
->alias
3738 && (r
= summaries_lto
->get (cnode
))
3739 && r
->useful_p (flags_from_decl_or_type (cnode
->decl
)))
3742 streamer_write_uhwi (ob
, count
);
3744 for (i
= 0; i
< lto_symtab_encoder_size (encoder
); i
++)
3746 symtab_node
*snode
= lto_symtab_encoder_deref (encoder
, i
);
3747 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (snode
);
3749 if (cnode
&& cnode
->definition
&& !cnode
->alias
)
3751 modref_summary_lto
*r
= summaries_lto
->get (cnode
);
3753 if (!r
|| !r
->useful_p (flags_from_decl_or_type (cnode
->decl
)))
3756 streamer_write_uhwi (ob
, lto_symtab_encoder_encode (encoder
, cnode
));
3758 streamer_write_uhwi (ob
, r
->arg_flags
.length ());
3759 for (unsigned int i
= 0; i
< r
->arg_flags
.length (); i
++)
3760 streamer_write_uhwi (ob
, r
->arg_flags
[i
]);
3761 streamer_write_uhwi (ob
, r
->retslot_flags
);
3762 streamer_write_uhwi (ob
, r
->static_chain_flags
);
3764 write_modref_records (r
->loads
, ob
);
3765 write_modref_records (r
->stores
, ob
);
3766 streamer_write_uhwi (ob
, r
->kills
.length ());
3767 for (auto kill
: r
->kills
)
3768 kill
.stream_out (ob
);
3770 struct bitpack_d bp
= bitpack_create (ob
->main_stream
);
3771 bp_pack_value (&bp
, r
->writes_errno
, 1);
3772 bp_pack_value (&bp
, r
->side_effects
, 1);
3773 bp_pack_value (&bp
, r
->nondeterministic
, 1);
3774 bp_pack_value (&bp
, r
->calls_interposable
, 1);
3777 for (cgraph_edge
*e
= cnode
->indirect_calls
;
3778 e
; e
= e
->next_callee
)
3780 class fnspec_summary
*sum
= fnspec_summaries
->get (e
);
3781 bp_pack_value (&bp
, sum
!= NULL
, 1);
3783 bp_pack_string (ob
, &bp
, sum
->fnspec
, true);
3784 class escape_summary
*esum
= escape_summaries
->get (e
);
3785 modref_write_escape_summary (&bp
,esum
);
3787 for (cgraph_edge
*e
= cnode
->callees
; e
; e
= e
->next_callee
)
3789 class fnspec_summary
*sum
= fnspec_summaries
->get (e
);
3790 bp_pack_value (&bp
, sum
!= NULL
, 1);
3792 bp_pack_string (ob
, &bp
, sum
->fnspec
, true);
3793 class escape_summary
*esum
= escape_summaries
->get (e
);
3794 modref_write_escape_summary (&bp
,esum
);
3797 streamer_write_bitpack (&bp
);
3800 streamer_write_char_stream (ob
->main_stream
, 0);
3801 produce_asm (ob
, NULL
);
3802 destroy_output_block (ob
);
3806 read_section (struct lto_file_decl_data
*file_data
, const char *data
,
3809 const struct lto_function_header
*header
3810 = (const struct lto_function_header
*) data
;
3811 const int cfg_offset
= sizeof (struct lto_function_header
);
3812 const int main_offset
= cfg_offset
+ header
->cfg_size
;
3813 const int string_offset
= main_offset
+ header
->main_size
;
3814 struct data_in
*data_in
;
3816 unsigned int f_count
;
3818 lto_input_block
ib ((const char *) data
+ main_offset
, header
->main_size
,
3819 file_data
->mode_table
);
3822 = lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
3823 header
->string_size
, vNULL
);
3824 f_count
= streamer_read_uhwi (&ib
);
3825 for (i
= 0; i
< f_count
; i
++)
3827 struct cgraph_node
*node
;
3828 lto_symtab_encoder_t encoder
;
3830 unsigned int index
= streamer_read_uhwi (&ib
);
3831 encoder
= file_data
->symtab_node_encoder
;
3832 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
3835 modref_summary
*modref_sum
= summaries
3836 ? summaries
->get_create (node
) : NULL
;
3837 modref_summary_lto
*modref_sum_lto
= summaries_lto
3838 ? summaries_lto
->get_create (node
)
3840 if (optimization_summaries
)
3841 modref_sum
= optimization_summaries
->get_create (node
);
3845 modref_sum
->writes_errno
= false;
3846 modref_sum
->side_effects
= false;
3847 modref_sum
->nondeterministic
= false;
3848 modref_sum
->calls_interposable
= false;
3852 modref_sum_lto
->writes_errno
= false;
3853 modref_sum_lto
->side_effects
= false;
3854 modref_sum_lto
->nondeterministic
= false;
3855 modref_sum_lto
->calls_interposable
= false;
3858 gcc_assert (!modref_sum
|| (!modref_sum
->loads
3859 && !modref_sum
->stores
));
3860 gcc_assert (!modref_sum_lto
|| (!modref_sum_lto
->loads
3861 && !modref_sum_lto
->stores
));
3862 unsigned int args
= streamer_read_uhwi (&ib
);
3863 if (args
&& modref_sum
)
3864 modref_sum
->arg_flags
.reserve_exact (args
);
3865 if (args
&& modref_sum_lto
)
3866 modref_sum_lto
->arg_flags
.reserve_exact (args
);
3867 for (unsigned int i
= 0; i
< args
; i
++)
3869 eaf_flags_t flags
= streamer_read_uhwi (&ib
);
3871 modref_sum
->arg_flags
.quick_push (flags
);
3873 modref_sum_lto
->arg_flags
.quick_push (flags
);
3875 eaf_flags_t flags
= streamer_read_uhwi (&ib
);
3877 modref_sum
->retslot_flags
= flags
;
3879 modref_sum_lto
->retslot_flags
= flags
;
3881 flags
= streamer_read_uhwi (&ib
);
3883 modref_sum
->static_chain_flags
= flags
;
3885 modref_sum_lto
->static_chain_flags
= flags
;
3887 read_modref_records (node
->decl
, &ib
, data_in
,
3888 modref_sum
? &modref_sum
->loads
: NULL
,
3889 modref_sum_lto
? &modref_sum_lto
->loads
: NULL
);
3890 read_modref_records (node
->decl
, &ib
, data_in
,
3891 modref_sum
? &modref_sum
->stores
: NULL
,
3892 modref_sum_lto
? &modref_sum_lto
->stores
: NULL
);
3893 int j
= streamer_read_uhwi (&ib
);
3894 if (j
&& modref_sum
)
3895 modref_sum
->kills
.reserve_exact (j
);
3896 if (j
&& modref_sum_lto
)
3897 modref_sum_lto
->kills
.reserve_exact (j
);
3898 for (int k
= 0; k
< j
; k
++)
3900 modref_access_node a
= modref_access_node::stream_in (&ib
);
3903 modref_sum
->kills
.quick_push (a
);
3905 modref_sum_lto
->kills
.quick_push (a
);
3907 struct bitpack_d bp
= streamer_read_bitpack (&ib
);
3908 if (bp_unpack_value (&bp
, 1))
3911 modref_sum
->writes_errno
= true;
3913 modref_sum_lto
->writes_errno
= true;
3915 if (bp_unpack_value (&bp
, 1))
3918 modref_sum
->side_effects
= true;
3920 modref_sum_lto
->side_effects
= true;
3922 if (bp_unpack_value (&bp
, 1))
3925 modref_sum
->nondeterministic
= true;
3927 modref_sum_lto
->nondeterministic
= true;
3929 if (bp_unpack_value (&bp
, 1))
3932 modref_sum
->calls_interposable
= true;
3934 modref_sum_lto
->calls_interposable
= true;
3938 for (cgraph_edge
*e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
3940 if (bp_unpack_value (&bp
, 1))
3942 class fnspec_summary
*sum
= fnspec_summaries
->get_create (e
);
3943 sum
->fnspec
= xstrdup (bp_unpack_string (data_in
, &bp
));
3945 modref_read_escape_summary (&bp
, e
);
3947 for (cgraph_edge
*e
= node
->callees
; e
; e
= e
->next_callee
)
3949 if (bp_unpack_value (&bp
, 1))
3951 class fnspec_summary
*sum
= fnspec_summaries
->get_create (e
);
3952 sum
->fnspec
= xstrdup (bp_unpack_string (data_in
, &bp
));
3954 modref_read_escape_summary (&bp
, e
);
3958 modref_sum
->finalize (node
->decl
);
3961 fprintf (dump_file
, "Read modref for %s\n",
3962 node
->dump_name ());
3964 modref_sum
->dump (dump_file
);
3966 modref_sum_lto
->dump (dump_file
);
3967 dump_modref_edge_summaries (dump_file
, node
, 4);
3971 lto_free_section_data (file_data
, LTO_section_ipa_modref
, NULL
, data
,
3973 lto_data_in_delete (data_in
);
3976 /* Callback for read_summary. */
3981 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
3982 struct lto_file_decl_data
*file_data
;
3985 gcc_checking_assert (!optimization_summaries
&& !summaries
&& !summaries_lto
);
3987 optimization_summaries
= modref_summaries::create_ggc (symtab
);
3990 if (flag_wpa
|| flag_incremental_link
== INCREMENTAL_LINK_LTO
)
3991 summaries_lto
= modref_summaries_lto::create_ggc (symtab
);
3993 || (flag_incremental_link
== INCREMENTAL_LINK_LTO
3994 && flag_fat_lto_objects
))
3995 summaries
= modref_summaries::create_ggc (symtab
);
3996 if (!fnspec_summaries
)
3997 fnspec_summaries
= new fnspec_summaries_t (symtab
);
3998 if (!escape_summaries
)
3999 escape_summaries
= new escape_summaries_t (symtab
);
4002 while ((file_data
= file_data_vec
[j
++]))
4005 const char *data
= lto_get_summary_section_data (file_data
,
4006 LTO_section_ipa_modref
,
4009 read_section (file_data
, data
, len
);
4011 /* Fatal error here. We do not want to support compiling ltrans units
4012 with different version of compiler or different flags than the WPA
4013 unit, so this should never happen. */
4014 fatal_error (input_location
,
4015 "IPA modref summary is missing in input file");
4019 /* Recompute arg_flags for param adjustments in INFO. */
4022 remap_arg_flags (auto_vec
<eaf_flags_t
> &arg_flags
, clone_info
*info
)
4024 auto_vec
<eaf_flags_t
> old
= arg_flags
.copy ();
4027 ipa_adjusted_param
*p
;
4029 arg_flags
.release ();
4031 FOR_EACH_VEC_SAFE_ELT (info
->param_adjustments
->m_adj_params
, i
, p
)
4033 int o
= info
->param_adjustments
->get_original_index (i
);
4034 if (o
>= 0 && (int)old
.length () > o
&& old
[o
])
4038 arg_flags
.safe_grow_cleared (max
+ 1, true);
4039 FOR_EACH_VEC_SAFE_ELT (info
->param_adjustments
->m_adj_params
, i
, p
)
4041 int o
= info
->param_adjustments
->get_original_index (i
);
4042 if (o
>= 0 && (int)old
.length () > o
&& old
[o
])
4043 arg_flags
[i
] = old
[o
];
4047 /* Update kills according to the parm map MAP. */
4050 remap_kills (vec
<modref_access_node
> &kills
, const vec
<int> &map
)
4052 for (size_t i
= 0; i
< kills
.length ();)
4053 if (kills
[i
].parm_index
>= 0)
4055 if (kills
[i
].parm_index
< (int)map
.length ()
4056 && map
[kills
[i
].parm_index
] != MODREF_UNKNOWN_PARM
)
4058 kills
[i
].parm_index
= map
[kills
[i
].parm_index
];
4062 kills
.unordered_remove (i
);
4068 /* If signature changed, update the summary. */
4071 update_signature (struct cgraph_node
*node
)
4073 clone_info
*info
= clone_info::get (node
);
4074 if (!info
|| !info
->param_adjustments
)
4077 modref_summary
*r
= optimization_summaries
4078 ? optimization_summaries
->get (node
) : NULL
;
4079 modref_summary_lto
*r_lto
= summaries_lto
4080 ? summaries_lto
->get (node
) : NULL
;
4085 fprintf (dump_file
, "Updating summary for %s from:\n",
4086 node
->dump_name ());
4088 r
->dump (dump_file
);
4090 r_lto
->dump (dump_file
);
4094 ipa_adjusted_param
*p
;
4096 FOR_EACH_VEC_SAFE_ELT (info
->param_adjustments
->m_adj_params
, i
, p
)
4098 int idx
= info
->param_adjustments
->get_original_index (i
);
4103 auto_vec
<int, 32> map
;
4105 map
.reserve (max
+ 1);
4106 for (i
= 0; i
<= max
; i
++)
4107 map
.quick_push (MODREF_UNKNOWN_PARM
);
4108 FOR_EACH_VEC_SAFE_ELT (info
->param_adjustments
->m_adj_params
, i
, p
)
4110 int idx
= info
->param_adjustments
->get_original_index (i
);
4116 r
->loads
->remap_params (&map
);
4117 r
->stores
->remap_params (&map
);
4118 remap_kills (r
->kills
, map
);
4119 if (r
->arg_flags
.length ())
4120 remap_arg_flags (r
->arg_flags
, info
);
4124 r_lto
->loads
->remap_params (&map
);
4125 r_lto
->stores
->remap_params (&map
);
4126 remap_kills (r_lto
->kills
, map
);
4127 if (r_lto
->arg_flags
.length ())
4128 remap_arg_flags (r_lto
->arg_flags
, info
);
4132 fprintf (dump_file
, "to:\n");
4134 r
->dump (dump_file
);
4136 r_lto
->dump (dump_file
);
4139 r
->finalize (node
->decl
);
4143 /* Definition of the modref IPA pass. */
4144 const pass_data pass_data_ipa_modref
=
4146 IPA_PASS
, /* type */
4147 "modref", /* name */
4148 OPTGROUP_IPA
, /* optinfo_flags */
4149 TV_IPA_MODREF
, /* tv_id */
4150 0, /* properties_required */
4151 0, /* properties_provided */
4152 0, /* properties_destroyed */
4153 0, /* todo_flags_start */
4154 ( TODO_dump_symtab
), /* todo_flags_finish */
4157 class pass_ipa_modref
: public ipa_opt_pass_d
4160 pass_ipa_modref (gcc::context
*ctxt
)
4161 : ipa_opt_pass_d (pass_data_ipa_modref
, ctxt
,
4162 modref_generate
, /* generate_summary */
4163 modref_write
, /* write_summary */
4164 modref_read
, /* read_summary */
4165 modref_write
, /* write_optimization_summary */
4166 modref_read
, /* read_optimization_summary */
4167 NULL
, /* stmt_fixup */
4168 0, /* function_transform_todo_flags_start */
4169 NULL
, /* function_transform */
4170 NULL
) /* variable_transform */
4173 /* opt_pass methods: */
4174 opt_pass
*clone () final override
{ return new pass_ipa_modref (m_ctxt
); }
4175 bool gate (function
*) final override
4179 unsigned int execute (function
*) final override
;
4185 unsigned int pass_modref::execute (function
*)
4187 if (analyze_function (false))
4188 return execute_fixup_cfg ();
4193 make_pass_modref (gcc::context
*ctxt
)
4195 return new pass_modref (ctxt
);
4199 make_pass_ipa_modref (gcc::context
*ctxt
)
4201 return new pass_ipa_modref (ctxt
);
4206 /* Skip edges from and to nodes without ipa_pure_const enabled.
4207 Ignore not available symbols. */
4210 ignore_edge (struct cgraph_edge
*e
)
4212 /* We merge summaries of inline clones into summaries of functions they
4213 are inlined to. For that reason the complete function bodies must
4215 if (!e
->inline_failed
)
4217 enum availability avail
;
4218 cgraph_node
*callee
= e
->callee
->ultimate_alias_target
4219 (&avail
, e
->caller
);
4221 return (avail
<= AVAIL_INTERPOSABLE
4222 || ((!optimization_summaries
|| !optimization_summaries
->get (callee
))
4223 && (!summaries_lto
|| !summaries_lto
->get (callee
))));
4226 /* Compute parm_map for CALLEE_EDGE. */
4229 compute_parm_map (cgraph_edge
*callee_edge
, vec
<modref_parm_map
> *parm_map
)
4231 class ipa_edge_args
*args
;
4232 if (ipa_node_params_sum
4233 && !callee_edge
->call_stmt_cannot_inline_p
4234 && (args
= ipa_edge_args_sum
->get (callee_edge
)) != NULL
)
4236 int i
, count
= ipa_get_cs_argument_count (args
);
4237 class ipa_node_params
*caller_parms_info
, *callee_pi
;
4238 class ipa_call_summary
*es
4239 = ipa_call_summaries
->get (callee_edge
);
4241 = callee_edge
->callee
->ultimate_alias_target
4242 (NULL
, callee_edge
->caller
);
4245 = ipa_node_params_sum
->get (callee_edge
->caller
->inlined_to
4246 ? callee_edge
->caller
->inlined_to
4247 : callee_edge
->caller
);
4248 callee_pi
= ipa_node_params_sum
->get (callee
);
4250 (*parm_map
).safe_grow_cleared (count
, true);
4252 for (i
= 0; i
< count
; i
++)
4254 if (es
&& es
->param
[i
].points_to_local_or_readonly_memory
)
4256 (*parm_map
)[i
].parm_index
= MODREF_LOCAL_MEMORY_PARM
;
4260 struct ipa_jump_func
*jf
4261 = ipa_get_ith_jump_func (args
, i
);
4262 if (jf
&& callee_pi
)
4264 tree cst
= ipa_value_from_jfunc (caller_parms_info
,
4268 if (cst
&& points_to_local_or_readonly_memory_p (cst
))
4270 (*parm_map
)[i
].parm_index
= MODREF_LOCAL_MEMORY_PARM
;
4274 if (jf
&& jf
->type
== IPA_JF_PASS_THROUGH
)
4276 (*parm_map
)[i
].parm_index
4277 = ipa_get_jf_pass_through_formal_id (jf
);
4278 if (ipa_get_jf_pass_through_operation (jf
) == NOP_EXPR
)
4280 (*parm_map
)[i
].parm_offset_known
= true;
4281 (*parm_map
)[i
].parm_offset
= 0;
4283 else if (ipa_get_jf_pass_through_operation (jf
)
4284 == POINTER_PLUS_EXPR
4285 && ptrdiff_tree_p (ipa_get_jf_pass_through_operand (jf
),
4286 &(*parm_map
)[i
].parm_offset
))
4287 (*parm_map
)[i
].parm_offset_known
= true;
4289 (*parm_map
)[i
].parm_offset_known
= false;
4292 if (jf
&& jf
->type
== IPA_JF_ANCESTOR
)
4294 (*parm_map
)[i
].parm_index
= ipa_get_jf_ancestor_formal_id (jf
);
4295 (*parm_map
)[i
].parm_offset_known
= true;
4297 (!(ipa_get_jf_ancestor_offset (jf
) & (BITS_PER_UNIT
- 1)));
4298 (*parm_map
)[i
].parm_offset
4299 = ipa_get_jf_ancestor_offset (jf
) >> LOG2_BITS_PER_UNIT
;
4302 (*parm_map
)[i
].parm_index
= -1;
4306 fprintf (dump_file
, " Parm map: ");
4307 for (i
= 0; i
< count
; i
++)
4308 fprintf (dump_file
, " %i", (*parm_map
)[i
].parm_index
);
4309 fprintf (dump_file
, "\n");
4316 /* Map used to translate escape infos. */
4324 /* Update escape map for E. */
4327 update_escape_summary_1 (cgraph_edge
*e
,
4328 vec
<vec
<escape_map
>> &map
,
4331 escape_summary
*sum
= escape_summaries
->get (e
);
4334 auto_vec
<escape_entry
> old
= sum
->esc
.copy ();
4335 sum
->esc
.release ();
4339 FOR_EACH_VEC_ELT (old
, i
, ee
)
4342 struct escape_map
*em
;
4343 /* TODO: We do not have jump functions for return slots, so we
4344 never propagate them to outer function. */
4345 if (ee
->parm_index
>= (int)map
.length ()
4346 || ee
->parm_index
< 0)
4348 FOR_EACH_VEC_ELT (map
[ee
->parm_index
], j
, em
)
4350 int min_flags
= ee
->min_flags
;
4351 if (ee
->direct
&& !em
->direct
)
4352 min_flags
= deref_flags (min_flags
, ignore_stores
);
4353 struct escape_entry entry
= {em
->parm_index
, ee
->arg
,
4355 ee
->direct
& em
->direct
};
4356 sum
->esc
.safe_push (entry
);
4359 if (!sum
->esc
.length ())
4360 escape_summaries
->remove (e
);
4363 /* Update escape map for NODE. */
4366 update_escape_summary (cgraph_node
*node
,
4367 vec
<vec
<escape_map
>> &map
,
4370 if (!escape_summaries
)
4372 for (cgraph_edge
*e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4373 update_escape_summary_1 (e
, map
, ignore_stores
);
4374 for (cgraph_edge
*e
= node
->callees
; e
; e
= e
->next_callee
)
4376 if (!e
->inline_failed
)
4377 update_escape_summary (e
->callee
, map
, ignore_stores
);
4379 update_escape_summary_1 (e
, map
, ignore_stores
);
4383 /* Get parameter type from DECL. This is only safe for special cases
4384 like builtins we create fnspec for because the type match is checked
4385 at fnspec creation time. */
4388 get_parm_type (tree decl
, unsigned int i
)
4390 tree t
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
4392 for (unsigned int p
= 0; p
< i
; p
++)
4394 return TREE_VALUE (t
);
4397 /* Return access mode for argument I of call E with FNSPEC. */
4399 static modref_access_node
4400 get_access_for_fnspec (cgraph_edge
*e
, attr_fnspec
&fnspec
,
4401 unsigned int i
, modref_parm_map
&map
)
4403 tree size
= NULL_TREE
;
4404 unsigned int size_arg
;
4406 if (!fnspec
.arg_specified_p (i
))
4408 else if (fnspec
.arg_max_access_size_given_by_arg_p (i
, &size_arg
))
4410 cgraph_node
*node
= e
->caller
->inlined_to
4411 ? e
->caller
->inlined_to
: e
->caller
;
4412 ipa_node_params
*caller_parms_info
= ipa_node_params_sum
->get (node
);
4413 ipa_edge_args
*args
= ipa_edge_args_sum
->get (e
);
4414 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, size_arg
);
4417 size
= ipa_value_from_jfunc (caller_parms_info
, jf
,
4418 get_parm_type (e
->callee
->decl
, size_arg
));
4420 else if (fnspec
.arg_access_size_given_by_type_p (i
))
4421 size
= TYPE_SIZE_UNIT (get_parm_type (e
->callee
->decl
, i
));
4422 modref_access_node a
= {0, -1, -1,
4423 map
.parm_offset
, map
.parm_index
,
4424 map
.parm_offset_known
, 0};
4425 poly_int64 size_hwi
;
4427 && poly_int_tree_p (size
, &size_hwi
)
4428 && coeffs_in_range_p (size_hwi
, 0,
4429 HOST_WIDE_INT_MAX
/ BITS_PER_UNIT
))
4432 a
.max_size
= size_hwi
<< LOG2_BITS_PER_UNIT
;
4437 /* Collapse loads and return true if something changed. */
4439 collapse_loads (modref_summary
*cur_summary
,
4440 modref_summary_lto
*cur_summary_lto
)
4442 bool changed
= false;
4444 if (cur_summary
&& !cur_summary
->loads
->every_base
)
4446 cur_summary
->loads
->collapse ();
4450 && !cur_summary_lto
->loads
->every_base
)
4452 cur_summary_lto
->loads
->collapse ();
4458 /* Collapse loads and return true if something changed. */
4461 collapse_stores (modref_summary
*cur_summary
,
4462 modref_summary_lto
*cur_summary_lto
)
4464 bool changed
= false;
4466 if (cur_summary
&& !cur_summary
->stores
->every_base
)
4468 cur_summary
->stores
->collapse ();
4472 && !cur_summary_lto
->stores
->every_base
)
4474 cur_summary_lto
->stores
->collapse ();
4480 /* Call E in NODE with ECF_FLAGS has no summary; update MODREF_SUMMARY and
4481 CUR_SUMMARY_LTO accordingly. Return true if something changed. */
4484 propagate_unknown_call (cgraph_node
*node
,
4485 cgraph_edge
*e
, int ecf_flags
,
4486 modref_summary
*cur_summary
,
4487 modref_summary_lto
*cur_summary_lto
,
4488 bool nontrivial_scc
)
4490 bool changed
= false;
4491 class fnspec_summary
*fnspec_sum
= fnspec_summaries
->get (e
);
4492 auto_vec
<modref_parm_map
, 32> parm_map
;
4496 && builtin_safe_for_const_function_p (&looping
, e
->callee
->decl
))
4498 if (looping
&& cur_summary
&& !cur_summary
->side_effects
)
4500 cur_summary
->side_effects
= true;
4503 if (looping
&& cur_summary_lto
&& !cur_summary_lto
->side_effects
)
4505 cur_summary_lto
->side_effects
= true;
4511 if (!(ecf_flags
& (ECF_CONST
| ECF_NOVOPS
| ECF_PURE
))
4512 || (ecf_flags
& ECF_LOOPING_CONST_OR_PURE
)
4515 if (cur_summary
&& !cur_summary
->side_effects
)
4517 cur_summary
->side_effects
= true;
4520 if (cur_summary_lto
&& !cur_summary_lto
->side_effects
)
4522 cur_summary_lto
->side_effects
= true;
4525 if (cur_summary
&& !cur_summary
->nondeterministic
4526 && !ignore_nondeterminism_p (node
->decl
, ecf_flags
))
4528 cur_summary
->nondeterministic
= true;
4531 if (cur_summary_lto
&& !cur_summary_lto
->nondeterministic
4532 && !ignore_nondeterminism_p (node
->decl
, ecf_flags
))
4534 cur_summary_lto
->nondeterministic
= true;
4538 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
4542 && compute_parm_map (e
, &parm_map
))
4544 attr_fnspec
fnspec (fnspec_sum
->fnspec
);
4546 gcc_checking_assert (fnspec
.known_p ());
4547 if (fnspec
.global_memory_read_p ())
4548 collapse_loads (cur_summary
, cur_summary_lto
);
4551 tree t
= TYPE_ARG_TYPES (TREE_TYPE (e
->callee
->decl
));
4552 for (unsigned i
= 0; i
< parm_map
.length () && t
;
4553 i
++, t
= TREE_CHAIN (t
))
4554 if (!POINTER_TYPE_P (TREE_VALUE (t
)))
4556 else if (!fnspec
.arg_specified_p (i
)
4557 || fnspec
.arg_maybe_read_p (i
))
4559 modref_parm_map map
= parm_map
[i
];
4560 if (map
.parm_index
== MODREF_LOCAL_MEMORY_PARM
)
4562 if (map
.parm_index
== MODREF_UNKNOWN_PARM
)
4564 collapse_loads (cur_summary
, cur_summary_lto
);
4568 changed
|= cur_summary
->loads
->insert
4570 get_access_for_fnspec (e
, fnspec
, i
, map
), false);
4571 if (cur_summary_lto
)
4572 changed
|= cur_summary_lto
->loads
->insert
4574 get_access_for_fnspec (e
, fnspec
, i
, map
), false);
4577 if (ignore_stores_p (node
->decl
, ecf_flags
))
4579 else if (fnspec
.global_memory_written_p ())
4580 collapse_stores (cur_summary
, cur_summary_lto
);
4583 tree t
= TYPE_ARG_TYPES (TREE_TYPE (e
->callee
->decl
));
4584 for (unsigned i
= 0; i
< parm_map
.length () && t
;
4585 i
++, t
= TREE_CHAIN (t
))
4586 if (!POINTER_TYPE_P (TREE_VALUE (t
)))
4588 else if (!fnspec
.arg_specified_p (i
)
4589 || fnspec
.arg_maybe_written_p (i
))
4591 modref_parm_map map
= parm_map
[i
];
4592 if (map
.parm_index
== MODREF_LOCAL_MEMORY_PARM
)
4594 if (map
.parm_index
== MODREF_UNKNOWN_PARM
)
4596 collapse_stores (cur_summary
, cur_summary_lto
);
4600 changed
|= cur_summary
->stores
->insert
4602 get_access_for_fnspec (e
, fnspec
, i
, map
), false);
4603 if (cur_summary_lto
)
4604 changed
|= cur_summary_lto
->stores
->insert
4606 get_access_for_fnspec (e
, fnspec
, i
, map
), false);
4609 if (fnspec
.errno_maybe_written_p () && flag_errno_math
)
4611 if (cur_summary
&& !cur_summary
->writes_errno
)
4613 cur_summary
->writes_errno
= true;
4616 if (cur_summary_lto
&& !cur_summary_lto
->writes_errno
)
4618 cur_summary_lto
->writes_errno
= true;
4625 fprintf (dump_file
, " collapsing loads\n");
4626 changed
|= collapse_loads (cur_summary
, cur_summary_lto
);
4627 if (!ignore_stores_p (node
->decl
, ecf_flags
))
4630 fprintf (dump_file
, " collapsing stores\n");
4631 changed
|= collapse_stores (cur_summary
, cur_summary_lto
);
4636 /* Maybe remove summaries of NODE pointed to by CUR_SUMMARY_PTR
4637 and CUR_SUMMARY_LTO_PTR if they are useless according to ECF_FLAGS. */
4640 remove_useless_summaries (cgraph_node
*node
,
4641 modref_summary
**cur_summary_ptr
,
4642 modref_summary_lto
**cur_summary_lto_ptr
,
4645 if (*cur_summary_ptr
&& !(*cur_summary_ptr
)->useful_p (ecf_flags
, false))
4647 optimization_summaries
->remove (node
);
4648 *cur_summary_ptr
= NULL
;
4650 if (*cur_summary_lto_ptr
4651 && !(*cur_summary_lto_ptr
)->useful_p (ecf_flags
, false))
4653 summaries_lto
->remove (node
);
4654 *cur_summary_lto_ptr
= NULL
;
4658 /* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
4659 and propagate loads/stores. */
4662 modref_propagate_in_scc (cgraph_node
*component_node
)
4664 bool changed
= true;
4671 = ((struct ipa_dfs_info
*) component_node
->aux
)->next_cycle
;
4673 for (struct cgraph_node
*cur
= component_node
; cur
;
4674 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
4676 cgraph_node
*node
= cur
->inlined_to
? cur
->inlined_to
: cur
;
4677 modref_summary
*cur_summary
= optimization_summaries
4678 ? optimization_summaries
->get (node
)
4680 modref_summary_lto
*cur_summary_lto
= summaries_lto
4681 ? summaries_lto
->get (node
)
4684 if (!cur_summary
&& !cur_summary_lto
)
4687 int cur_ecf_flags
= flags_from_decl_or_type (node
->decl
);
4690 fprintf (dump_file
, " Processing %s%s%s\n",
4692 TREE_READONLY (cur
->decl
) ? " (const)" : "",
4693 DECL_PURE_P (cur
->decl
) ? " (pure)" : "");
4695 for (cgraph_edge
*e
= cur
->indirect_calls
; e
; e
= e
->next_callee
)
4698 fprintf (dump_file
, " Indirect call\n");
4699 if (propagate_unknown_call
4700 (node
, e
, e
->indirect_info
->ecf_flags
,
4701 cur_summary
, cur_summary_lto
,
4705 remove_useless_summaries (node
, &cur_summary
,
4708 if (!cur_summary
&& !cur_summary_lto
)
4713 if (!cur_summary
&& !cur_summary_lto
)
4716 for (cgraph_edge
*callee_edge
= cur
->callees
; callee_edge
;
4717 callee_edge
= callee_edge
->next_callee
)
4719 int flags
= flags_from_decl_or_type (callee_edge
->callee
->decl
);
4720 modref_summary
*callee_summary
= NULL
;
4721 modref_summary_lto
*callee_summary_lto
= NULL
;
4722 struct cgraph_node
*callee
;
4724 if (!callee_edge
->inline_failed
4725 || ((flags
& (ECF_CONST
| ECF_NOVOPS
))
4726 && !(flags
& ECF_LOOPING_CONST_OR_PURE
)))
4729 /* Get the callee and its summary. */
4730 enum availability avail
;
4731 callee
= callee_edge
->callee
->ultimate_alias_target
4734 /* It is not necessary to re-process calls outside of the
4738 || ((struct ipa_dfs_info
*)cur
->aux
)->scc_no
4739 != ((struct ipa_dfs_info
*)callee
->aux
)->scc_no
))
4743 fprintf (dump_file
, " Call to %s\n",
4744 callee_edge
->callee
->dump_name ());
4746 bool ignore_stores
= ignore_stores_p (cur
->decl
, flags
);
4748 if (avail
<= AVAIL_INTERPOSABLE
)
4751 fprintf (dump_file
, " Call target interposable"
4752 " or not available\n");
4753 changed
|= propagate_unknown_call
4754 (node
, callee_edge
, flags
,
4755 cur_summary
, cur_summary_lto
,
4757 if (!cur_summary
&& !cur_summary_lto
)
4762 /* We don't know anything about CALLEE, hence we cannot tell
4763 anything about the entire component. */
4766 && !(callee_summary
= optimization_summaries
->get (callee
)))
4769 fprintf (dump_file
, " No call target summary\n");
4770 changed
|= propagate_unknown_call
4771 (node
, callee_edge
, flags
,
4776 && !(callee_summary_lto
= summaries_lto
->get (callee
)))
4779 fprintf (dump_file
, " No call target summary\n");
4780 changed
|= propagate_unknown_call
4781 (node
, callee_edge
, flags
,
4782 NULL
, cur_summary_lto
,
4786 if (callee_summary
&& !cur_summary
->side_effects
4787 && (callee_summary
->side_effects
4788 || callee_edge
->recursive_p ()))
4790 cur_summary
->side_effects
= true;
4793 if (callee_summary_lto
&& !cur_summary_lto
->side_effects
4794 && (callee_summary_lto
->side_effects
4795 || callee_edge
->recursive_p ()))
4797 cur_summary_lto
->side_effects
= true;
4800 if (callee_summary
&& !cur_summary
->nondeterministic
4801 && callee_summary
->nondeterministic
4802 && !ignore_nondeterminism_p (cur
->decl
, flags
))
4804 cur_summary
->nondeterministic
= true;
4807 if (callee_summary_lto
&& !cur_summary_lto
->nondeterministic
4808 && callee_summary_lto
->nondeterministic
4809 && !ignore_nondeterminism_p (cur
->decl
, flags
))
4811 cur_summary_lto
->nondeterministic
= true;
4814 if (flags
& (ECF_CONST
| ECF_NOVOPS
))
4817 /* We can not safely optimize based on summary of callee if it
4818 does not always bind to current def: it is possible that
4819 memory load was optimized out earlier which may not happen in
4820 the interposed variant. */
4821 if (!callee_edge
->binds_to_current_def_p ())
4823 if (cur_summary
&& !cur_summary
->calls_interposable
)
4825 cur_summary
->calls_interposable
= true;
4828 if (cur_summary_lto
&& !cur_summary_lto
->calls_interposable
)
4830 cur_summary_lto
->calls_interposable
= true;
4834 fprintf (dump_file
, " May not bind local;"
4835 " collapsing loads\n");
4839 auto_vec
<modref_parm_map
, 32> parm_map
;
4840 modref_parm_map chain_map
;
4841 /* TODO: Once we get jump functions for static chains we could
4843 chain_map
.parm_index
= MODREF_UNKNOWN_PARM
;
4845 compute_parm_map (callee_edge
, &parm_map
);
4847 /* Merge in callee's information. */
4850 changed
|= cur_summary
->loads
->merge
4851 (node
->decl
, callee_summary
->loads
,
4852 &parm_map
, &chain_map
, !first
);
4855 changed
|= cur_summary
->stores
->merge
4856 (node
->decl
, callee_summary
->stores
,
4857 &parm_map
, &chain_map
, !first
);
4858 if (!cur_summary
->writes_errno
4859 && callee_summary
->writes_errno
)
4861 cur_summary
->writes_errno
= true;
4866 if (callee_summary_lto
)
4868 changed
|= cur_summary_lto
->loads
->merge
4869 (node
->decl
, callee_summary_lto
->loads
,
4870 &parm_map
, &chain_map
, !first
);
4873 changed
|= cur_summary_lto
->stores
->merge
4874 (node
->decl
, callee_summary_lto
->stores
,
4875 &parm_map
, &chain_map
, !first
);
4876 if (!cur_summary_lto
->writes_errno
4877 && callee_summary_lto
->writes_errno
)
4879 cur_summary_lto
->writes_errno
= true;
4885 remove_useless_summaries (node
, &cur_summary
,
4888 if (!cur_summary
&& !cur_summary_lto
)
4890 if (dump_file
&& changed
)
4893 cur_summary
->dump (dump_file
);
4894 if (cur_summary_lto
)
4895 cur_summary_lto
->dump (dump_file
);
4896 dump_modref_edge_summaries (dump_file
, node
, 4);
4905 "Propagation finished in %i iterations\n", iteration
);
4906 bool pureconst
= false;
4907 for (struct cgraph_node
*cur
= component_node
; cur
;
4908 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
4909 if (!cur
->inlined_to
&& opt_for_fn (cur
->decl
, flag_ipa_pure_const
))
4911 modref_summary
*summary
= optimization_summaries
4912 ? optimization_summaries
->get (cur
)
4914 modref_summary_lto
*summary_lto
= summaries_lto
4915 ? summaries_lto
->get (cur
)
4917 if (summary
&& !summary
->stores
->every_base
&& !summary
->stores
->bases
4918 && !summary
->nondeterministic
)
4920 if (!summary
->loads
->every_base
&& !summary
->loads
->bases
4921 && !summary
->calls_interposable
)
4922 pureconst
|= ipa_make_function_const
4923 (cur
, summary
->side_effects
, false);
4925 pureconst
|= ipa_make_function_pure
4926 (cur
, summary
->side_effects
, false);
4928 if (summary_lto
&& !summary_lto
->stores
->every_base
4929 && !summary_lto
->stores
->bases
&& !summary_lto
->nondeterministic
)
4931 if (!summary_lto
->loads
->every_base
&& !summary_lto
->loads
->bases
4932 && !summary_lto
->calls_interposable
)
4933 pureconst
|= ipa_make_function_const
4934 (cur
, summary_lto
->side_effects
, false);
4936 pureconst
|= ipa_make_function_pure
4937 (cur
, summary_lto
->side_effects
, false);
4943 /* Dump results of propagation in SCC rooted in COMPONENT_NODE. */
4946 modref_propagate_dump_scc (cgraph_node
*component_node
)
4948 for (struct cgraph_node
*cur
= component_node
; cur
;
4949 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
4950 if (!cur
->inlined_to
)
4952 modref_summary
*cur_summary
= optimization_summaries
4953 ? optimization_summaries
->get (cur
)
4955 modref_summary_lto
*cur_summary_lto
= summaries_lto
4956 ? summaries_lto
->get (cur
)
4959 fprintf (dump_file
, "Propagated modref for %s%s%s\n",
4961 TREE_READONLY (cur
->decl
) ? " (const)" : "",
4962 DECL_PURE_P (cur
->decl
) ? " (pure)" : "");
4963 if (optimization_summaries
)
4966 cur_summary
->dump (dump_file
);
4968 fprintf (dump_file
, " Not tracked\n");
4972 if (cur_summary_lto
)
4973 cur_summary_lto
->dump (dump_file
);
4975 fprintf (dump_file
, " Not tracked (lto)\n");
4980 /* Determine EAF flags know for call E with CALLEE_ECF_FLAGS and ARG. */
4983 implicit_eaf_flags_for_edge_and_arg (cgraph_edge
*e
, int callee_ecf_flags
,
4984 bool ignore_stores
, int arg
)
4986 /* Returning the value is already accounted to at local propagation. */
4987 int implicit_flags
= EAF_NOT_RETURNED_DIRECTLY
4988 | EAF_NOT_RETURNED_INDIRECTLY
;
4990 implicit_flags
|= ignore_stores_eaf_flags
;
4991 if (callee_ecf_flags
& ECF_PURE
)
4992 implicit_flags
|= implicit_pure_eaf_flags
;
4993 if (callee_ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
4994 implicit_flags
|= implicit_const_eaf_flags
;
4995 class fnspec_summary
*fnspec_sum
= fnspec_summaries
->get (e
);
4998 attr_fnspec
fnspec (fnspec_sum
->fnspec
);
4999 implicit_flags
|= fnspec
.arg_eaf_flags (arg
);
5001 return implicit_flags
;
5004 /* Process escapes in SUM and merge SUMMARY to CUR_SUMMARY
5005 and SUMMARY_LTO to CUR_SUMMARY_LTO.
5006 Return true if something changed. */
5009 modref_merge_call_site_flags (escape_summary
*sum
,
5010 modref_summary
*cur_summary
,
5011 modref_summary_lto
*cur_summary_lto
,
5012 modref_summary
*summary
,
5013 modref_summary_lto
*summary_lto
,
5016 int caller_ecf_flags
,
5017 int callee_ecf_flags
,
5018 bool binds_to_current_def
)
5022 bool changed
= false;
5023 bool ignore_stores
= ignore_stores_p (caller
, callee_ecf_flags
);
5025 /* Return early if we have no useful info to propagate. */
5027 || (!cur_summary
->arg_flags
.length ()
5028 && !cur_summary
->static_chain_flags
5029 && !cur_summary
->retslot_flags
))
5030 && (!cur_summary_lto
5031 || (!cur_summary_lto
->arg_flags
.length ()
5032 && !cur_summary_lto
->static_chain_flags
5033 && !cur_summary_lto
->retslot_flags
)))
5036 FOR_EACH_VEC_ELT (sum
->esc
, i
, ee
)
5040 int implicit_flags
= implicit_eaf_flags_for_edge_and_arg
5041 (e
, callee_ecf_flags
, ignore_stores
, ee
->arg
);
5043 if (summary
&& ee
->arg
< summary
->arg_flags
.length ())
5044 flags
= summary
->arg_flags
[ee
->arg
];
5046 && ee
->arg
< summary_lto
->arg_flags
.length ())
5047 flags_lto
= summary_lto
->arg_flags
[ee
->arg
];
5050 flags
= deref_flags (flags
, ignore_stores
);
5051 flags_lto
= deref_flags (flags_lto
, ignore_stores
);
5054 implicit_flags
|= ignore_stores_eaf_flags
;
5055 if (callee_ecf_flags
& ECF_PURE
)
5056 implicit_flags
|= implicit_pure_eaf_flags
;
5057 if (callee_ecf_flags
& (ECF_CONST
| ECF_NOVOPS
))
5058 implicit_flags
|= implicit_const_eaf_flags
;
5059 class fnspec_summary
*fnspec_sum
= fnspec_summaries
->get (e
);
5062 attr_fnspec
fnspec (fnspec_sum
->fnspec
);
5063 implicit_flags
|= fnspec
.arg_eaf_flags (ee
->arg
);
5066 implicit_flags
= deref_flags (implicit_flags
, ignore_stores
);
5067 flags
|= implicit_flags
;
5068 flags_lto
|= implicit_flags
;
5069 if (!binds_to_current_def
&& (flags
|| flags_lto
))
5071 flags
= interposable_eaf_flags (flags
, implicit_flags
);
5072 flags_lto
= interposable_eaf_flags (flags_lto
, implicit_flags
);
5074 if (!(flags
& EAF_UNUSED
)
5075 && cur_summary
&& ee
->parm_index
< (int)cur_summary
->arg_flags
.length ())
5077 eaf_flags_t
&f
= ee
->parm_index
== MODREF_RETSLOT_PARM
5078 ? cur_summary
->retslot_flags
5079 : ee
->parm_index
== MODREF_STATIC_CHAIN_PARM
5080 ? cur_summary
->static_chain_flags
5081 : cur_summary
->arg_flags
[ee
->parm_index
];
5082 if ((f
& flags
) != f
)
5084 f
= remove_useless_eaf_flags
5085 (f
& flags
, caller_ecf_flags
,
5086 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller
))));
5090 if (!(flags_lto
& EAF_UNUSED
)
5092 && ee
->parm_index
< (int)cur_summary_lto
->arg_flags
.length ())
5094 eaf_flags_t
&f
= ee
->parm_index
== MODREF_RETSLOT_PARM
5095 ? cur_summary_lto
->retslot_flags
5096 : ee
->parm_index
== MODREF_STATIC_CHAIN_PARM
5097 ? cur_summary_lto
->static_chain_flags
5098 : cur_summary_lto
->arg_flags
[ee
->parm_index
];
5099 if ((f
& flags_lto
) != f
)
5101 f
= remove_useless_eaf_flags
5102 (f
& flags_lto
, caller_ecf_flags
,
5103 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller
))));
5111 /* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
5112 and propagate arg flags. */
5115 modref_propagate_flags_in_scc (cgraph_node
*component_node
)
5117 bool changed
= true;
5123 for (struct cgraph_node
*cur
= component_node
; cur
;
5124 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
5126 cgraph_node
*node
= cur
->inlined_to
? cur
->inlined_to
: cur
;
5127 modref_summary
*cur_summary
= optimization_summaries
5128 ? optimization_summaries
->get (node
)
5130 modref_summary_lto
*cur_summary_lto
= summaries_lto
5131 ? summaries_lto
->get (node
)
5134 if (!cur_summary
&& !cur_summary_lto
)
5136 int caller_ecf_flags
= flags_from_decl_or_type (cur
->decl
);
5139 fprintf (dump_file
, " Processing %s%s%s\n",
5141 TREE_READONLY (cur
->decl
) ? " (const)" : "",
5142 DECL_PURE_P (cur
->decl
) ? " (pure)" : "");
5144 for (cgraph_edge
*e
= cur
->indirect_calls
; e
; e
= e
->next_callee
)
5146 escape_summary
*sum
= escape_summaries
->get (e
);
5148 if (!sum
|| (e
->indirect_info
->ecf_flags
5149 & (ECF_CONST
| ECF_NOVOPS
)))
5152 changed
|= modref_merge_call_site_flags
5153 (sum
, cur_summary
, cur_summary_lto
,
5158 e
->indirect_info
->ecf_flags
,
5162 if (!cur_summary
&& !cur_summary_lto
)
5165 for (cgraph_edge
*callee_edge
= cur
->callees
; callee_edge
;
5166 callee_edge
= callee_edge
->next_callee
)
5168 int ecf_flags
= flags_from_decl_or_type
5169 (callee_edge
->callee
->decl
);
5170 modref_summary
*callee_summary
= NULL
;
5171 modref_summary_lto
*callee_summary_lto
= NULL
;
5172 struct cgraph_node
*callee
;
5174 if (ecf_flags
& (ECF_CONST
| ECF_NOVOPS
)
5175 || !callee_edge
->inline_failed
)
5178 /* Get the callee and its summary. */
5179 enum availability avail
;
5180 callee
= callee_edge
->callee
->ultimate_alias_target
5183 /* It is not necessary to re-process calls outside of the
5187 || ((struct ipa_dfs_info
*)cur
->aux
)->scc_no
5188 != ((struct ipa_dfs_info
*)callee
->aux
)->scc_no
))
5191 escape_summary
*sum
= escape_summaries
->get (callee_edge
);
5196 fprintf (dump_file
, " Call to %s\n",
5197 callee_edge
->callee
->dump_name ());
5199 if (avail
<= AVAIL_INTERPOSABLE
5200 || callee_edge
->call_stmt_cannot_inline_p
)
5205 callee_summary
= optimization_summaries
->get (callee
);
5206 if (cur_summary_lto
)
5207 callee_summary_lto
= summaries_lto
->get (callee
);
5209 changed
|= modref_merge_call_site_flags
5210 (sum
, cur_summary
, cur_summary_lto
,
5211 callee_summary
, callee_summary_lto
,
5216 callee
->binds_to_current_def_p ());
5217 if (dump_file
&& changed
)
5220 cur_summary
->dump (dump_file
);
5221 if (cur_summary_lto
)
5222 cur_summary_lto
->dump (dump_file
);
5230 "Propagation of flags finished in %i iterations\n", iteration
);
5233 } /* ANON namespace. */
5235 /* Call EDGE was inlined; merge summary from callee to the caller. */
5238 ipa_merge_modref_summary_after_inlining (cgraph_edge
*edge
)
5240 if (!summaries
&& !summaries_lto
)
5243 struct cgraph_node
*to
= (edge
->caller
->inlined_to
5244 ? edge
->caller
->inlined_to
: edge
->caller
);
5245 class modref_summary
*to_info
= summaries
? summaries
->get (to
) : NULL
;
5246 class modref_summary_lto
*to_info_lto
= summaries_lto
5247 ? summaries_lto
->get (to
) : NULL
;
5249 if (!to_info
&& !to_info_lto
)
5252 summaries
->remove (edge
->callee
);
5254 summaries_lto
->remove (edge
->callee
);
5255 remove_modref_edge_summaries (edge
->callee
);
5259 class modref_summary
*callee_info
= summaries
? summaries
->get (edge
->callee
)
5261 class modref_summary_lto
*callee_info_lto
5262 = summaries_lto
? summaries_lto
->get (edge
->callee
) : NULL
;
5263 int flags
= flags_from_decl_or_type (edge
->callee
->decl
);
5264 /* Combine in outer flags. */
5266 for (n
= edge
->caller
; n
->inlined_to
; n
= n
->callers
->caller
)
5267 flags
|= flags_from_decl_or_type (n
->decl
);
5268 flags
|= flags_from_decl_or_type (n
->decl
);
5269 bool ignore_stores
= ignore_stores_p (edge
->caller
->decl
, flags
);
5271 if (!callee_info
&& to_info
)
5273 if (!(flags
& (ECF_CONST
| ECF_NOVOPS
)))
5274 to_info
->loads
->collapse ();
5276 to_info
->stores
->collapse ();
5278 if (!callee_info_lto
&& to_info_lto
)
5280 if (!(flags
& (ECF_CONST
| ECF_NOVOPS
)))
5281 to_info_lto
->loads
->collapse ();
5283 to_info_lto
->stores
->collapse ();
5285 /* Merge side effects and non-determinism.
5286 PURE/CONST flags makes functions deterministic and if there is
5287 no LOOPING_CONST_OR_PURE they also have no side effects. */
5288 if (!(flags
& (ECF_CONST
| ECF_NOVOPS
| ECF_PURE
))
5289 || (flags
& ECF_LOOPING_CONST_OR_PURE
))
5293 if (!callee_info
|| callee_info
->side_effects
)
5294 to_info
->side_effects
= true;
5295 if ((!callee_info
|| callee_info
->nondeterministic
)
5296 && !ignore_nondeterminism_p (edge
->caller
->decl
, flags
))
5297 to_info
->nondeterministic
= true;
5301 if (!callee_info_lto
|| callee_info_lto
->side_effects
)
5302 to_info_lto
->side_effects
= true;
5303 if ((!callee_info_lto
|| callee_info_lto
->nondeterministic
)
5304 && !ignore_nondeterminism_p (edge
->caller
->decl
, flags
))
5305 to_info_lto
->nondeterministic
= true;
5308 if (callee_info
|| callee_info_lto
)
5310 auto_vec
<modref_parm_map
, 32> parm_map
;
5311 modref_parm_map chain_map
;
5312 /* TODO: Once we get jump functions for static chains we could
5313 compute parm_index. */
5315 compute_parm_map (edge
, &parm_map
);
5319 if (to_info
&& callee_info
)
5320 to_info
->stores
->merge (to
->decl
, callee_info
->stores
, &parm_map
,
5322 if (to_info_lto
&& callee_info_lto
)
5323 to_info_lto
->stores
->merge (to
->decl
, callee_info_lto
->stores
,
5324 &parm_map
, &chain_map
, false);
5326 if (!(flags
& (ECF_CONST
| ECF_NOVOPS
)))
5328 if (to_info
&& callee_info
)
5329 to_info
->loads
->merge (to
->decl
, callee_info
->loads
, &parm_map
,
5331 if (to_info_lto
&& callee_info_lto
)
5332 to_info_lto
->loads
->merge (to
->decl
, callee_info_lto
->loads
,
5333 &parm_map
, &chain_map
, false);
5337 /* Now merge escape summaries.
5338 For every escape to the callee we need to merge callee flags
5339 and remap callee's escapes. */
5340 class escape_summary
*sum
= escape_summaries
->get (edge
);
5341 int max_escape
= -1;
5345 if (sum
&& !(flags
& (ECF_CONST
| ECF_NOVOPS
)))
5346 FOR_EACH_VEC_ELT (sum
->esc
, i
, ee
)
5347 if ((int)ee
->arg
> max_escape
)
5348 max_escape
= ee
->arg
;
5350 auto_vec
<vec
<struct escape_map
>, 32> emap (max_escape
+ 1);
5351 emap
.safe_grow (max_escape
+ 1, true);
5352 for (i
= 0; (int)i
< max_escape
+ 1; i
++)
5355 if (sum
&& !(flags
& (ECF_CONST
| ECF_NOVOPS
)))
5356 FOR_EACH_VEC_ELT (sum
->esc
, i
, ee
)
5358 bool needed
= false;
5359 int implicit_flags
= implicit_eaf_flags_for_edge_and_arg
5360 (edge
, flags
, ignore_stores
,
5363 implicit_flags
= deref_flags (implicit_flags
, ignore_stores
);
5364 if (to_info
&& (int)to_info
->arg_flags
.length () > ee
->parm_index
)
5366 int flags
= callee_info
5367 && callee_info
->arg_flags
.length () > ee
->arg
5368 ? callee_info
->arg_flags
[ee
->arg
] : 0;
5370 flags
= deref_flags (flags
, ignore_stores
);
5371 flags
|= ee
->min_flags
| implicit_flags
;
5372 eaf_flags_t
&f
= ee
->parm_index
== MODREF_RETSLOT_PARM
5373 ? to_info
->retslot_flags
5374 : ee
->parm_index
== MODREF_STATIC_CHAIN_PARM
5375 ? to_info
->static_chain_flags
5376 : to_info
->arg_flags
[ee
->parm_index
];
5382 && (int)to_info_lto
->arg_flags
.length () > ee
->parm_index
)
5384 int flags
= callee_info_lto
5385 && callee_info_lto
->arg_flags
.length () > ee
->arg
5386 ? callee_info_lto
->arg_flags
[ee
->arg
] : 0;
5388 flags
= deref_flags (flags
, ignore_stores
);
5389 flags
|= ee
->min_flags
| implicit_flags
;
5390 eaf_flags_t
&f
= ee
->parm_index
== MODREF_RETSLOT_PARM
5391 ? to_info_lto
->retslot_flags
5392 : ee
->parm_index
== MODREF_STATIC_CHAIN_PARM
5393 ? to_info_lto
->static_chain_flags
5394 : to_info_lto
->arg_flags
[ee
->parm_index
];
5399 struct escape_map entry
= {ee
->parm_index
, ee
->direct
};
5401 emap
[ee
->arg
].safe_push (entry
);
5403 update_escape_summary (edge
->callee
, emap
, ignore_stores
);
5404 for (i
= 0; (int)i
< max_escape
+ 1; i
++)
5407 escape_summaries
->remove (edge
);
5411 if (to_info
&& !to_info
->useful_p (flags
))
5414 fprintf (dump_file
, "Removed mod-ref summary for %s\n",
5416 summaries
->remove (to
);
5419 else if (to_info
&& dump_file
)
5422 fprintf (dump_file
, "Updated mod-ref summary for %s\n",
5424 to_info
->dump (dump_file
);
5427 summaries
->remove (edge
->callee
);
5431 if (to_info_lto
&& !to_info_lto
->useful_p (flags
))
5434 fprintf (dump_file
, "Removed mod-ref summary for %s\n",
5436 summaries_lto
->remove (to
);
5439 else if (to_info_lto
&& dump_file
)
5442 fprintf (dump_file
, "Updated mod-ref summary for %s\n",
5444 to_info_lto
->dump (dump_file
);
5446 if (callee_info_lto
)
5447 summaries_lto
->remove (edge
->callee
);
5449 if (!to_info
&& !to_info_lto
)
5450 remove_modref_edge_summaries (to
);
5454 /* Run the IPA pass. This will take a function's summaries and calls and
5455 construct new summaries which represent a transitive closure. So that
5456 summary of an analyzed function contains information about the loads and
5457 stores that the function or any function that it calls does. */
5460 pass_ipa_modref::execute (function
*)
5462 if (!summaries
&& !summaries_lto
)
5464 bool pureconst
= false;
5466 if (optimization_summaries
)
5467 ggc_delete (optimization_summaries
);
5468 optimization_summaries
= summaries
;
5471 struct cgraph_node
**order
= XCNEWVEC (struct cgraph_node
*,
5472 symtab
->cgraph_count
);
5474 order_pos
= ipa_reduced_postorder (order
, true, ignore_edge
);
5477 /* Iterate over all strongly connected components in post-order. */
5478 for (i
= 0; i
< order_pos
; i
++)
5480 /* Get the component's representative. That's just any node in the
5481 component from which we can traverse the entire component. */
5482 struct cgraph_node
*component_node
= order
[i
];
5485 fprintf (dump_file
, "\n\nStart of SCC component\n");
5487 pureconst
|= modref_propagate_in_scc (component_node
);
5488 modref_propagate_flags_in_scc (component_node
);
5489 if (optimization_summaries
)
5490 for (struct cgraph_node
*cur
= component_node
; cur
;
5491 cur
= ((struct ipa_dfs_info
*) cur
->aux
)->next_cycle
)
5492 if (modref_summary
*sum
= optimization_summaries
->get (cur
))
5493 sum
->finalize (cur
->decl
);
5495 modref_propagate_dump_scc (component_node
);
5498 FOR_EACH_FUNCTION (node
)
5499 update_signature (node
);
5501 ((modref_summaries_lto
*)summaries_lto
)->propagated
= true;
5502 ipa_free_postorder_info ();
5504 delete fnspec_summaries
;
5505 fnspec_summaries
= NULL
;
5506 delete escape_summaries
;
5507 escape_summaries
= NULL
;
5509 /* If we possibly made constructors const/pure we may need to remove
5511 return pureconst
? TODO_remove_functions
: 0;
5514 /* Summaries must stay alive until end of compilation. */
5517 ipa_modref_cc_finalize ()
5519 if (optimization_summaries
)
5520 ggc_delete (optimization_summaries
);
5521 optimization_summaries
= NULL
;
5523 ggc_delete (summaries_lto
);
5524 summaries_lto
= NULL
;
5525 if (fnspec_summaries
)
5526 delete fnspec_summaries
;
5527 fnspec_summaries
= NULL
;
5528 if (escape_summaries
)
5529 delete escape_summaries
;
5530 escape_summaries
= NULL
;
5533 #include "gt-ipa-modref.h"