/* Analysis used by inlining decision heuristics.
   Copyright (C) 2003-2019 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
30 #include "tree-streamer.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "print-tree.h"
35 #include "tree-inline.h"
36 #include "gimple-pretty-print.h"
39 #include "gimple-iterator.h"
41 #include "tree-ssa-loop-niter.h"
42 #include "tree-ssa-loop.h"
43 #include "symbol-summary.h"
45 #include "ipa-fnsummary.h"
46 #include "ipa-inline.h"
48 #include "tree-scalar-evolution.h"
49 #include "ipa-utils.h"
50 #include "cfgexpand.h"
/* Cached node/edge growths.  */

/* Lazily allocated per-edge cache of inlining size/time/hint estimates;
   populated by do_estimate_edge_time and flushed by free_growth_caches.  */
call_summary<edge_growth_cache_entry *> *edge_growth_cache = NULL;
/* Give initial reasons why inlining would fail on EDGE.  This gets either
   nullified or usually overwritten by more precise reasons later.  */

/* NOTE(review): extraction dropped several original lines inside this
   function (the return type, braces, the empty statement after the first
   "if", and an "else" before the final assignment) -- verify against the
   upstream file before relying on the literal text below.  */
initialize_inline_failed (struct cgraph_edge *e)
  /* Direct callee of the edge; NULL-ish semantics for indirect edges are
     handled by the indirect_unknown_callee test below.  */
  struct cgraph_node *callee = e->callee;

  /* A previously recorded final (non-overridable) failure reason is kept.  */
  if (e->inline_failed && e->inline_failed != CIF_BODY_NOT_AVAILABLE
      && cgraph_inline_failed_type (e->inline_failed) == CIF_FINAL_ERROR)
  /* Indirect calls have no known callee whose body could be inlined.  */
  else if (e->indirect_unknown_callee)
    e->inline_failed = CIF_INDIRECT_UNKNOWN_CALL;
  /* No body available for the callee in this translation.  */
  else if (!callee->definition)
    e->inline_failed = CIF_BODY_NOT_AVAILABLE;
  /* extern inline function later redefined; its body must not be used.  */
  else if (callee->local.redefined_extern_inline)
    e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
  /* Default: the inliner simply has not considered this edge yet.  */
  e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
  /* A call statement known to be uninlinable must carry a final reason.  */
  gcc_checking_assert (!e->call_stmt_cannot_inline_p
		       || cgraph_inline_failed_type (e->inline_failed)
/* Free growth caches.  */

/* NOTE(review): the return type line and braces were dropped by the
   extraction; upstream this is a plain void function.  */
free_growth_caches (void)
  /* Deleting the summary releases every cached per-edge entry; reset the
     pointer so the cache can be lazily re-created later.  */
  delete edge_growth_cache;
  edge_growth_cache = NULL;
/* Return hints derived from EDGE.  */

/* NOTE(review): the return type, the declaration/initialization of
   "hints", braces and the final return were dropped by the extraction --
   confirm against upstream.  */
simple_edge_hints (struct cgraph_edge *edge)
  /* TO is the function the call will finally land in, accounting for the
     caller possibly having been inlined elsewhere already.  */
  struct cgraph_node *to = (edge->caller->global.inlined_to
			    ? edge->caller->global.inlined_to
			    : edge->caller);
  /* Resolve aliases so the summary lookup hits the real body.  */
  struct cgraph_node *callee = edge->callee->ultimate_alias_target ();
  int to_scc_no = ipa_fn_summaries->get (to)->scc_no;
  int callee_scc_no = ipa_fn_summaries->get (callee)->scc_no;

  /* Caller and callee sit in the same nontrivial strongly connected
     component of the callgraph (mutual recursion); direct self-recursion
     is excluded by recursive_p.  */
  if (to_scc_no && to_scc_no == callee_scc_no && !edge->recursive_p ())
    hints |= INLINE_HINT_same_scc;

  /* Cross-module edge during LTO; bodies already unified by COMDAT
     merging or ICF do not count as truly cross-module.  */
  if (callee->lto_file_data && edge->caller->lto_file_data
      && edge->caller->lto_file_data != callee->lto_file_data
      && !callee->merged_comdat && !callee->icf_merged)
    hints |= INLINE_HINT_cross_module;
/* Estimate the time cost for the caller when inlining EDGE.
   Only to be called via estimate_edge_time, that handles the
   caching mechanism.

   When caching, also update the cache entry.  Compute both time and
   size, since we always need both metrics eventually.  */

/* NOTE(review): extraction dropped the return type, the declarations of
   "size", "min_size" and "hints", several braces and the final return of
   TIME -- verify against upstream before trusting the literal text.  */
do_estimate_edge_time (struct cgraph_edge *edge)
  sreal time, nonspec_time;
  struct cgraph_node *callee;
  /* Predicate clauses describing which conditions in the callee summary
     are known to hold at this call site (spec = with speculation).  */
  clause_t clause, nonspec_clause;
  vec<tree> known_vals;
  vec<ipa_polymorphic_call_context> known_contexts;
  vec<ipa_agg_jump_function_p> known_aggs;
  struct ipa_call_summary *es = ipa_call_summaries->get (edge);

  callee = edge->callee->ultimate_alias_target ();

  /* Estimating an already-inlined edge makes no sense.  */
  gcc_checking_assert (edge->inline_failed);
  /* Derive known argument values/contexts/aggregates at this call site.  */
  evaluate_properties_for_edge (edge, true,
				&clause, &nonspec_clause, &known_vals,
				&known_contexts, &known_aggs);
  estimate_node_size_and_time (callee, clause, nonspec_clause, known_vals,
			       known_contexts, known_aggs, &size, &min_size,
			       &time, &nonspec_time, &hints, es->param);

  /* When we have profile feedback, we can quite safely identify hot
     edges and for those we disable size limits.  Don't do that when
     probability that caller will call the callee is low however, since it
     may hurt optimization of the caller's hot path.  */
  if (edge->count.ipa ().initialized_p () && edge->maybe_hot_p ()
      && (edge->count.ipa ().apply_scale (2, 1)
	  > (edge->caller->global.inlined_to
	     ? edge->caller->global.inlined_to->count.ipa ()
	     : edge->caller->count.ipa ())))
    hints |= INLINE_HINT_known_hot;

  /* The vectors were allocated by evaluate_properties_for_edge.  */
  known_vals.release ();
  known_contexts.release ();
  known_aggs.release ();
  gcc_checking_assert (size >= 0);
  gcc_checking_assert (time >= 0);

  /* When caching, update the cache entry.  */
  if (edge_growth_cache != NULL)
    /* Also refresh the callee's cached minimal size.  */
    ipa_fn_summaries->get_create (edge->callee)->min_size = min_size;
    edge_growth_cache_entry *entry
      = edge_growth_cache->get_create (edge);
    entry->nonspec_time = nonspec_time;

    /* Size/hints are stored biased (+1 when nonnegative) so that a zero
       cache field means "not computed yet"; the getters undo the bias.  */
    entry->size = size + (size >= 0);
    hints |= simple_edge_hints (edge);
    entry->hints = hints + 1;
/* Return estimated callee growth after inlining EDGE.
   Only to be called via estimate_edge_size.  */

/* NOTE(review): extraction dropped the return type, the declaration of
   "size", trailing argument lines of the two helper calls and the final
   return -- confirm against upstream.  */
do_estimate_edge_size (struct cgraph_edge *edge)
  struct cgraph_node *callee;
  clause_t clause, nonspec_clause;
  vec<tree> known_vals;
  vec<ipa_polymorphic_call_context> known_contexts;
  vec<ipa_agg_jump_function_p> known_aggs;

  /* When we do caching, use do_estimate_edge_time to populate the entry.  */

  if (edge_growth_cache != NULL)
      do_estimate_edge_time (edge);
      size = edge_growth_cache->get (edge)->size;
      /* A zero cached value would mean "never computed" -- the stored
	 value is biased by one to avoid that.  */
      gcc_checking_assert (size);
      /* Undo the +1 bias applied when the entry was stored.  */
      return size - (size > 0);

  callee = edge->callee->ultimate_alias_target ();

  /* Early inliner runs without caching, go ahead and do the dirty work.  */
  gcc_checking_assert (edge->inline_failed);
  evaluate_properties_for_edge (edge, true,
				&clause, &nonspec_clause,
				&known_vals, &known_contexts,
  estimate_node_size_and_time (callee, clause, nonspec_clause, known_vals,
			       known_contexts, known_aggs, &size, NULL, NULL,
  known_vals.release ();
  known_contexts.release ();
  known_aggs.release ();
/* Estimate the growth of the caller when inlining EDGE.
   Only to be called via estimate_edge_size.  */

/* NOTE(review): extraction dropped the return type, the declaration of
   "hints", trailing argument lines and both returns (cached path returns
   hints - 1, the computed path returns hints + 1 upstream) -- verify.  */
do_estimate_edge_hints (struct cgraph_edge *edge)
  struct cgraph_node *callee;
  clause_t clause, nonspec_clause;
  vec<tree> known_vals;
  vec<ipa_polymorphic_call_context> known_contexts;
  vec<ipa_agg_jump_function_p> known_aggs;

  /* When we do caching, use do_estimate_edge_time to populate the entry.  */

  if (edge_growth_cache != NULL)
      do_estimate_edge_time (edge);
      hints = edge_growth_cache->get (edge)->hints;
      /* Stored value is biased by one so that zero means "not cached".  */
      gcc_checking_assert (hints);

  callee = edge->callee->ultimate_alias_target ();

  /* Early inliner runs without caching, go ahead and do the dirty work.  */
  gcc_checking_assert (edge->inline_failed);
  evaluate_properties_for_edge (edge, true,
				&clause, &nonspec_clause,
				&known_vals, &known_contexts,
  /* Only the hints output is needed here; size/time outputs are NULL.  */
  estimate_node_size_and_time (callee, clause, nonspec_clause, known_vals,
			       known_contexts, known_aggs, NULL, NULL,
			       NULL, NULL, &hints, vNULL);
  known_vals.release ();
  known_contexts.release ();
  known_aggs.release ();
  hints |= simple_edge_hints (edge);
/* Estimate the size of NODE after inlining EDGE which should be an
   edge to either NODE or a call inlined into NODE.  */

/* NOTE(review): the return type, braces, the return of SIZE and the
   fallback branch (returning the unmodified size when the call predicate
   is known false) were dropped by the extraction -- verify upstream.  */
estimate_size_after_inlining (struct cgraph_node *node,
			      struct cgraph_edge *edge)
  struct ipa_call_summary *es = ipa_call_summaries->get (edge);
  ipa_fn_summary *s = ipa_fn_summaries->get (node);
  /* Only account growth if the call is not statically proven dead
     (predicate identically false).  */
  if (!es->predicate || *es->predicate != false)
      int size = s->size + estimate_edge_growth (edge);
      gcc_assert (size >= 0);
  /* NOTE(review): fragment of "struct growth_data" -- the struct header
     and its other members (bool self_recursive, bool uninlinable,
     int growth upstream) were dropped by the extraction.  */
  struct cgraph_node *node;
/* Worker for do_estimate_growth.  Collect growth for all callers.  */

/* NOTE(review): the return type, braces, "continue" statements and the
   final return were dropped by the extraction -- verify upstream.  */
do_estimate_growth_1 (struct cgraph_node *node, void *data)
  struct cgraph_edge *e;
  struct growth_data *d = (struct growth_data *) data;

  /* Walk every direct caller of NODE and accumulate per-edge growth.  */
  for (e = node->callers; e; e = e->next_caller)
      /* Callers that were already inlined should not appear here.  */
      gcc_checking_assert (e->inline_failed);

      /* Edges that can never be inlined (final error, or caller compiled
	 without optimization) mark the node uninlinable.  */
      if (cgraph_inline_failed_type (e->inline_failed) == CIF_FINAL_ERROR
	  || !opt_for_fn (e->caller->decl, optimize))
	  d->uninlinable = true;

      /* Self-recursive edges are flagged; the caller treats their growth
	 specially.  */
      if (e->recursive_p ())
	  d->self_recursive = true;
      d->growth += estimate_edge_growth (e);
/* Estimate the growth caused by inlining NODE into all callees.  */

/* NOTE(review): the return type, the "else"/";" arms between the
   branches, the trailing "+ 50) / 100" of the COMDAT scaling and the
   final return of d.growth were dropped by the extraction -- verify.  */
estimate_growth (struct cgraph_node *node)
  /* Accumulator shared with the do_estimate_growth_1 walker.  */
  struct growth_data d = { node, false, false, 0 };
  struct ipa_fn_summary *info = ipa_fn_summaries->get (node);

  /* Visit NODE and all its aliases, summing growth over their callers.  */
  node->call_for_symbol_and_aliases (do_estimate_growth_1, &d, true);

  /* For self recursive functions the growth estimation really should be
     infinity.  We don't want to return very large values because the growth
     plays various roles in badness computation fractions.  Be sure to not
     return zero or negative growths.  */
  if (d.self_recursive)
    d.growth = d.growth < info->size ? info->size : d.growth;
  else if (DECL_EXTERNAL (node->decl) || d.uninlinable)
      /* If the body disappears once all calls are inlined, its size is
	 pure savings.  */
      if (node->will_be_removed_from_program_if_no_direct_calls_p ())
	d.growth -= info->size;
      /* COMDAT functions are very often not shared across multiple units
	 since they come from various template instantiations.
	 Take this into account.  */
      else if (DECL_COMDAT (node->decl)
	       && node->can_remove_if_no_direct_calls_p ())
	/* Scale the savings by the probability the COMDAT body actually
	   disappears (PARAM_COMDAT_SHARING_PROBABILITY, in percent).  */
	d.growth -= (info->size
		     * (100 - PARAM_VALUE (PARAM_COMDAT_SHARING_PROBABILITY))
/* Verify if there are fewer than MAX_CALLERS.  */

/* NOTE(review): the return type, braces, the head of the "if" whose "||"
   continuation is visible below, the decrement/early-return logic on
   *max_callers and the final return were dropped by the extraction.  */
check_callers (cgraph_node *node, int *max_callers)
  /* A node that cannot vanish even with no direct calls gives no size
     savings; give up immediately.  */
  if (!node->can_remove_if_no_direct_calls_and_refs_p ())

  for (cgraph_edge *e = node->callers; e; e = e->next_caller)
	  || cgraph_inline_failed_type (e->inline_failed) == CIF_FINAL_ERROR)

  /* Recurse into aliases: their callers reach this body too.  */
  FOR_EACH_ALIAS (node, ref)
    if (check_callers (dyn_cast <cgraph_node *> (ref->referring), max_callers))
/* Make cheap estimation if growth of NODE is likely positive knowing
   EDGE_GROWTH of one particular edge.
   We assume that most of other edges will have similar growth
   and skip computation if there are too many callers.  */

/* NOTE(review): the return type, the second parameter line (int
   edge_growth upstream), the declaration of max_callers, loop-body
   lines, braces and several return statements were dropped by the
   extraction -- verify against upstream.  */
growth_likely_positive (struct cgraph_node *node,
  struct cgraph_edge *e;
  gcc_checking_assert (edge_growth > 0);

  /* First quickly check if NODE is removable at all.  */
  if (DECL_EXTERNAL (node->decl))
  if (!node->can_remove_if_no_direct_calls_and_refs_p ()
      || node->address_taken)

  /* Caller-count budget: roughly how many similar edges could be inlined
     before growth turns positive; beyond it we assume growth is positive
     without doing the exact computation.  */
  max_callers = ipa_fn_summaries->get (node)->size * 4 / edge_growth + 2;

  for (e = node->callers; e; e = e->next_caller)
	  || cgraph_inline_failed_type (e->inline_failed) == CIF_FINAL_ERROR)

  /* Callers reaching this body through aliases count against the same
     budget.  */
  FOR_EACH_ALIAS (node, ref)
    if (check_callers (dyn_cast <cgraph_node *> (ref->referring), &max_callers))

  /* Unlike for functions called once, we play unsafe with
     COMDATs.  We can allow that since we know functions
     in consideration are small (and thus risk is small) and
     moreover grow estimates already accounts that COMDAT
     functions may or may not disappear when eliminated from
     current unit.  With good probability making aggressive
     choice in all units is going to make overall program
     smaller.  */
  if (DECL_COMDAT (node->decl))
      if (!node->can_remove_if_no_direct_calls_p ())
  else if (!node->will_be_removed_from_program_if_no_direct_calls_p ())

  /* Fall back to the precise (and expensive) growth computation.  */
  return estimate_growth (node) > 0;