/* Callgraph transformations to handle inlining
   Copyright (C) 2003-2017 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* The inline decisions are stored in callgraph in "inline plan" and
   applied later.

   To mark given call inline, use inline_call function.
   The function marks the edge inlinable and, if necessary, produces
   virtual clone in the callgraph representing the new copy of callee's
   function body.

   The inline plan is applied on given function body by inline_transform.  */
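/* Illustrative sketch of the two phases (the decision driver itself lives
   in ipa-inline.c, not in this file):

     decision phase:  inline_call (edge, true, NULL, &overall_size,
                                   true, NULL);
     apply phase:     the pass manager runs inline_transform on each
                      function; optimize_inline_calls rewrites the body.

   inline_call only records the decision in the callgraph; no gimple
   statement is touched until the apply phase.  */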
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "function.h"
#include "tree.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-inline.h"
#include "tree-inline.h"
int ncalls_inlined;
int nfunctions_inlined;
/* Scale frequency of NODE edges by FREQ_SCALE.  */

static void
update_noncloned_frequencies (struct cgraph_node *node,
			      int freq_scale)
{
  struct cgraph_edge *e;

  /* We do not want to ignore high loop nest after freq drops to 0.  */
  if (!freq_scale)
    freq_scale = 1;
  for (e = node->callees; e; e = e->next_callee)
    {
      e->frequency = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
      if (e->frequency > CGRAPH_FREQ_MAX)
	e->frequency = CGRAPH_FREQ_MAX;
      if (!e->inline_failed)
	update_noncloned_frequencies (e->callee, freq_scale);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      e->frequency = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
      if (e->frequency > CGRAPH_FREQ_MAX)
	e->frequency = CGRAPH_FREQ_MAX;
    }
}
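/* Worked example (illustrative numbers only): with CGRAPH_FREQ_BASE of
   1000, an edge of frequency 600 scaled by FREQ_SCALE 500 ends up with
   frequency 600 * 500 / 1000 == 300; any result above CGRAPH_FREQ_MAX is
   clamped by the checks above.  */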
/* We removed or are going to remove the last call to NODE.
   Return true if we can and want proactively remove the NODE now.
   This is important to do, since we want inliner to know when offline
   copy of function was removed.  */

static bool
can_remove_node_now_p_1 (struct cgraph_node *node, struct cgraph_edge *e)
{
  ipa_ref *ref;

  FOR_EACH_ALIAS (node, ref)
    {
      cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
      if ((alias->callers && alias->callers != e)
	  || !can_remove_node_now_p_1 (alias, e))
	return false;
    }
  /* FIXME: When address is taken of DECL_EXTERNAL function we still
     can remove its offline copy, but we would need to keep unanalyzed node in
     the callgraph so references can point to it.

     Also for comdat group we can ignore references inside a group as we
     want to prove the group as a whole to be dead.  */
  return (!node->address_taken
	  && node->can_remove_if_no_direct_calls_and_refs_p ()
	  /* Inlining might enable more devirtualizing, so we want to remove
	     those only after all devirtualizable virtual calls are processed.
	     Lacking may edges in callgraph we just preserve them post
	     inlining.  */
	  && (!DECL_VIRTUAL_P (node->decl)
	      || !opt_for_fn (node->decl, flag_devirtualize))
	  /* During early inlining some unanalyzed cgraph nodes might be in the
	     callgraph and they might refer the function in question.  */
	  && !cgraph_new_nodes.exists ());
}
/* We are going to eliminate last direct call to NODE (or alias of it) via edge E.
   Verify that the NODE can be removed from unit and if it is contained in comdat
   group that the whole comdat group is removable.  */

static bool
can_remove_node_now_p (struct cgraph_node *node, struct cgraph_edge *e)
{
  struct cgraph_node *next;
  if (!can_remove_node_now_p_1 (node, e))
    return false;

  /* When we see same comdat group, we need to be sure that all
     items can be removed.  */
  if (!node->same_comdat_group || !node->externally_visible)
    return true;
  for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
       next != node; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
    {
      if (next->alias)
	continue;
      if ((next->callers && next->callers != e)
	  || !can_remove_node_now_p_1 (next, e))
	return false;
    }
  return true;
}
/* Return true if NODE is a master clone with non-inline clones.  */

static bool
master_clone_with_noninline_clones_p (struct cgraph_node *node)
{
  if (node->clone_of)
    return false;

  for (struct cgraph_node *n = node->clones; n; n = n->next_sibling_clone)
    if (n->decl != node->decl)
      return true;

  return false;
}
/* E is expected to be an edge being inlined.  Clone destination node of
   the edge and redirect it to the new clone.
   DUPLICATE is used for bookkeeping on whether we are actually creating new
   clones or re-using node originally representing out-of-line function call.
   By default the offline copy is removed, when it appears dead after inlining.
   UPDATE_ORIGINAL prevents this transformation.
   If OVERALL_SIZE is non-NULL, the size is updated to reflect the
   transformation.
   FREQ_SCALE specifies the scaling of frequencies of call sites.  */
void
clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
		     bool update_original, int *overall_size, int freq_scale)
{
  struct cgraph_node *inlining_into;
  struct cgraph_edge *next;

  if (e->caller->global.inlined_to)
    inlining_into = e->caller->global.inlined_to;
  else
    inlining_into = e->caller;

  if (duplicate)
    {
      /* We may eliminate the need for out-of-line copy to be output.
	 In that case just go ahead and re-use it.  This is not just a
	 memory optimization.  Making offline copy of function disappear
	 from the program will improve future decisions on inlining.  */
      if (!e->callee->callers->next_caller
	  /* Recursive inlining never wants the master clone to
	     be overwritten.  */
	  && update_original
	  && can_remove_node_now_p (e->callee, e)
	  /* We cannot overwrite a master clone with non-inline clones
	     until after these clones are materialized.  */
	  && !master_clone_with_noninline_clones_p (e->callee))
	{
	  /* TODO: When callee is in a comdat group, we could remove all of it,
	     including all inline clones inlined into it.  That would however
	     need small function inlining to register edge removal hook to
	     maintain the priority queue.

	     For now we keep the other functions in the group in program until
	     cgraph_remove_unreachable_functions gets rid of them.  */
	  gcc_assert (!e->callee->global.inlined_to);
	  e->callee->remove_from_same_comdat_group ();
	  if (e->callee->definition
	      && inline_account_function_p (e->callee))
	    {
	      gcc_assert (!e->callee->alias);
	      if (overall_size)
		*overall_size -= inline_summaries->get (e->callee)->size;
	      nfunctions_inlined++;
	    }
	  duplicate = false;
	  e->callee->externally_visible = false;
	  update_noncloned_frequencies (e->callee, e->frequency);

	  dump_callgraph_transformation (e->callee, inlining_into,
					 "inlining to");
	}
      else
	{
	  struct cgraph_node *n;

	  if (freq_scale == -1)
	    freq_scale = e->frequency;
	  n = e->callee->create_clone (e->callee->decl,
				       MIN (e->count, e->callee->count),
				       freq_scale,
				       update_original, vNULL, true,
				       inlining_into,
				       NULL);
	  n->used_as_abstract_origin = e->callee->used_as_abstract_origin;
	  e->redirect_callee (n);
	}
    }
  else
    e->callee->remove_from_same_comdat_group ();

  e->callee->global.inlined_to = inlining_into;

  /* Recursively clone all bodies.  */
  for (e = e->callee->callees; e; e = next)
    {
      next = e->next_callee;
      if (!e->inline_failed)
	clone_inlined_nodes (e, duplicate, update_original, overall_size,
			     freq_scale);
    }
}
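
/* Note (illustrative): inline_call below invokes this as

     clone_inlined_nodes (e, true, update_original, overall_size,
			  e->frequency);

   so the first-level callee is always a candidate for duplication, while
   the recursive walk inherits DUPLICATE: once an offline body is re-used
   (DUPLICATE cleared above), the edges already inlined into it need no
   further cloning.  */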

/* Check all speculations in N and resolve them if they seem useless.  */

static bool
check_speculations (cgraph_node *n)
{
  bool speculation_removed = false;
  cgraph_edge *next;

  for (cgraph_edge *e = n->callees; e; e = next)
    {
      next = e->next_callee;
      if (e->speculative && !speculation_useful_p (e, true))
	{
	  e->resolve_speculation (NULL);
	  speculation_removed = true;
	}
      else if (!e->inline_failed)
	speculation_removed |= check_speculations (e->callee);
    }
  return speculation_removed;
}

/* Mark all call graph edges coming out of NODE and all nodes that have been
   inlined to it as in_polymorphic_cdtor.  */

static void
mark_all_inlined_calls_cdtor (cgraph_node *node)
{
  for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      cs->in_polymorphic_cdtor = true;
      if (!cs->inline_failed)
	mark_all_inlined_calls_cdtor (cs->callee);
    }
  for (cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    cs->in_polymorphic_cdtor = true;
}

/* Mark edge E as inlined and update callgraph accordingly.  UPDATE_ORIGINAL
   specifies whether profile of original function should be updated.  If any new
   indirect edges are discovered in the process, add them to NEW_EDGES, unless
   it is NULL.  If UPDATE_OVERALL_SUMMARY is false, do not bother to recompute
   overall size of caller after inlining.  Caller is required to eventually do
   it via inline_update_overall_summary.
   If CALLEE_REMOVED is non-NULL, set it to true if we removed callee node.

   Return true iff any new callgraph edges were discovered as a
   result of inlining.  */
bool
inline_call (struct cgraph_edge *e, bool update_original,
	     vec<cgraph_edge *> *new_edges,
	     int *overall_size, bool update_overall_summary,
	     bool *callee_removed)
{
  int old_size = 0, new_size = 0;
  struct cgraph_node *to = NULL;
  struct cgraph_edge *curr = e;
  struct cgraph_node *callee = e->callee->ultimate_alias_target ();
  bool new_edges_found = false;

  int estimated_growth = 0;
  if (! update_overall_summary)
    estimated_growth = estimate_edge_growth (e);
  /* This is used only for the assert below.  */
#if 0
  bool predicated = inline_edge_summary (e)->predicate != NULL;
#endif

  /* Don't inline inlined edges.  */
  gcc_assert (e->inline_failed);
  /* Don't even think of inlining inline clone.  */
  gcc_assert (!callee->global.inlined_to);

  to = e->caller;
  if (to->global.inlined_to)
    to = to->global.inlined_to;
  if (to->thunk.thunk_p)
    {
      struct cgraph_node *target = to->callees->callee;
      if (in_lto_p)
	to->get_untransformed_body ();
      to->expand_thunk (false, true);
      /* When thunk is instrumented we may have multiple callees.  */
      for (e = to->callees; e && e->callee != target; e = e->next_callee)
	;
      gcc_assert (e);
    }

  e->inline_failed = CIF_OK;
  DECL_POSSIBLY_INLINED (callee->decl) = true;

  if (DECL_FUNCTION_PERSONALITY (callee->decl))
    DECL_FUNCTION_PERSONALITY (to->decl)
      = DECL_FUNCTION_PERSONALITY (callee->decl);

  bool reload_optimization_node = false;
  if (!opt_for_fn (callee->decl, flag_strict_aliasing)
      && opt_for_fn (to->decl, flag_strict_aliasing))
    {
      struct gcc_options opts = global_options;

      cl_optimization_restore (&opts, opts_for_fn (to->decl));
      opts.x_flag_strict_aliasing = false;
      if (dump_file)
	fprintf (dump_file, "Dropping flag_strict_aliasing on %s:%i\n",
		 to->name (), to->order);
      DECL_FUNCTION_SPECIFIC_OPTIMIZATION (to->decl)
	= build_optimization_node (&opts);
      reload_optimization_node = true;
    }

  inline_summary *caller_info = inline_summaries->get (to);
  inline_summary *callee_info = inline_summaries->get (callee);
  if (!caller_info->fp_expressions && callee_info->fp_expressions)
    {
      caller_info->fp_expressions = true;
      if (opt_for_fn (callee->decl, flag_rounding_math)
	  != opt_for_fn (to->decl, flag_rounding_math)
	  || opt_for_fn (callee->decl, flag_trapping_math)
	     != opt_for_fn (to->decl, flag_trapping_math)
	  || opt_for_fn (callee->decl, flag_unsafe_math_optimizations)
	     != opt_for_fn (to->decl, flag_unsafe_math_optimizations)
	  || opt_for_fn (callee->decl, flag_finite_math_only)
	     != opt_for_fn (to->decl, flag_finite_math_only)
	  || opt_for_fn (callee->decl, flag_signaling_nans)
	     != opt_for_fn (to->decl, flag_signaling_nans)
	  || opt_for_fn (callee->decl, flag_cx_limited_range)
	     != opt_for_fn (to->decl, flag_cx_limited_range)
	  || opt_for_fn (callee->decl, flag_signed_zeros)
	     != opt_for_fn (to->decl, flag_signed_zeros)
	  || opt_for_fn (callee->decl, flag_associative_math)
	     != opt_for_fn (to->decl, flag_associative_math)
	  || opt_for_fn (callee->decl, flag_reciprocal_math)
	     != opt_for_fn (to->decl, flag_reciprocal_math)
	  || opt_for_fn (callee->decl, flag_fp_int_builtin_inexact)
	     != opt_for_fn (to->decl, flag_fp_int_builtin_inexact)
	  || opt_for_fn (callee->decl, flag_errno_math)
	     != opt_for_fn (to->decl, flag_errno_math))
	{
	  struct gcc_options opts = global_options;

	  cl_optimization_restore (&opts, opts_for_fn (to->decl));
	  opts.x_flag_rounding_math
	    = opt_for_fn (callee->decl, flag_rounding_math);
	  opts.x_flag_trapping_math
	    = opt_for_fn (callee->decl, flag_trapping_math);
	  opts.x_flag_unsafe_math_optimizations
	    = opt_for_fn (callee->decl, flag_unsafe_math_optimizations);
	  opts.x_flag_finite_math_only
	    = opt_for_fn (callee->decl, flag_finite_math_only);
	  opts.x_flag_signaling_nans
	    = opt_for_fn (callee->decl, flag_signaling_nans);
	  opts.x_flag_cx_limited_range
	    = opt_for_fn (callee->decl, flag_cx_limited_range);
	  opts.x_flag_signed_zeros
	    = opt_for_fn (callee->decl, flag_signed_zeros);
	  opts.x_flag_associative_math
	    = opt_for_fn (callee->decl, flag_associative_math);
	  opts.x_flag_reciprocal_math
	    = opt_for_fn (callee->decl, flag_reciprocal_math);
	  opts.x_flag_fp_int_builtin_inexact
	    = opt_for_fn (callee->decl, flag_fp_int_builtin_inexact);
	  opts.x_flag_errno_math
	    = opt_for_fn (callee->decl, flag_errno_math);
	  if (dump_file)
	    fprintf (dump_file, "Copying FP flags from %s:%i to %s:%i\n",
		     callee->name (), callee->order, to->name (), to->order);
	  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (to->decl)
	    = build_optimization_node (&opts);
	  reload_optimization_node = true;
	}
    }

  /* Reload global optimization flags.  */
  if (reload_optimization_node && DECL_STRUCT_FUNCTION (to->decl) == cfun)
    set_cfun (cfun, true);

  /* If aliases are involved, redirect edge to the actual destination and
     possibly remove the aliases.  */
  if (e->callee != callee)
    {
      struct cgraph_node *alias = e->callee, *next_alias;
      e->redirect_callee (callee);
      while (alias && alias != callee)
	{
	  if (!alias->callers
	      && can_remove_node_now_p (alias,
					!e->next_caller && !e->prev_caller
					? e : NULL))
	    {
	      next_alias = alias->get_alias_target ();
	      alias->remove ();
	      if (callee_removed)
		*callee_removed = true;
	      alias = next_alias;
	    }
	  else
	    break;
	}
    }

  clone_inlined_nodes (e, true, update_original, overall_size, e->frequency);

  gcc_assert (curr->callee->global.inlined_to == to);

  old_size = inline_summaries->get (to)->size;
  inline_merge_summary (e);
  if (e->in_polymorphic_cdtor)
    mark_all_inlined_calls_cdtor (e->callee);
  if (opt_for_fn (e->caller->decl, optimize))
    new_edges_found = ipa_propagate_indirect_call_infos (curr, new_edges);
  check_speculations (e->callee);
  if (update_overall_summary)
    inline_update_overall_summary (to);
  else
    /* Update self size by the estimate so overall function growth limits
       work for further inlining into this function.  Before inlining
       the function we inlined to again we expect the caller to update
       the overall summary.  */
    inline_summaries->get (to)->size += estimated_growth;
  new_size = inline_summaries->get (to)->size;

  if (callee->calls_comdat_local)
    to->calls_comdat_local = true;
  else if (to->calls_comdat_local && callee->comdat_local_p ())
    {
      struct cgraph_edge *se = to->callees;
      for (; se; se = se->next_callee)
	if (se->inline_failed && se->callee->comdat_local_p ())
	  break;
      if (se == NULL)
	to->calls_comdat_local = false;
    }

  /* FIXME: This assert suffers from roundoff errors, disable it for GCC 5
     and revisit it after conversion to sreals in GCC 6.  */
#if 0
  /* Verify that estimated growth matches real growth.  Allow off-by-one
     error due to INLINE_SIZE_SCALE roundoff errors.  */
  gcc_assert (!update_overall_summary || !overall_size || new_edges_found
	      || abs (estimated_growth - (new_size - old_size)) <= 1
	      || speculation_removed
	      /* FIXME: a hack.  Edges with false predicate are accounted
		 wrong, we should remove them from callgraph.  */
	      || predicated);
#endif

  /* Account the change of overall unit size; external functions will be
     removed and are thus not accounted.  */
  if (overall_size && inline_account_function_p (to))
    *overall_size += new_size - old_size;
  ncalls_inlined++;

  /* This must happen after inline_merge_summary that relies on jump
     functions of callee to not be updated.  */
  return new_edges_found;
}
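
/* Example (hypothetical caller): when inlining a batch of edges into the
   same function, the overall summary recomputation can be deferred:

     inline_call (e, true, NULL, &overall_size, false, NULL);
     ... more inline_call's into the same caller ...
     inline_update_overall_summary (to);

   With UPDATE_OVERALL_SUMMARY false only the cheap estimated growth is
   applied to the caller's size, as described in the comment above.  */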

/* Copy function body of NODE and redirect all inline clones to it.
   This is done before inline plan is applied to NODE when there are
   still some inline clones of it.

   This is necessary because inline decisions are not really transitive
   and the other inline clones may have different bodies.  */
static struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  if (dump_file)
    fprintf (dump_file, "\nSaving body of %s for later reuse\n",
	     node->name ());

  gcc_assert (node == cgraph_node::get (node->decl));

  /* first_clone will be turned into real function.  */
  first_clone = node->clones;

  /* Arrange first clone to not be thunk as those do not have bodies.  */
  if (first_clone->thunk.thunk_p)
    {
      while (first_clone->thunk.thunk_p)
	first_clone = first_clone->next_sibling_clone;
      first_clone->prev_sibling_clone->next_sibling_clone
	= first_clone->next_sibling_clone;
      if (first_clone->next_sibling_clone)
	first_clone->next_sibling_clone->prev_sibling_clone
	  = first_clone->prev_sibling_clone;
      first_clone->next_sibling_clone = node->clones;
      first_clone->prev_sibling_clone = NULL;
      node->clones->prev_sibling_clone = first_clone;
      node->clones = first_clone;
    }
  first_clone->decl = copy_node (node->decl);
  first_clone->decl->decl_with_vis.symtab_node = first_clone;
  gcc_assert (first_clone == cgraph_node::get (first_clone->decl));
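
  /* Illustrative effect of the reshaping below, for a hypothetical
     sibling list (first_clone, c1, c2) hanging off NODE:

       before:  node->clones = first_clone -> c1 -> c2
       after:   node->clones = NULL
                first_clone->clones = c1 -> c2
                first_clone->clone_of = NULL

     so first_clone becomes an independent master clone owning the rest.  */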

  /* Now reshape the clone tree, so all other clones descend from
     first_clone.  */
  if (first_clone->next_sibling_clone)
    {
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone;
	   n = n->next_sibling_clone)
	n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
	first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  first_clone->clone_of = NULL;

  /* Now node in question has no clones.  */
  node->clones = NULL;

  /* Inline clones share decl with the function they are cloned
     from.  Walk the whole clone tree and redirect them all to the
     new decl.  */
  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
	gcc_assert (n->decl == node->decl);
	n->decl = first_clone->decl;
	if (n->clones)
	  n = n->clones;
	else if (n->next_sibling_clone)
	  n = n->next_sibling_clone;
	else
	  {
	    while (n != first_clone && !n->next_sibling_clone)
	      n = n->clone_of;
	    if (n != first_clone)
	      n = n->next_sibling_clone;
	  }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl,
			    NULL, true, NULL, false,
			    NULL, NULL);

  /* The function will be short lived and removed after we inline all the
     clones, but make it internal so we won't confuse ourselves.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  first_clone->ipa_transforms_to_apply.release ();

  /* When doing recursive inlining, the clone may become unnecessary.
     This is possible e.g. in the case when the recursive function is proved
     to be non-throwing and the recursion happens only in the EH landing pad.
     We can not remove the clone until we are done with saving the body.
     Remove it now.  */
  if (!first_clone->callers)
    {
      first_clone->remove_symbol_and_inline_clones ();
      first_clone = NULL;
    }
  else if (flag_checking)
    first_clone->verify ();

  return first_clone;
}

/* Return true when function body of DECL still needs to be kept around
   for later re-use.  */

static bool
preserve_function_body_p (struct cgraph_node *node)
{
  gcc_assert (symtab->global_info_ready);
  gcc_assert (!node->alias && !node->thunk.thunk_p);

  /* Look if there is any non-thunk clone around.  */
  for (node = node->clones; node; node = node->next_sibling_clone)
    if (!node->thunk.thunk_p)
      return true;
  return false;
}

/* Apply inline plan to function.  */

static unsigned int
inline_transform (struct cgraph_node *node)
{
  unsigned int todo = 0;
  struct cgraph_edge *e, *next;
  bool has_inline = false;

  /* FIXME: Currently the pass manager is adding inline transform more than
     once to some clones.  This needs revisiting after WPA cleanups.  */
  if (cfun->after_inlining)
    return 0;

  /* We might need the body of this function so that we can expand
     it inline somewhere else.  */
  if (preserve_function_body_p (node))
    save_inline_function_body (node);

  for (e = node->callees; e; e = next)
    {
      if (!e->inline_failed)
	has_inline = true;
      next = e->next_callee;
      e->redirect_call_stmt_to_callee ();
    }
  node->remove_all_references ();

  timevar_push (TV_INTEGRATION);
  if (node->callees && (opt_for_fn (node->decl, optimize) || has_inline))
    todo = optimize_inline_calls (current_function_decl);
  timevar_pop (TV_INTEGRATION);

  cfun->always_inline_functions_inlined = true;
  cfun->after_inlining = true;
  todo |= execute_fixup_cfg ();

  if (!(todo & TODO_update_ssa_any))
    /* Redirecting edges might lead to a need for vops to be recomputed.  */
    todo |= TODO_update_ssa_only_virtuals;

  return todo;
}