/* Callgraph transformations to handle inlining
   Copyright (C) 2003, 2004, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* The inline decisions are stored in the callgraph as an "inline plan"
   and applied later.

   To mark a given call inlined, use the inline_call function.
   The function marks the edge inlinable and, if necessary, produces a
   virtual clone in the callgraph representing the new copy of the
   callee's function body.

   The inline plan is applied to a given function body by
   inline_transform.  */
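/* For illustration, a client such as the inliner heuristics might mark
   call edges roughly like this (want_to_inline_p stands in for whatever
   profitability check the caller applies; it is not a function defined
   here):

     struct cgraph_edge *edge;
     for (edge = node->callees; edge; edge = edge->next_callee)
       if (want_to_inline_p (edge))
         inline_call (edge, true, NULL, NULL);

   The callee bodies are copied into the caller only later, when the pass
   manager runs inline_transform on the caller.  */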
32 #include "config.h"
33 #include "system.h"
34 #include "coretypes.h"
35 #include "tm.h"
36 #include "tree.h"
37 #include "langhooks.h"
38 #include "cgraph.h"
39 #include "timevar.h"
40 #include "output.h"
41 #include "intl.h"
42 #include "coverage.h"
43 #include "ggc.h"
44 #include "tree-flow.h"
45 #include "ipa-prop.h"
46 #include "ipa-inline.h"
47 #include "tree-inline.h"
49 int ncalls_inlined;
50 int nfunctions_inlined;
/* Scale frequency of NODE edges by FREQ_SCALE and increase loop nest
   by NEST.  */
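/* For example: if the call edge being inlined runs half as often as its
   caller, i.e. its frequency is CGRAPH_FREQ_BASE / 2, passing that value
   as FREQ_SCALE halves the frequency of every edge in the inlined body
   (capped at CGRAPH_FREQ_MAX) in the loop below.  */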
static void
update_noncloned_frequencies (struct cgraph_node *node,
                              int freq_scale, int nest)
{
  struct cgraph_edge *e;

  /* We do not want to ignore high loop nest after freq drops to 0.  */
  if (!freq_scale)
    freq_scale = 1;
  for (e = node->callees; e; e = e->next_callee)
    {
      e->loop_nest += nest;
      e->frequency = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
      if (e->frequency > CGRAPH_FREQ_MAX)
        e->frequency = CGRAPH_FREQ_MAX;
      if (!e->inline_failed)
        update_noncloned_frequencies (e->callee, freq_scale, nest);
    }
}
/* E is expected to be an edge being inlined.  Clone the destination node
   of the edge and redirect it to the new clone.
   DUPLICATE is used for bookkeeping on whether we are actually creating
   new clones or re-using the node originally representing the out-of-line
   function call.  */
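/* UPDATE_ORIGINAL is forwarded to cgraph_clone_node and also guards the
   re-use of the master clone below.  When OVERALL_SIZE is non-NULL, it is
   decreased by the size of any offline copy that becomes unnecessary.  */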
void
clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
                     bool update_original, int *overall_size)
{
  HOST_WIDE_INT peak;
  struct inline_summary *caller_info, *callee_info;

  if (duplicate)
    {
      /* We may eliminate the need for the out-of-line copy to be output.
         In that case just go ahead and re-use it.  This is not just a
         memory optimization.  Making the offline copy of the function
         disappear from the program will improve future decisions on
         inlining.  */
      if (!e->callee->callers->next_caller
          /* Recursive inlining never wants the master clone to
             be overwritten.  */
          && update_original
          /* FIXME: When the address of a DECL_EXTERNAL function is taken, we
             still can remove its offline copy, but we would need to keep an
             unanalyzed node in the callgraph so references can point to it.  */
          && !e->callee->address_taken
          && cgraph_can_remove_if_no_direct_calls_p (e->callee)
          /* Inlining might enable more devirtualization, so we want to remove
             those only after all devirtualizable virtual calls are processed.
             Lacking may-edges in the callgraph, we just preserve them post
             inlining.  */
          && (!DECL_VIRTUAL_P (e->callee->decl)
              || (!DECL_COMDAT (e->callee->decl)
                  && !DECL_EXTERNAL (e->callee->decl)))
          /* Don't reuse if more than one function shares a comdat group.
             If the other function(s) are needed, we need to emit even
             this function out of line.  */
          && !e->callee->same_comdat_group
          /* During early inlining some unanalyzed cgraph nodes might be in
             the callgraph and they might refer to the function in question.  */
          && !cgraph_new_nodes)
        {
          gcc_assert (!e->callee->global.inlined_to);
          if (e->callee->analyzed && !DECL_EXTERNAL (e->callee->decl))
            {
              if (overall_size)
                *overall_size -= inline_summary (e->callee)->size;
              nfunctions_inlined++;
            }
          duplicate = false;
          e->callee->local.externally_visible = false;
          update_noncloned_frequencies (e->callee, e->frequency, e->loop_nest);
        }
      else
        {
          struct cgraph_node *n;
          n = cgraph_clone_node (e->callee, e->callee->decl,
                                 e->count, e->frequency, e->loop_nest,
                                 update_original, NULL);
          cgraph_redirect_edge_callee (e, n);
        }
    }

  callee_info = inline_summary (e->callee);
  caller_info = inline_summary (e->caller);

  if (e->caller->global.inlined_to)
    e->callee->global.inlined_to = e->caller->global.inlined_to;
  else
    e->callee->global.inlined_to = e->caller;
  callee_info->stack_frame_offset
    = caller_info->stack_frame_offset
      + caller_info->estimated_self_stack_size;
  peak = callee_info->stack_frame_offset
         + callee_info->estimated_self_stack_size;
  if (inline_summary (e->callee->global.inlined_to)->estimated_stack_size
      < peak)
    inline_summary (e->callee->global.inlined_to)->estimated_stack_size = peak;
  cgraph_propagate_frequency (e->callee);

  /* Recursively clone all bodies.  */
  for (e = e->callee->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      clone_inlined_nodes (e, duplicate, update_original, overall_size);
}
/* Mark edge E as inlined and update the callgraph accordingly.
   UPDATE_ORIGINAL specifies whether the profile of the original function
   should be updated.  If any new indirect edges are discovered in the
   process, add them to NEW_EDGES, unless it is NULL.  Return true iff any
   new callgraph edges were discovered as a result of inlining.  */
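/* For illustration only (the locals named below belong to a hypothetical
   caller), the inliner heuristics would typically invoke this as

     inline_call (edge, true, &new_indirect_edges, &overall_size);

   and afterwards re-examine any edges appended to new_indirect_edges.  */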
bool
inline_call (struct cgraph_edge *e, bool update_original,
             VEC (cgraph_edge_p, heap) **new_edges,
             int *overall_size)
{
  int old_size = 0, new_size = 0;
  struct cgraph_node *to = NULL;
  struct cgraph_edge *curr = e;

  /* Don't inline inlined edges.  */
  gcc_assert (e->inline_failed);
  /* Don't even think of inlining an inline clone.  */
  gcc_assert (!e->callee->global.inlined_to);

  e->inline_failed = CIF_OK;
  DECL_POSSIBLY_INLINED (e->callee->decl) = true;

  to = e->caller;
  if (to->global.inlined_to)
    to = to->global.inlined_to;
  old_size = inline_summary (to)->size;
  inline_merge_summary (e);
  new_size = inline_summary (to)->size;

  clone_inlined_nodes (e, true, update_original, overall_size);

  gcc_assert (curr->callee->global.inlined_to == to);
  if (overall_size && new_size > old_size)
    *overall_size += new_size - old_size;
  ncalls_inlined++;

  if (flag_indirect_inlining && optimize)
    return ipa_propagate_indirect_call_infos (curr, new_edges);
  else
    return false;
}
/* Copy the function body of NODE and redirect all inline clones to it.
   This is done before the inline plan is applied to NODE when there are
   still some inline clones of it.

   This is necessary because inline decisions are not really transitive
   and the other inline clones may have different bodies.  */
static struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  if (dump_file)
    fprintf (dump_file, "\nSaving body of %s for later reuse\n",
             cgraph_node_name (node));

  gcc_assert (node == cgraph_get_node (node->decl));

  /* first_clone will be turned into a real function.  */
  first_clone = node->clones;
  first_clone->decl = copy_node (node->decl);
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_get_node (first_clone->decl));

  /* Now reshape the clone tree, so all other clones descend from
     first_clone.  */
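  /* For instance (illustrative only): if NODE has clones C1 (== first_clone),
     C2 and C3, and C1 itself already has a clone C1a, then

           NODE                          C1
          / | \          becomes        / | \
        C1 C2  C3                     C2 C3  C1a

     i.e. C2 and C3 are re-parented to C1 and spliced in front of C1's own
     clone list.  */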
  if (first_clone->next_sibling_clone)
    {
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone;
           n = n->next_sibling_clone)
        n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
        first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  first_clone->clone_of = NULL;

  /* Now the node in question has no clones.  */
  node->clones = NULL;

  /* Inline clones share the decl with the function they are cloned
     from.  Walk the whole clone tree and redirect them all to the
     new decl.  */
  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
        gcc_assert (n->decl == node->decl);
        n->decl = first_clone->decl;
        if (n->clones)
          n = n->clones;
        else if (n->next_sibling_clone)
          n = n->next_sibling_clone;
        else
          {
            while (n != first_clone && !n->next_sibling_clone)
              n = n->clone_of;
            if (n != first_clone)
              n = n->next_sibling_clone;
          }
      }

  /* Copy the function body of NODE to first_clone's new decl.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
                            NULL, NULL);

  /* The function will be short-lived and removed after we inline all the
     clones, but make it internal so we won't confuse ourselves.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  VEC_free (ipa_opt_pass, heap,
            first_clone->ipa_transforms_to_apply);
  first_clone->ipa_transforms_to_apply = NULL;

#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
/* Apply the inline plan to the function.  */

unsigned int
inline_transform (struct cgraph_node *node)
{
  unsigned int todo = 0;
  struct cgraph_edge *e;
  bool inline_p = false;

  /* FIXME: Currently the pass manager is adding the inline transform more
     than once to some clones.  This needs revisiting after WPA cleanups.  */
  if (cfun->after_inlining)
    return 0;

  /* We might need the body of this function so that we can expand
     it inline somewhere else.  */
  if (cgraph_preserve_function_body_p (node->decl))
    save_inline_function_body (node);

  for (e = node->callees; e; e = e->next_callee)
    {
      cgraph_redirect_edge_call_stmt_to_callee (e);
      if (!e->inline_failed || warn_inline)
        inline_p = true;
    }

  if (inline_p)
    {
      timevar_push (TV_INTEGRATION);
      todo = optimize_inline_calls (current_function_decl);
      timevar_pop (TV_INTEGRATION);
    }
  cfun->always_inline_functions_inlined = true;
  cfun->after_inlining = true;
  return todo | execute_fixup_cfg ();
}