/* Callgraph clones
   Copyright (C) 2003-2014 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This module provides facilities for cloning functions, i.e. creating
   new functions based on existing functions with simple modifications,
   such as replacement of parameters.

   To allow whole program optimization without actual presence of function
   bodies, an additional infrastructure is provided for so-called virtual
   clones.

   A virtual clone in the callgraph is a function that has no
   associated body, just a description of how to create its body based
   on a different function (which itself may be a virtual clone).

   The description of function modifications includes adjustments to
   the function's signature (which allows, for example, removing or
   adding function arguments), substitutions to perform on the
   function body, and, for inlined functions, a pointer to the
   function that it will be inlined into.

   It is also possible to redirect any edge of the callgraph from a
   function to its virtual clone.  This implies updating of the call
   site to adjust for the new function signature.

   Most of the transformations performed by inter-procedural
   optimizations can be represented via virtual clones.  For
   instance, a constant propagation pass can produce a virtual clone
   of a function which replaces one of its arguments by a
   constant.  The inliner can represent its decisions by producing a
   clone of a function whose body will later be integrated into
   a given function.

   Using virtual clones, the program can be easily updated
   during the Execute stage, solving most of the pass interaction
   problems that would otherwise occur during Transform.

   Virtual clones are later materialized in the LTRANS stage and
   turned into real functions.  Passes executed after the virtual
   clone was introduced also perform their Transform stage on the
   new functions, so for a pass there is no significant difference
   between operating on a real function and a virtual clone
   introduced before its Execute stage.

   Optimization passes then work on virtual clones introduced before
   their Execute stage as if they were real functions.  The
   only difference is that clones are not visible during the
   Generate Summary stage.  */
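
/* As an illustrative example (not tied to any particular pass): if IPA
   constant propagation discovers that the second argument of

     int mul (int a, int b) { return a * b; }

   is always 7 at the interesting call sites, it can create a virtual clone
   described as "mul with argument 1 replaced by the constant 7 and removed
   from the signature" and redirect those call edges to the clone.  The
   clone's body is built only when the clone is materialized.  */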

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "function.h"
#include "emit-rtl.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "bitmap.h"
#include "tree-cfg.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "toplev.h"
#include "flags.h"
#include "debug.h"
#include "target.h"
#include "diagnostic.h"
#include "params.h"
#include "intl.h"
#include "function.h"
#include "ipa-prop.h"
#include "tree-iterator.h"
#include "tree-dump.h"
#include "gimple-pretty-print.h"
#include "coverage.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "lto-streamer.h"
#include "except.h"

/* Create a clone of edge E in the callgraph node N; the cloned edge's call
   statement is CALL_STMT.  */
struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
                   gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
                   int freq_scale, bool update_original)
{
  struct cgraph_edge *new_edge;
  gcov_type count = apply_probability (e->count, count_scale);
  gcov_type freq;

  /* We do not want to ignore loop nest after frequency drops to 0.  */
  if (!freq_scale)
    freq_scale = 1;
  freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;
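
  /* Illustrative arithmetic, assuming CGRAPH_FREQ_BASE is 1000: an edge
     frequency of 800 scaled by FREQ_SCALE == 500 becomes
     800 * 500 / 1000 == 400; anything above CGRAPH_FREQ_MAX is clamped
     by the check above.  */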

  if (e->indirect_unknown_callee)
    {
      tree decl;

      if (call_stmt && (decl = gimple_call_fndecl (call_stmt))
          /* When the call is speculative, we need to resolve it
             via cgraph_resolve_speculation and not here.  */
          && !e->speculative)
        {
          struct cgraph_node *callee = cgraph_get_node (decl);
          gcc_checking_assert (callee);
          new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq);
        }
      else
        {
          new_edge = cgraph_create_indirect_edge (n, call_stmt,
                                                  e->indirect_info->ecf_flags,
                                                  count, freq);
          *new_edge->indirect_info = *e->indirect_info;
        }
    }
  else
    {
      new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq);
      if (e->indirect_info)
        {
          new_edge->indirect_info
            = ggc_cleared_alloc<cgraph_indirect_call_info> ();
          *new_edge->indirect_info = *e->indirect_info;
        }
    }

  new_edge->inline_failed = e->inline_failed;
  new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
  new_edge->lto_stmt_uid = stmt_uid;
  /* Clone flags that depend on call_stmt availability manually.  */
  new_edge->can_throw_external = e->can_throw_external;
  new_edge->call_stmt_cannot_inline_p = e->call_stmt_cannot_inline_p;
  new_edge->speculative = e->speculative;
  if (update_original)
    {
      e->count -= new_edge->count;
      if (e->count < 0)
        e->count = 0;
    }
  cgraph_call_edge_duplication_hooks (e, new_edge);
  return new_edge;
}

/* Build variant of function type ORIG_TYPE skipping ARGS_TO_SKIP and the
   return value if SKIP_RETURN is true.  */
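
/* For example (illustrative): given ORIG_TYPE describing int (int, long, char)
   and ARGS_TO_SKIP with bit 1 set, the result describes int (int, char); with
   SKIP_RETURN also true it would describe void (int, char).  */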

static tree
build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
                               bool skip_return)
{
  tree new_type = NULL;
  tree args, new_args = NULL, t;
  tree new_reversed;
  int i = 0;

  for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node;
       args = TREE_CHAIN (args), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args);

  new_reversed = nreverse (new_args);
  if (args)
    {
      if (new_reversed)
        TREE_CHAIN (new_args) = void_list_node;
      else
        new_reversed = void_list_node;
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.).
     The exception is METHOD_TYPEs, which must have the THIS argument.
     When we are asked to remove it, we need to build a new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || !args_to_skip
      || !bitmap_bit_p (args_to_skip, 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
        = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
                                                         new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
    }

  if (skip_return)
    TREE_TYPE (new_type) = void_type_node;

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (orig_type);
  if (t != orig_type)
    {
      t = build_function_type_skip_args (t, args_to_skip, skip_return);
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  return new_type;
}

/* Build variant of function decl ORIG_DECL skipping ARGS_TO_SKIP and the
   return value if SKIP_RETURN is true.

   Arguments from DECL_ARGUMENTS list can't be removed now, since they are
   linked by TREE_CHAIN directly.  The caller is responsible for eliminating
   them when they are being duplicated (i.e. copy_arguments_for_versioning).  */

static tree
build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip,
                               bool skip_return)
{
  tree new_decl = copy_node (orig_decl);
  tree new_type;

  new_type = TREE_TYPE (orig_decl);
  if (prototype_p (new_type)
      || (skip_return && !VOID_TYPE_P (TREE_TYPE (new_type))))
    new_type
      = build_function_type_skip_args (new_type, args_to_skip, skip_return);
  TREE_TYPE (new_decl) = new_type;

  /* For declarations setting DECL_VINDEX (i.e. methods)
     we expect the first argument to be the THIS pointer.  */
  if (args_to_skip && bitmap_bit_p (args_to_skip, 0))
    DECL_VINDEX (new_decl) = NULL_TREE;

  /* When the signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (new_decl)
      && args_to_skip
      && !bitmap_empty_p (args_to_skip))
    {
      DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
    }
  /* The FE might have information and assumptions about the other
     arguments.  */
  DECL_LANG_SPECIFIC (new_decl) = NULL;
  return new_decl;
}

/* Set flags of NEW_NODE and its decl.  NEW_NODE is a newly created private
   clone or its thunk.  */

static void
set_new_clone_decl_and_node_flags (cgraph_node *new_node)
{
  DECL_EXTERNAL (new_node->decl) = 0;
  TREE_PUBLIC (new_node->decl) = 0;
  DECL_COMDAT (new_node->decl) = 0;
  DECL_WEAK (new_node->decl) = 0;
  DECL_VIRTUAL_P (new_node->decl) = 0;
  DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
  DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;

  new_node->externally_visible = 0;
  new_node->local.local = 1;
  new_node->lowered = true;
  new_node->reset_section ();
}

/* Duplicate thunk THUNK if necessary, but make it refer to NODE.
   ARGS_TO_SKIP, if non-NULL, determines which parameters should be omitted.
   The function can return NODE itself if no thunk is necessary, which happens
   when the thunk is this_adjusting but the this parameter is being removed.  */
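
/* Illustrative case: a this_adjusting thunk only shifts the this pointer
   before calling its target, so once ARGS_TO_SKIP has bit 0 set (the this
   parameter is dropped) there is nothing left for the thunk to do and the
   target NODE is returned directly.  */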

static cgraph_node *
duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node,
                          bitmap args_to_skip)
{
  cgraph_node *new_thunk, *thunk_of;
  thunk_of = cgraph_function_or_thunk_node (thunk->callees->callee);

  if (thunk_of->thunk.thunk_p)
    node = duplicate_thunk_for_node (thunk_of, node, args_to_skip);

  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (cs->caller->thunk.thunk_p
        && cs->caller->thunk.this_adjusting == thunk->thunk.this_adjusting
        && cs->caller->thunk.fixed_offset == thunk->thunk.fixed_offset
        && cs->caller->thunk.virtual_offset_p == thunk->thunk.virtual_offset_p
        && cs->caller->thunk.virtual_value == thunk->thunk.virtual_value)
      return cs->caller;

  tree new_decl;
  if (!args_to_skip)
    new_decl = copy_node (thunk->decl);
  else
    {
      /* We do not need to duplicate this_adjusting thunks if we have removed
         this.  */
      if (thunk->thunk.this_adjusting
          && bitmap_bit_p (args_to_skip, 0))
        return node;

      new_decl = build_function_decl_skip_args (thunk->decl, args_to_skip,
                                                false);
    }
  gcc_checking_assert (!DECL_STRUCT_FUNCTION (new_decl));
  gcc_checking_assert (!DECL_INITIAL (new_decl));
  gcc_checking_assert (!DECL_RESULT (new_decl));
  gcc_checking_assert (!DECL_RTL_SET_P (new_decl));

  DECL_NAME (new_decl) = clone_function_name (thunk->decl, "artificial_thunk");
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));

  new_thunk = cgraph_create_node (new_decl);
  set_new_clone_decl_and_node_flags (new_thunk);
  new_thunk->definition = true;
  new_thunk->thunk = thunk->thunk;
  new_thunk->unique_name = in_lto_p;
  new_thunk->former_clone_of = thunk->decl;

  struct cgraph_edge *e = cgraph_create_edge (new_thunk, node, NULL, 0,
                                              CGRAPH_FREQ_BASE);
  e->call_stmt_cannot_inline_p = true;
  cgraph_call_edge_duplication_hooks (thunk->callees, e);
  if (!expand_thunk (new_thunk, false, false))
    new_thunk->analyzed = true;
  cgraph_call_node_duplication_hooks (thunk, new_thunk);
  return new_thunk;
}

/* If E does not lead to a thunk, simply redirect it to N.  Otherwise create
   one or more equivalent thunks for N and redirect E to the first in the
   chain.  */

void
redirect_edge_duplicating_thunks (struct cgraph_edge *e, struct cgraph_node *n,
                                  bitmap args_to_skip)
{
  cgraph_node *orig_to = cgraph_function_or_thunk_node (e->callee);
  if (orig_to->thunk.thunk_p)
    n = duplicate_thunk_for_node (orig_to, n, args_to_skip);

  cgraph_redirect_edge_callee (e, n);
}

/* Create a node representing a clone of N executed COUNT times.  Decrease
   the execution counts from the original node too.
   The new clone will have decl set to DECL, which may or may not be the same
   as the decl of N.

   When UPDATE_ORIGINAL is true, the counts are subtracted from the original
   function's profile to reflect the fact that part of the execution is handled
   by the new node.
   When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
   the new clone.  Otherwise the caller is responsible for doing so later.

   If the new node is being inlined into another one, NEW_INLINED_TO should be
   the outline function the new one is (even indirectly) inlined to.  All hooks
   will see this in the node's global.inlined_to, when invoked.  It can be NULL
   if the node is not inlined.  */
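
/* Illustrative numbers: if N has a profile count of 1000 and the clone is
   created with COUNT == 400, the cloned edges are scaled to roughly 40% of
   their original counts, and with UPDATE_ORIGINAL the remaining 600 stays
   with N.  */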

struct cgraph_node *
cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
                   bool update_original,
                   vec<cgraph_edge_p> redirect_callers,
                   bool call_duplication_hook,
                   struct cgraph_node *new_inlined_to,
                   bitmap args_to_skip)
{
  struct cgraph_node *new_node = cgraph_create_empty_node ();
  struct cgraph_edge *e;
  gcov_type count_scale;
  unsigned i;

  new_node->decl = decl;
  symtab_register_node (new_node);
  new_node->origin = n->origin;
  new_node->lto_file_data = n->lto_file_data;
  if (new_node->origin)
    {
      new_node->next_nested = new_node->origin->nested;
      new_node->origin->nested = new_node;
    }
  new_node->analyzed = n->analyzed;
  new_node->definition = n->definition;
  new_node->local = n->local;
  new_node->externally_visible = false;
  new_node->local.local = true;
  new_node->global = n->global;
  new_node->global.inlined_to = new_inlined_to;
  new_node->rtl = n->rtl;
  new_node->count = count;
  new_node->frequency = n->frequency;
  new_node->clone = n->clone;
  new_node->clone.tree_map = NULL;
  new_node->tp_first_run = n->tp_first_run;
  if (n->count)
    {
      if (new_node->count > n->count)
        count_scale = REG_BR_PROB_BASE;
      else
        count_scale = GCOV_COMPUTE_SCALE (new_node->count, n->count);
    }
  else
    count_scale = 0;
  if (update_original)
    {
      n->count -= count;
      if (n->count < 0)
        n->count = 0;
    }

  FOR_EACH_VEC_ELT (redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
         version.  The only exception is when the edge was proved to
         be unreachable during the cloning procedure.  */
      if (!e->callee
          || DECL_BUILT_IN_CLASS (e->callee->decl) != BUILT_IN_NORMAL
          || DECL_FUNCTION_CODE (e->callee->decl) != BUILT_IN_UNREACHABLE)
        redirect_edge_duplicating_thunks (e, new_node, args_to_skip);
    }

  for (e = n->callees; e; e = e->next_callee)
    cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
                       count_scale, freq, update_original);

  for (e = n->indirect_calls; e; e = e->next_callee)
    cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
                       count_scale, freq, update_original);
  ipa_clone_references (new_node, &n->ref_list);

  new_node->next_sibling_clone = n->clones;
  if (n->clones)
    n->clones->prev_sibling_clone = new_node;
  n->clones = new_node;
  new_node->clone_of = n;

  if (call_duplication_hook)
    cgraph_call_node_duplication_hooks (n, new_node);
  return new_node;
}

/* Return a new assembler name for a clone of DECL with SUFFIX.  */

static GTY(()) unsigned int clone_fn_id_num;

tree
clone_function_name (tree decl, const char *suffix)
{
  tree name = DECL_ASSEMBLER_NAME (decl);
  size_t len = IDENTIFIER_LENGTH (name);
  char *tmp_name, *prefix;

  prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
  memcpy (prefix, IDENTIFIER_POINTER (name), len);
  strcpy (prefix + len + 1, suffix);
#ifndef NO_DOT_IN_LABEL
  prefix[len] = '.';
#elif !defined NO_DOLLAR_IN_LABEL
  prefix[len] = '$';
#else
  prefix[len] = '_';
#endif
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
  return get_identifier (tmp_name);
}
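
/* On typical targets ASM_FORMAT_PRIVATE_NAME appends a separator and a unique
   number, so cloning "foo" with suffix "constprop" yields an assembler name
   such as "foo.constprop.3" (illustrative; the exact separator and numbering
   are target-dependent).  */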

/* Create callgraph node clone with new declaration.  The actual body will
   be copied later at compilation stage.

   TODO: after merging in ipa-sra use function call notes instead of args_to_skip
   bitmap interface.
   */
struct cgraph_node *
cgraph_create_virtual_clone (struct cgraph_node *old_node,
                             vec<cgraph_edge_p> redirect_callers,
                             vec<ipa_replace_map_p, va_gc> *tree_map,
                             bitmap args_to_skip,
                             const char * suffix)
{
  tree old_decl = old_node->decl;
  struct cgraph_node *new_node = NULL;
  tree new_decl;
  size_t len, i;
  struct ipa_replace_map *map;
  char *name;

  if (!in_lto_p)
    gcc_checking_assert (tree_versionable_function_p (old_decl));

  gcc_assert (old_node->local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip, false);

  /* These pointers represent function body and will be populated only when clone
     is materialized.  */
  gcc_assert (new_decl != old_decl);
  DECL_STRUCT_FUNCTION (new_decl) = NULL;
  DECL_ARGUMENTS (new_decl) = NULL;
  DECL_INITIAL (new_decl) = NULL;
  DECL_RESULT (new_decl) = NULL;
  /* We can not do DECL_RESULT (new_decl) = NULL; here because of LTO partitioning
     sometimes storing only clone decl instead of original.  */

  /* Generate a new name for the new version.  */
  len = IDENTIFIER_LENGTH (DECL_NAME (old_decl));
  name = XALLOCAVEC (char, len + strlen (suffix) + 2);
  memcpy (name, IDENTIFIER_POINTER (DECL_NAME (old_decl)), len);
  strcpy (name + len + 1, suffix);
  name[len] = '.';
  DECL_NAME (new_decl) = get_identifier (name);
  SET_DECL_ASSEMBLER_NAME (new_decl, clone_function_name (old_decl, suffix));
  SET_DECL_RTL (new_decl, NULL);

  new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
                                CGRAPH_FREQ_BASE, false,
                                redirect_callers, false, NULL, args_to_skip);
  /* Update the properties.
     Make the clone visible only within this translation unit.  Make sure
     it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  set_new_clone_decl_and_node_flags (new_node);
  new_node->clone.tree_map = tree_map;
  new_node->clone.args_to_skip = args_to_skip;

  /* Clones of global symbols or symbols with unique names are unique.  */
  if ((TREE_PUBLIC (old_decl)
       && !DECL_EXTERNAL (old_decl)
       && !DECL_WEAK (old_decl)
       && !DECL_COMDAT (old_decl))
      || in_lto_p)
    new_node->unique_name = true;
  FOR_EACH_VEC_SAFE_ELT (tree_map, i, map)
    ipa_maybe_record_reference (new_node, map->new_tree,
                                IPA_REF_ADDR, NULL);
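
  /* Compute combined_args_to_skip relative to the root function's parameter
     list.  Illustrative example: with root parameters (a, b, c, d), an
     OLD_NODE that already skips b (combined bit 1) and an ARGS_TO_SKIP that
     skips the second of the remaining parameters (a, c, d), i.e. bit 1, the
     new combined bitmap gets bits 1 and 2, skipping both b and c.  */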
  if (!args_to_skip)
    new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
  else if (old_node->clone.combined_args_to_skip)
    {
      int newi = 0, oldi = 0;
      tree arg;
      bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
      struct cgraph_node *orig_node;
      for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
        ;
      for (arg = DECL_ARGUMENTS (orig_node->decl);
           arg; arg = DECL_CHAIN (arg), oldi++)
        {
          if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
            {
              bitmap_set_bit (new_args_to_skip, oldi);
              continue;
            }
          if (bitmap_bit_p (args_to_skip, newi))
            bitmap_set_bit (new_args_to_skip, oldi);
          newi++;
        }
      new_node->clone.combined_args_to_skip = new_args_to_skip;
    }
  else
    new_node->clone.combined_args_to_skip = args_to_skip;
  if (old_node->ipa_transforms_to_apply.exists ())
    new_node->ipa_transforms_to_apply
      = old_node->ipa_transforms_to_apply.copy ();

  cgraph_call_node_duplication_hooks (old_node, new_node);

  return new_node;
}

/* NODE is being removed from the symbol table; see if its entry can be
   replaced by another inline clone.  */
struct cgraph_node *
cgraph_find_replacement_node (struct cgraph_node *node)
{
  struct cgraph_node *next_inline_clone, *replacement;

  for (next_inline_clone = node->clones;
       next_inline_clone
       && next_inline_clone->decl != node->decl;
       next_inline_clone = next_inline_clone->next_sibling_clone)
    ;

  /* If there is an inline clone of the node being removed, we need
     to put it into the position of the removed node and reorganize all
     other clones to be based on it.  */
  if (next_inline_clone)
    {
      struct cgraph_node *n;
      struct cgraph_node *new_clones;

      replacement = next_inline_clone;

      /* Unlink the inline clone from the list of clones of the removed node.  */
      if (next_inline_clone->next_sibling_clone)
        next_inline_clone->next_sibling_clone->prev_sibling_clone
          = next_inline_clone->prev_sibling_clone;
      if (next_inline_clone->prev_sibling_clone)
        {
          gcc_assert (node->clones != next_inline_clone);
          next_inline_clone->prev_sibling_clone->next_sibling_clone
            = next_inline_clone->next_sibling_clone;
        }
      else
        {
          gcc_assert (node->clones == next_inline_clone);
          node->clones = next_inline_clone->next_sibling_clone;
        }

      new_clones = node->clones;
      node->clones = NULL;

      /* Copy clone info.  */
      next_inline_clone->clone = node->clone;

      /* Now place it into the clone tree at the same level as NODE.  */
      next_inline_clone->clone_of = node->clone_of;
      next_inline_clone->prev_sibling_clone = NULL;
      next_inline_clone->next_sibling_clone = NULL;
      if (node->clone_of)
        {
          if (node->clone_of->clones)
            node->clone_of->clones->prev_sibling_clone = next_inline_clone;
          next_inline_clone->next_sibling_clone = node->clone_of->clones;
          node->clone_of->clones = next_inline_clone;
        }

      /* Merge the clone list.  */
      if (new_clones)
        {
          if (!next_inline_clone->clones)
            next_inline_clone->clones = new_clones;
          else
            {
              n = next_inline_clone->clones;
              while (n->next_sibling_clone)
                n = n->next_sibling_clone;
              n->next_sibling_clone = new_clones;
              new_clones->prev_sibling_clone = n;
            }
        }

      /* Update clone_of pointers.  */
      n = new_clones;
      while (n)
        {
          n->clone_of = next_inline_clone;
          n = n->next_sibling_clone;
        }
      return replacement;
    }
  else
    return NULL;
}

/* Like cgraph_set_call_stmt but walk the clone tree and update all
   clones sharing the same function body.
   When UPDATE_SPECULATIVE is true, all three components of a
   speculative edge get updated.  Otherwise we update only the direct
   call.  */

void
cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
                                       gimple old_stmt, gimple new_stmt,
                                       bool update_speculative)
{
  struct cgraph_node *node;
  struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);

  if (edge)
    cgraph_set_call_stmt (edge, new_stmt, update_speculative);

  node = orig->clones;
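
  /* Walk the clone tree in preorder without recursion: visit a node, descend
     into node->clones, advance along next_sibling_clone, and climb back up
     via clone_of until we are back at ORIG.  */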
  if (node)
    while (node != orig)
      {
        struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
        if (edge)
          {
            cgraph_set_call_stmt (edge, new_stmt, update_speculative);
            /* If UPDATE_SPECULATIVE is false, it means that we are turning
               speculative call into a real code sequence.  Update the
               callgraph edges.  */
            if (edge->speculative && !update_speculative)
              {
                struct cgraph_edge *direct, *indirect;
                struct ipa_ref *ref;

                gcc_assert (!edge->indirect_unknown_callee);
                cgraph_speculative_call_info (edge, direct, indirect, ref);
                direct->speculative = false;
                indirect->speculative = false;
                ref->speculative = false;
              }
          }
        if (node->clones)
          node = node->clones;
        else if (node->next_sibling_clone)
          node = node->next_sibling_clone;
        else
          {
            while (node != orig && !node->next_sibling_clone)
              node = node->clone_of;
            if (node != orig)
              node = node->next_sibling_clone;
          }
      }
}

/* Like cgraph_create_edge, walk the clone tree and update all clones sharing
   the same function body.  If clones already have an edge for OLD_STMT, only
   update the edge in the same way cgraph_set_call_stmt_including_clones does.

   TODO: COUNT and LOOP_DEPTH should be properly distributed based on the
   relative frequencies of the clones.  */

void
cgraph_create_edge_including_clones (struct cgraph_node *orig,
                                     struct cgraph_node *callee,
                                     gimple old_stmt,
                                     gimple stmt, gcov_type count,
                                     int freq,
                                     cgraph_inline_failed_t reason)
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;

  if (!cgraph_edge (orig, stmt))
    {
      edge = cgraph_create_edge (orig, callee, stmt, count, freq);
      edge->inline_failed = reason;
    }

  node = orig->clones;
  if (node)
    while (node != orig)
      {
        struct cgraph_edge *edge = cgraph_edge (node, old_stmt);

        /* It is possible that clones already contain the edge while
           the master didn't.  Either we promoted an indirect call into a
           direct call in the clone, or we are processing clones of an
           unreachable master where edges have been removed.  */
        if (edge)
          cgraph_set_call_stmt (edge, stmt);
        else if (!cgraph_edge (node, stmt))
          {
            edge = cgraph_create_edge (node, callee, stmt, count,
                                       freq);
            edge->inline_failed = reason;
          }

        if (node->clones)
          node = node->clones;
        else if (node->next_sibling_clone)
          node = node->next_sibling_clone;
        else
          {
            while (node != orig && !node->next_sibling_clone)
              node = node->clone_of;
            if (node != orig)
              node = node->next_sibling_clone;
          }
      }
}

/* Remove the node from the cgraph and all inline clones inlined into it.
   However, skip removal of FORBIDDEN_NODE and return true if it needs to be
   removed.  This allows the function to be called from an outer loop walking
   the clone tree.  */

bool
cgraph_remove_node_and_inline_clones (struct cgraph_node *node, struct cgraph_node *forbidden_node)
{
  struct cgraph_edge *e, *next;
  bool found = false;

  if (node == forbidden_node)
    {
      cgraph_remove_edge (node->callers);
      return true;
    }
  for (e = node->callees; e; e = next)
    {
      next = e->next_callee;
      if (!e->inline_failed)
        found |= cgraph_remove_node_and_inline_clones (e->callee, forbidden_node);
    }
  cgraph_remove_node (node);
  return found;
}

/* The edges representing the callers of the NEW_VERSION node were
   fixed by cgraph_function_versioning (); now the call_expr in their
   respective tree code should be updated to call the NEW_VERSION.  */

static void
update_call_expr (struct cgraph_node *new_version)
{
  struct cgraph_edge *e;

  gcc_assert (new_version);

  /* Update the call expr on the edges to call the new version.  */
  for (e = new_version->callers; e; e = e->next_caller)
    {
      struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
      gimple_call_set_fndecl (e->call_stmt, new_version->decl);
      maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
    }
}

/* Create a new cgraph node which is the new version of
   the OLD_VERSION node.  REDIRECT_CALLERS holds the caller
   edges which should be redirected to point to
   NEW_VERSION.  All the callee edges of OLD_VERSION
   are cloned to the new version node.  Return the new
   version node.

   If non-NULL, BBS_TO_COPY determines which basic blocks
   are copied, to prevent duplication of calls that are dead
   in the clone.  */

struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
                                 tree new_decl,
                                 vec<cgraph_edge_p> redirect_callers,
                                 bitmap bbs_to_copy)
{
  struct cgraph_node *new_version;
  struct cgraph_edge *e;
  unsigned i;

  gcc_assert (old_version);

  new_version = cgraph_create_node (new_decl);

  new_version->analyzed = old_version->analyzed;
  new_version->definition = old_version->definition;
  new_version->local = old_version->local;
  new_version->externally_visible = false;
  new_version->local.local = new_version->definition;
  new_version->global = old_version->global;
  new_version->rtl = old_version->rtl;
  new_version->count = old_version->count;

  for (e = old_version->callees; e; e = e->next_callee)
    if (!bbs_to_copy
        || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
                         e->lto_stmt_uid, REG_BR_PROB_BASE,
                         CGRAPH_FREQ_BASE,
                         true);
  for (e = old_version->indirect_calls; e; e = e->next_callee)
    if (!bbs_to_copy
        || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
                         e->lto_stmt_uid, REG_BR_PROB_BASE,
                         CGRAPH_FREQ_BASE,
                         true);
  FOR_EACH_VEC_ELT (redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
         version.  */
      cgraph_redirect_edge_callee (e, new_version);
    }

  cgraph_call_node_duplication_hooks (old_version, new_version);

  return new_version;
}

/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.

   If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
   from the new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
   If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.

   Return the new version's cgraph node.  */
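
/* For instance (illustrative only), a pass that wants a variant of a function
   with its second parameter removed and the return value dropped could pass
   an ARGS_TO_SKIP bitmap with bit 1 set, SKIP_RETURN set to true, a NULL
   TREE_MAP and no basic-block restrictions.  */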

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
                            vec<cgraph_edge_p> redirect_callers,
                            vec<ipa_replace_map_p, va_gc> *tree_map,
                            bitmap args_to_skip,
                            bool skip_return,
                            bitmap bbs_to_copy,
                            basic_block new_entry_block,
                            const char *clone_name)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node for the new version.  */
  if (!args_to_skip && !skip_return)
    new_decl = copy_node (old_decl);
  else
    new_decl
      = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  /* When the old decl was a con-/destructor make sure the clone isn't.  */
  DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
  DECL_STATIC_DESTRUCTOR (new_decl) = 0;

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
                                     redirect_callers, bbs_to_copy);

  if (old_version_node->ipa_transforms_to_apply.exists ())
    new_version_node->ipa_transforms_to_apply
      = old_version_node->ipa_transforms_to_apply.copy ();
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
                            skip_return, bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.  Make sure
     it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  symtab_make_decl_local (new_version_node->decl);
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;
  /* Clones of global symbols or symbols with unique names are unique.  */
  if ((TREE_PUBLIC (old_decl)
       && !DECL_EXTERNAL (old_decl)
       && !DECL_WEAK (old_decl)
       && !DECL_COMDAT (old_decl))
      || in_lto_p)
    new_version_node->unique_name = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}

/* Given a virtual clone, turn it into an actual clone.  */

static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  node->former_clone_of = node->clone_of->decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
                            node->clone.tree_map, true,
                            node->clone.args_to_skip, false,
                            NULL, NULL);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* The function is no longer a clone.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      cgraph_release_function_body (node->clone_of);
      cgraph_node_remove_callees (node->clone_of);
      ipa_remove_all_references (&node->clone_of->ref_list);
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}

/* Once all functions from the compilation unit are in memory, produce all
   clones and update all calls.  We might also do this on demand if we don't
   want to bring all functions into memory prior to compilation, but the
   current WHOPR implementation does that, and it is a bit easier to keep
   everything right in this order.  */

void
cgraph_materialize_all_clones (void)
{
  struct cgraph_node *node;
  bool stabilized = false;

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Materializing clones\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* We can also do topological order, but number of iterations should be
     bounded by number of IPA passes since single IPA pass is probably not
     going to create clones of clones it created itself.  */
  while (!stabilized)
    {
      stabilized = true;
      FOR_EACH_FUNCTION (node)
        {
          if (node->clone_of && node->decl != node->clone_of->decl
              && !gimple_has_body_p (node->decl))
            {
              if (!node->clone_of->clone_of)
                cgraph_get_body (node->clone_of);
              if (gimple_has_body_p (node->clone_of->decl))
                {
                  if (cgraph_dump_file)
                    {
                      fprintf (cgraph_dump_file, "cloning %s to %s\n",
                               xstrdup (node->clone_of->name ()),
                               xstrdup (node->name ()));
                      if (node->clone.tree_map)
                        {
                          unsigned int i;
                          fprintf (cgraph_dump_file, " replace map: ");
                          for (i = 0;
                               i < vec_safe_length (node->clone.tree_map);
                               i++)
                            {
                              struct ipa_replace_map *replace_info;
                              replace_info = (*node->clone.tree_map)[i];
                              print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
                              fprintf (cgraph_dump_file, " -> ");
                              print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
                              fprintf (cgraph_dump_file, "%s%s;",
                                       replace_info->replace_p ? "(replace)":"",
                                       replace_info->ref_p ? "(ref)":"");
                            }
                          fprintf (cgraph_dump_file, "\n");
                        }
                      if (node->clone.args_to_skip)
                        {
                          fprintf (cgraph_dump_file, " args_to_skip: ");
                          dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
                        }
                      if (node->clone.args_to_skip)
                        {
                          fprintf (cgraph_dump_file, " combined_args_to_skip:");
                          dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
                        }
                    }
                  cgraph_materialize_clone (node);
                  stabilized = false;
                }
            }
        }
    }
  FOR_EACH_FUNCTION (node)
    if (!node->analyzed && node->callees)
      {
        cgraph_node_remove_callees (node);
        ipa_remove_all_references (&node->ref_list);
      }
    else
      ipa_clear_stmts_in_references (node);
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  symtab_remove_unreachable_nodes (false, cgraph_dump_file);
}

#include "gt-cgraphclones.h"