/* Callgraph clones
   Copyright (C) 2003-2014 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module provides facilities for cloning functions, i.e. creating
   new functions based on existing functions with simple modifications,
   such as replacement of parameters.

   To allow whole program optimization without actual presence of function
   bodies, an additional infrastructure is provided for so-called virtual
   clones.

   A virtual clone in the callgraph is a function that has no
   associated body, just a description of how to create its body based
   on a different function (which itself may be a virtual clone).

   The description of function modifications includes adjustments to
   the function's signature (which allows, for example, removing or
   adding function arguments), substitutions to perform on the
   function body, and, for inlined functions, a pointer to the
   function that it will be inlined into.

   It is also possible to redirect any edge of the callgraph from a
   function to its virtual clone.  This implies updating of the call
   site to adjust for the new function signature.

   Most of the transformations performed by inter-procedural
   optimizations can be represented via virtual clones.  For
   instance, a constant propagation pass can produce a virtual clone
   of the function which replaces one of its arguments by a
   constant.  The inliner can represent its decisions by producing a
   clone of a function whose body will be later integrated into
   a given function.

   Using virtual clones, the program can be easily updated
   during the Execute stage, solving most of the pass interaction
   problems that would otherwise occur during Transform.

   Virtual clones are later materialized in the LTRANS stage and
   turned into real functions.  Passes executed after the virtual
   clone was introduced also perform their Transform stage
   on these new functions, so for a pass there is no significant
   difference between operating on a real function or a virtual
   clone introduced before its Execute stage.

   Optimization passes then work on virtual clones introduced before
   their Execute stage as if they were real functions.  The
   only difference is that clones are not visible during the
   Generate Summary stage.  */
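
/* An illustrative sketch (NODE, the REDIRECT vector and the "my_clone"
   suffix are placeholders, not part of this file): an IPA pass that wants
   a variant of NODE with its first parameter dropped can create a virtual
   clone and leave the body to be produced at materialization time:

     bitmap skip = BITMAP_GGC_ALLOC ();
     bitmap_set_bit (skip, 0);
     vec<cgraph_edge *> redirect = vNULL;  /* caller edges to redirect.  */
     cgraph_node *clone
       = node->create_virtual_clone (redirect, NULL, skip, "my_clone");

   The clone gets a real body only once cgraph_materialize_all_clones
   (below) copies and adjusts the body of NODE.  */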

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "function.h"
#include "emit-rtl.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "bitmap.h"
#include "tree-cfg.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "toplev.h"
#include "flags.h"
#include "debug.h"
#include "target.h"
#include "diagnostic.h"
#include "params.h"
#include "intl.h"
#include "function.h"
#include "ipa-prop.h"
#include "tree-iterator.h"
#include "tree-dump.h"
#include "gimple-pretty-print.h"
#include "coverage.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "lto-streamer.h"
#include "except.h"

/* Create a clone of edge E in the node N; the clone's call in the callgraph
   is represented by CALL_STMT.  */
struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
                   gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
                   int freq_scale, bool update_original)
{
  struct cgraph_edge *new_edge;
  gcov_type count = apply_probability (e->count, count_scale);
  gcov_type freq;

  /* We do not want to ignore loop nest after frequency drops to 0.  */
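  /* For instance, assuming CGRAPH_FREQ_BASE is 1000: scaling an edge of
     frequency 600 by FREQ_SCALE 500 below yields 300; the result is then
     capped at CGRAPH_FREQ_MAX.  */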
  if (!freq_scale)
    freq_scale = 1;
  freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;

  if (e->indirect_unknown_callee)
    {
      tree decl;

      if (call_stmt && (decl = gimple_call_fndecl (call_stmt))
          /* When the call is speculative, we need to resolve it
             via cgraph_resolve_speculation and not here.  */
          && !e->speculative)
        {
          struct cgraph_node *callee = cgraph_node::get (decl);
          gcc_checking_assert (callee);
          new_edge = n->create_edge (callee, call_stmt, count, freq);
        }
      else
        {
          new_edge = n->create_indirect_edge (call_stmt,
                                              e->indirect_info->ecf_flags,
                                              count, freq);
          *new_edge->indirect_info = *e->indirect_info;
        }
    }
  else
    {
      new_edge = n->create_edge (e->callee, call_stmt, count, freq);
      if (e->indirect_info)
        {
          new_edge->indirect_info
            = ggc_cleared_alloc<cgraph_indirect_call_info> ();
          *new_edge->indirect_info = *e->indirect_info;
        }
    }

  new_edge->inline_failed = e->inline_failed;
  new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
  new_edge->lto_stmt_uid = stmt_uid;
  /* Clone flags that depend on call_stmt availability manually.  */
  new_edge->can_throw_external = e->can_throw_external;
  new_edge->call_stmt_cannot_inline_p = e->call_stmt_cannot_inline_p;
  new_edge->speculative = e->speculative;
  if (update_original)
    {
      e->count -= new_edge->count;
      if (e->count < 0)
        e->count = 0;
    }
  cgraph_call_edge_duplication_hooks (e, new_edge);
  return new_edge;
}

/* Build variant of function type ORIG_TYPE skipping ARGS_TO_SKIP and the
   return value if SKIP_RETURN is true.  */
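
/* For example, skipping argument 1 of "int f (int, double, char)" yields
   "int f (int, char)"; with SKIP_RETURN also set the result becomes
   "void f (int, char)".  For a METHOD_TYPE whose THIS argument (index 0)
   is skipped, a plain FUNCTION_TYPE is built instead.  */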

static tree
build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
                               bool skip_return)
{
  tree new_type = NULL;
  tree args, new_args = NULL, t;
  tree new_reversed;
  int i = 0;

  for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node;
       args = TREE_CHAIN (args), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args);

  new_reversed = nreverse (new_args);
  if (args)
    {
      if (new_reversed)
        TREE_CHAIN (new_args) = void_list_node;
      else
        new_reversed = void_list_node;
    }

  /* Use copy_node to preserve as much as possible from the original type
     (debug info, attribute lists etc.).
     The exception is METHOD_TYPEs, which must have the THIS argument.
     When we are asked to remove it, we need to build a new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || !args_to_skip
      || !bitmap_bit_p (args_to_skip, 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
        = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
                                                         new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
    }

  if (skip_return)
    TREE_TYPE (new_type) = void_type_node;

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (orig_type);
  if (t != orig_type)
    {
      t = build_function_type_skip_args (t, args_to_skip, skip_return);
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  return new_type;
}

/* Build variant of function decl ORIG_DECL skipping ARGS_TO_SKIP and the
   return value if SKIP_RETURN is true.

   Arguments from DECL_ARGUMENTS list can't be removed now, since they are
   linked by TREE_CHAIN directly.  The caller is responsible for eliminating
   them when they are being duplicated (i.e. copy_arguments_for_versioning).  */

static tree
build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip,
                               bool skip_return)
{
  tree new_decl = copy_node (orig_decl);
  tree new_type;

  new_type = TREE_TYPE (orig_decl);
  if (prototype_p (new_type)
      || (skip_return && !VOID_TYPE_P (TREE_TYPE (new_type))))
    new_type
      = build_function_type_skip_args (new_type, args_to_skip, skip_return);
  TREE_TYPE (new_decl) = new_type;

  /* For declarations setting DECL_VINDEX (i.e. methods)
     we expect the first argument to be the THIS pointer.  */
  if (args_to_skip && bitmap_bit_p (args_to_skip, 0))
    DECL_VINDEX (new_decl) = NULL_TREE;

  /* When the signature changes, we need to clear the builtin info.  */
  if (DECL_BUILT_IN (new_decl)
      && args_to_skip
      && !bitmap_empty_p (args_to_skip))
    {
      DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
    }
  /* The FE might have information and assumptions about the other
     arguments.  */
  DECL_LANG_SPECIFIC (new_decl) = NULL;
  return new_decl;
}

/* Set flags of NEW_NODE and its decl.  NEW_NODE is a newly created private
   clone or its thunk.  */

static void
set_new_clone_decl_and_node_flags (cgraph_node *new_node)
{
  DECL_EXTERNAL (new_node->decl) = 0;
  TREE_PUBLIC (new_node->decl) = 0;
  DECL_COMDAT (new_node->decl) = 0;
  DECL_WEAK (new_node->decl) = 0;
  DECL_VIRTUAL_P (new_node->decl) = 0;
  DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
  DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;

  new_node->externally_visible = 0;
  new_node->local.local = 1;
  new_node->lowered = true;
}

/* Duplicate thunk THUNK if necessary but make it refer to NODE.
   ARGS_TO_SKIP, if non-NULL, determines which parameters should be omitted.
   The function can return NODE itself if no thunk is necessary, which happens
   when the thunk is this_adjusting but the this parameter is being removed.  */
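
/* For example, a C++ multiple-inheritance thunk that is this_adjusting with
   some fixed_offset (say -16; the value is only illustrative) merely shifts
   the THIS pointer before transferring control to its target.  When the
   target is cloned with the THIS parameter removed, no thunk is needed at
   all; otherwise an equivalent thunk whose single call edge points at the
   clone is built below.  */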

static cgraph_node *
duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
{
  cgraph_node *new_thunk, *thunk_of;
  thunk_of = thunk->callees->callee->ultimate_alias_target ();

  if (thunk_of->thunk.thunk_p)
    node = duplicate_thunk_for_node (thunk_of, node);

  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (cs->caller->thunk.thunk_p
        && cs->caller->thunk.this_adjusting == thunk->thunk.this_adjusting
        && cs->caller->thunk.fixed_offset == thunk->thunk.fixed_offset
        && cs->caller->thunk.virtual_offset_p == thunk->thunk.virtual_offset_p
        && cs->caller->thunk.virtual_value == thunk->thunk.virtual_value)
      return cs->caller;

  tree new_decl;
  if (!node->clone.args_to_skip)
    new_decl = copy_node (thunk->decl);
  else
    {
      /* We do not need to duplicate this_adjusting thunks if we have removed
         this.  */
      if (thunk->thunk.this_adjusting
          && bitmap_bit_p (node->clone.args_to_skip, 0))
        return node;

      new_decl = build_function_decl_skip_args (thunk->decl,
                                                node->clone.args_to_skip,
                                                false);
    }

  gcc_checking_assert (!DECL_STRUCT_FUNCTION (new_decl));
  gcc_checking_assert (!DECL_INITIAL (new_decl));
  gcc_checking_assert (!DECL_RESULT (new_decl));
  gcc_checking_assert (!DECL_RTL_SET_P (new_decl));

  DECL_NAME (new_decl) = clone_function_name (thunk->decl, "artificial_thunk");
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));

  new_thunk = cgraph_node::create (new_decl);
  set_new_clone_decl_and_node_flags (new_thunk);
  new_thunk->definition = true;
  new_thunk->thunk = thunk->thunk;
  new_thunk->unique_name = in_lto_p;
  new_thunk->former_clone_of = thunk->decl;
  new_thunk->clone.args_to_skip = node->clone.args_to_skip;
  new_thunk->clone.combined_args_to_skip = node->clone.combined_args_to_skip;

  struct cgraph_edge *e = new_thunk->create_edge (node, NULL, 0,
                                                  CGRAPH_FREQ_BASE);
  e->call_stmt_cannot_inline_p = true;
  cgraph_call_edge_duplication_hooks (thunk->callees, e);
  if (!new_thunk->expand_thunk (false, false))
    new_thunk->analyzed = true;
  thunk->call_duplication_hooks (new_thunk);
  return new_thunk;
}

/* If E does not lead to a thunk, simply redirect it to N.  Otherwise create
   one or more equivalent thunks for N and redirect E to the first in the
   chain.  */

void
redirect_edge_duplicating_thunks (struct cgraph_edge *e, struct cgraph_node *n)
{
  cgraph_node *orig_to = e->callee->ultimate_alias_target ();
  if (orig_to->thunk.thunk_p)
    n = duplicate_thunk_for_node (orig_to, n);

  cgraph_redirect_edge_callee (e, n);
}

/* Create node representing clone of N executed COUNT times.  Decrease
   the execution counts from the original node too.
   The new clone will have decl set to DECL that may or may not be the same
   as the decl of N.

   When UPDATE_ORIGINAL is true, the counts are subtracted from the original
   function's profile to reflect the fact that part of the execution is
   handled by the clone.
   When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
   the new clone.  Otherwise the caller is responsible for doing so later.

   If the new node is being inlined into another one, NEW_INLINED_TO should be
   the outline function the new one is (even indirectly) inlined to.  All hooks
   will see this in the node's global.inlined_to, when invoked.  Can be NULL if
   the node is not inlined.  */

cgraph_node *
cgraph_node::create_clone (tree decl, gcov_type gcov_count, int freq,
                           bool update_original,
                           vec<cgraph_edge *> redirect_callers,
                           bool call_duplication_hook,
                           struct cgraph_node *new_inlined_to,
                           bitmap args_to_skip)
{
  struct cgraph_node *new_node = cgraph_node::create_empty ();
  struct cgraph_edge *e;
  gcov_type count_scale;
  unsigned i;

  new_node->decl = decl;
  new_node->register_symbol ();
  new_node->origin = origin;
  new_node->lto_file_data = lto_file_data;
  if (new_node->origin)
    {
      new_node->next_nested = new_node->origin->nested;
      new_node->origin->nested = new_node;
    }
  new_node->analyzed = analyzed;
  new_node->definition = definition;
  new_node->local = local;
  new_node->externally_visible = false;
  new_node->local.local = true;
  new_node->global = global;
  new_node->global.inlined_to = new_inlined_to;
  new_node->rtl = rtl;
  new_node->count = count;
  new_node->frequency = frequency;
  new_node->tp_first_run = tp_first_run;

  new_node->clone.tree_map = NULL;
  new_node->clone.args_to_skip = args_to_skip;
  if (!args_to_skip)
    new_node->clone.combined_args_to_skip = clone.combined_args_to_skip;
  else if (clone.combined_args_to_skip)
    {
      new_node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
      bitmap_ior (new_node->clone.combined_args_to_skip,
                  clone.combined_args_to_skip, args_to_skip);
    }
  else
    new_node->clone.combined_args_to_skip = args_to_skip;

  if (count)
    {
      if (new_node->count > count)
        count_scale = REG_BR_PROB_BASE;
      else
        count_scale = GCOV_COMPUTE_SCALE (new_node->count, count);
    }
  else
    count_scale = 0;
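  /* For instance, assuming REG_BR_PROB_BASE is 10000:
     GCOV_COMPUTE_SCALE (300, 1000) is 3000, i.e. a 30% probability that
     apply_probability in cgraph_clone_edge later uses to scale each
     cloned edge count.  */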
  if (update_original)
    {
      count -= gcov_count;
      if (count < 0)
        count = 0;
    }

  FOR_EACH_VEC_ELT (redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
         version.  The only exception is when the edge was proved to
         be unreachable during the cloning procedure.  */
      if (!e->callee
          || DECL_BUILT_IN_CLASS (e->callee->decl) != BUILT_IN_NORMAL
          || DECL_FUNCTION_CODE (e->callee->decl) != BUILT_IN_UNREACHABLE)
        redirect_edge_duplicating_thunks (e, new_node);
    }

  for (e = callees; e; e = e->next_callee)
    cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
                       count_scale, freq, update_original);

  for (e = indirect_calls; e; e = e->next_callee)
    cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
                       count_scale, freq, update_original);
  new_node->clone_references (this);

  new_node->next_sibling_clone = clones;
  if (clones)
    clones->prev_sibling_clone = new_node;
  clones = new_node;
  new_node->clone_of = this;

  if (call_duplication_hook)
    call_duplication_hooks (new_node);
  return new_node;
}

/* Return a new assembler name for a clone of DECL with SUFFIX.  */
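
/* With the default ASM_FORMAT_PRIVATE_NAME the result has the form
   "<original>.<suffix>.<number>", e.g. "foo.constprop.0" for suffix
   "constprop"; targets that do not allow '.' in labels use '$' or '_'
   as the separator instead.  */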

static GTY(()) unsigned int clone_fn_id_num;

tree
clone_function_name (tree decl, const char *suffix)
{
  tree name = DECL_ASSEMBLER_NAME (decl);
  size_t len = IDENTIFIER_LENGTH (name);
  char *tmp_name, *prefix;

  prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
  memcpy (prefix, IDENTIFIER_POINTER (name), len);
  strcpy (prefix + len + 1, suffix);
#ifndef NO_DOT_IN_LABEL
  prefix[len] = '.';
#elif !defined NO_DOLLAR_IN_LABEL
  prefix[len] = '$';
#else
  prefix[len] = '_';
#endif
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
  return get_identifier (tmp_name);
}

/* Create callgraph node clone with new declaration.  The actual body will
   be copied later at compilation stage.

   TODO: after merging in ipa-sra use function call notes instead of
   args_to_skip bitmap interface.  */

struct cgraph_node *
cgraph_node::create_virtual_clone (vec<cgraph_edge *> redirect_callers,
                                   vec<ipa_replace_map *, va_gc> *tree_map,
                                   bitmap args_to_skip, const char * suffix)
{
  tree old_decl = decl;
  struct cgraph_node *new_node = NULL;
  tree new_decl;
  size_t len, i;
  struct ipa_replace_map *map;
  char *name;

  if (!in_lto_p)
    gcc_checking_assert (tree_versionable_function_p (old_decl));

  gcc_assert (local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip, false);

  /* These pointers represent the function body and will be populated only
     when the clone is materialized.  */
  gcc_assert (new_decl != old_decl);
  DECL_STRUCT_FUNCTION (new_decl) = NULL;
  DECL_ARGUMENTS (new_decl) = NULL;
  DECL_INITIAL (new_decl) = NULL;
  DECL_RESULT (new_decl) = NULL;
  /* We can not do DECL_RESULT (new_decl) = NULL; here because of LTO
     partitioning sometimes storing only the clone decl instead of the
     original.  */

  /* Generate a new name for the new version.  */
  len = IDENTIFIER_LENGTH (DECL_NAME (old_decl));
  name = XALLOCAVEC (char, len + strlen (suffix) + 2);
  memcpy (name, IDENTIFIER_POINTER (DECL_NAME (old_decl)), len);
  strcpy (name + len + 1, suffix);
  name[len] = '.';
  DECL_NAME (new_decl) = get_identifier (name);
  SET_DECL_ASSEMBLER_NAME (new_decl, clone_function_name (old_decl, suffix));
  SET_DECL_RTL (new_decl, NULL);

  new_node = create_clone (new_decl, count, CGRAPH_FREQ_BASE, false,
                           redirect_callers, false, NULL, args_to_skip);

  /* Update the properties.
     Make the clone visible only within this translation unit.  Make sure
     that it is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  set_new_clone_decl_and_node_flags (new_node);
  new_node->clone.tree_map = tree_map;

  /* Clones of global symbols or symbols with unique names are unique.  */
  if ((TREE_PUBLIC (old_decl)
       && !DECL_EXTERNAL (old_decl)
       && !DECL_WEAK (old_decl)
       && !DECL_COMDAT (old_decl))
      || in_lto_p)
    new_node->unique_name = true;
  FOR_EACH_VEC_SAFE_ELT (tree_map, i, map)
    new_node->maybe_add_reference (map->new_tree, IPA_REF_ADDR, NULL);

  if (ipa_transforms_to_apply.exists ())
    new_node->ipa_transforms_to_apply
      = ipa_transforms_to_apply.copy ();

  call_duplication_hooks (new_node);

  return new_node;
}

/* The node is being removed from the symbol table; see if its entry can be
   replaced by another inline clone.  */
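
/* The clone tree is linked through clone_of (the parent), clones (the first
   child) and prev/next_sibling_clone (the sibling list).  The code below
   unlinks the chosen inline clone from this node's list of clones, puts it
   in this node's place in the tree and re-parents the remaining clones to
   it.  */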

cgraph_node *
cgraph_node::find_replacement (void)
{
  struct cgraph_node *next_inline_clone, *replacement;

  for (next_inline_clone = clones;
       next_inline_clone
       && next_inline_clone->decl != decl;
       next_inline_clone = next_inline_clone->next_sibling_clone)
    ;

  /* If there is an inline clone of the node being removed, we need
     to put it into the position of the removed node and reorganize all
     other clones to be based on it.  */
  if (next_inline_clone)
    {
      struct cgraph_node *n;
      struct cgraph_node *new_clones;

      replacement = next_inline_clone;

      /* Unlink the inline clone from the list of clones of the removed
         node.  */
      if (next_inline_clone->next_sibling_clone)
        next_inline_clone->next_sibling_clone->prev_sibling_clone
          = next_inline_clone->prev_sibling_clone;
      if (next_inline_clone->prev_sibling_clone)
        {
          gcc_assert (clones != next_inline_clone);
          next_inline_clone->prev_sibling_clone->next_sibling_clone
            = next_inline_clone->next_sibling_clone;
        }
      else
        {
          gcc_assert (clones == next_inline_clone);
          clones = next_inline_clone->next_sibling_clone;
        }

      new_clones = clones;
      clones = NULL;

      /* Copy clone info.  */
      next_inline_clone->clone = clone;

      /* Now place it into the clone tree at the same level as NODE.  */
      next_inline_clone->clone_of = clone_of;
      next_inline_clone->prev_sibling_clone = NULL;
      next_inline_clone->next_sibling_clone = NULL;
      if (clone_of)
        {
          if (clone_of->clones)
            clone_of->clones->prev_sibling_clone = next_inline_clone;
          next_inline_clone->next_sibling_clone = clone_of->clones;
          clone_of->clones = next_inline_clone;
        }

      /* Merge the clone list.  */
      if (new_clones)
        {
          if (!next_inline_clone->clones)
            next_inline_clone->clones = new_clones;
          else
            {
              n = next_inline_clone->clones;
              while (n->next_sibling_clone)
                n = n->next_sibling_clone;
              n->next_sibling_clone = new_clones;
              new_clones->prev_sibling_clone = n;
            }
        }

      /* Update clone_of pointers.  */
      n = new_clones;
      while (n)
        {
          n->clone_of = next_inline_clone;
          n = n->next_sibling_clone;
        }
      return replacement;
    }
  else
    return NULL;
}

/* Like cgraph_set_call_stmt but walk the clone tree and update all
   clones sharing the same function body.
   When UPDATE_SPECULATIVE is true, all three components of a
   speculative edge get updated.  Otherwise we update only the direct
   call.  */
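
/* Note that the loop below walks the whole clone tree rooted at this node
   in preorder without recursion: it descends into clones first, then
   follows next_sibling_clone, and climbs back through clone_of once a
   subtree is exhausted.  */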

void
cgraph_node::set_call_stmt_including_clones (gimple old_stmt, gimple new_stmt,
                                             bool update_speculative)
{
  struct cgraph_node *node;
  struct cgraph_edge *edge = get_edge (old_stmt);

  if (edge)
    cgraph_set_call_stmt (edge, new_stmt, update_speculative);

  node = clones;
  if (node)
    while (node != this)
      {
        struct cgraph_edge *edge = node->get_edge (old_stmt);
        if (edge)
          {
            cgraph_set_call_stmt (edge, new_stmt, update_speculative);
            /* If UPDATE_SPECULATIVE is false, it means that we are turning
               a speculative call into a real code sequence.  Update the
               callgraph edges.  */
            if (edge->speculative && !update_speculative)
              {
                struct cgraph_edge *direct, *indirect;
                struct ipa_ref *ref;

                gcc_assert (!edge->indirect_unknown_callee);
                cgraph_speculative_call_info (edge, direct, indirect, ref);
                direct->speculative = false;
                indirect->speculative = false;
                ref->speculative = false;
              }
          }
        if (node->clones)
          node = node->clones;
        else if (node->next_sibling_clone)
          node = node->next_sibling_clone;
        else
          {
            while (node != this && !node->next_sibling_clone)
              node = node->clone_of;
            if (node != this)
              node = node->next_sibling_clone;
          }
      }
}

/* Like cgraph_create_edge walk the clone tree and update all clones sharing
   the same function body.  If clones already have an edge for OLD_STMT, only
   update the edge in the same way cgraph_set_call_stmt_including_clones does.

   TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
   frequencies of the clones.  */

void
cgraph_node::create_edge_including_clones (struct cgraph_node *callee,
                                           gimple old_stmt, gimple stmt,
                                           gcov_type count,
                                           int freq,
                                           cgraph_inline_failed_t reason)
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;

  if (!get_edge (stmt))
    {
      edge = create_edge (callee, stmt, count, freq);
      edge->inline_failed = reason;
    }

  node = clones;
  if (node)
    while (node != this)
      {
        struct cgraph_edge *edge = node->get_edge (old_stmt);

        /* It is possible that clones already contain the edge while
           the master didn't.  Either we promoted an indirect call into a
           direct call in the clone or we are processing clones of an
           unreachable master where edges have been removed.  */
        if (edge)
          cgraph_set_call_stmt (edge, stmt);
        else if (! node->get_edge (stmt))
          {
            edge = node->create_edge (callee, stmt, count, freq);
            edge->inline_failed = reason;
          }

        if (node->clones)
          node = node->clones;
        else if (node->next_sibling_clone)
          node = node->next_sibling_clone;
        else
          {
            while (node != this && !node->next_sibling_clone)
              node = node->clone_of;
            if (node != this)
              node = node->next_sibling_clone;
          }
      }
}

/* Remove the node from the cgraph and all inline clones inlined into it.
   Skip, however, removal of FORBIDDEN_NODE; return true if it needs to be
   removed.  This allows the function to be called from an outer loop walking
   the clone tree.  */

bool
cgraph_node::remove_symbol_and_inline_clones (cgraph_node *forbidden_node)
{
  struct cgraph_edge *e, *next;
  bool found = false;

  if (this == forbidden_node)
    {
      cgraph_remove_edge (callers);
      return true;
    }
  for (e = callees; e; e = next)
    {
      next = e->next_callee;
      if (!e->inline_failed)
        found |= e->callee->remove_symbol_and_inline_clones (forbidden_node);
    }
  remove ();
  return found;
}

/* The edges representing the callers of the NEW_VERSION node were
   fixed by cgraph_function_versioning (), now the call_expr in their
   respective tree code should be updated to call the NEW_VERSION.  */

static void
update_call_expr (struct cgraph_node *new_version)
{
  struct cgraph_edge *e;

  gcc_assert (new_version);

  /* Update the call expr on the edges to call the new version.  */
  for (e = new_version->callers; e; e = e->next_caller)
    {
      struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
      gimple_call_set_fndecl (e->call_stmt, new_version->decl);
      maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
    }
}

/* Create a new cgraph node which is the new version of
   the callgraph node.  REDIRECT_CALLERS holds the caller
   edges which should be redirected to point to
   NEW_VERSION.  ALL the callee edges of the node
   are cloned to the new version node.  Return the new
   version node.

   If non-NULL, BBS_TO_COPY determines which basic blocks
   are copied, to prevent duplication of calls that are dead
   in the clone.  */

cgraph_node *
cgraph_node::create_version_clone (tree new_decl,
                                   vec<cgraph_edge *> redirect_callers,
                                   bitmap bbs_to_copy)
{
  struct cgraph_node *new_version;
  struct cgraph_edge *e;
  unsigned i;

  new_version = cgraph_node::create (new_decl);

  new_version->analyzed = analyzed;
  new_version->definition = definition;
  new_version->local = local;
  new_version->externally_visible = false;
  new_version->local.local = new_version->definition;
  new_version->global = global;
  new_version->rtl = rtl;
  new_version->count = count;

  for (e = callees; e; e = e->next_callee)
    if (!bbs_to_copy
        || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
                         e->lto_stmt_uid, REG_BR_PROB_BASE,
                         CGRAPH_FREQ_BASE,
                         true);
  for (e = indirect_calls; e; e = e->next_callee)
    if (!bbs_to_copy
        || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
                         e->lto_stmt_uid, REG_BR_PROB_BASE,
                         CGRAPH_FREQ_BASE,
                         true);
  FOR_EACH_VEC_ELT (redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
         version.  */
      cgraph_redirect_edge_callee (e, new_version);
    }

  call_duplication_hooks (new_version);

  return new_version;
}

/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).

   If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
   from the new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
   If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.

   Return the new version's cgraph node.  */
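
/* An illustrative use (a sketch only; NODE, the bitmap and the "my_version"
   suffix are placeholders): produce a real copy of NODE that drops its first
   parameter and returns void, redirecting no callers:

     bitmap skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (skip, 0);
     cgraph_node *copy
       = node->create_version_clone_with_body (vNULL, NULL, skip, true,
                                               NULL, NULL, "my_version");
*/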

cgraph_node *
cgraph_node::create_version_clone_with_body
  (vec<cgraph_edge *> redirect_callers,
   vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
   bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
   const char *clone_name)
{
  tree old_decl = decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  gcc_assert (local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node for the new version.  */
  if (!args_to_skip && !skip_return)
    new_decl = copy_node (old_decl);
  else
    new_decl
      = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  /* When the old decl was a con-/destructor make sure the clone isn't.  */
  DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
  DECL_STATIC_DESTRUCTOR (new_decl) = 0;

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node = create_version_clone (new_decl, redirect_callers,
                                           bbs_to_copy);

  if (ipa_transforms_to_apply.exists ())
    new_version_node->ipa_transforms_to_apply
      = ipa_transforms_to_apply.copy ();
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
                            skip_return, bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.  Make
     sure that it is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  new_version_node->make_decl_local ();
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;
  /* Clones of global symbols or symbols with unique names are unique.  */
  if ((TREE_PUBLIC (old_decl)
       && !DECL_EXTERNAL (old_decl)
       && !DECL_WEAK (old_decl)
       && !DECL_COMDAT (old_decl))
      || in_lto_p)
    new_version_node->unique_name = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  new_version_node->call_function_insertion_hooks ();
  return new_version_node;
}

/* Given a virtual clone, turn it into an actual clone.  */

static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  node->former_clone_of = node->clone_of->decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
                            node->clone.tree_map, true,
                            node->clone.args_to_skip, false,
                            NULL, NULL);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file,
                             dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* The function is no longer a clone.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      node->clone_of->release_body ();
      node->clone_of->remove_callees ();
      node->clone_of->remove_all_references ();
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}

/* Once all functions from the compilation unit are in memory, produce all
   clones and update all calls.  We might also do this on demand if we don't
   want to bring all functions to memory prior to compilation, but the current
   WHOPR implementation does that and it is a bit easier to keep everything
   right in this order.  */

void
cgraph_materialize_all_clones (void)
{
  struct cgraph_node *node;
  bool stabilized = false;

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Materializing clones\n");
#ifdef ENABLE_CHECKING
  cgraph_node::verify_cgraph_nodes ();
#endif

  /* We can also do topological order, but number of iterations should be
     bounded by number of IPA passes since single IPA pass is probably not
     going to create clones of clones it created itself.  */
  while (!stabilized)
    {
      stabilized = true;
      FOR_EACH_FUNCTION (node)
        {
          if (node->clone_of && node->decl != node->clone_of->decl
              && !gimple_has_body_p (node->decl))
            {
              if (!node->clone_of->clone_of)
                node->clone_of->get_body ();
              if (gimple_has_body_p (node->clone_of->decl))
                {
                  if (cgraph_dump_file)
                    {
                      fprintf (cgraph_dump_file, "cloning %s to %s\n",
                               xstrdup (node->clone_of->name ()),
                               xstrdup (node->name ()));
                      if (node->clone.tree_map)
                        {
                          unsigned int i;
                          fprintf (cgraph_dump_file, " replace map: ");
                          for (i = 0;
                               i < vec_safe_length (node->clone.tree_map);
                               i++)
                            {
                              struct ipa_replace_map *replace_info;
                              replace_info = (*node->clone.tree_map)[i];
                              print_generic_expr (cgraph_dump_file,
                                                  replace_info->old_tree, 0);
                              fprintf (cgraph_dump_file, " -> ");
                              print_generic_expr (cgraph_dump_file,
                                                  replace_info->new_tree, 0);
                              fprintf (cgraph_dump_file, "%s%s;",
                                       replace_info->replace_p ? "(replace)":"",
                                       replace_info->ref_p ? "(ref)":"");
                            }
                          fprintf (cgraph_dump_file, "\n");
                        }
                      if (node->clone.args_to_skip)
                        {
                          fprintf (cgraph_dump_file, " args_to_skip: ");
                          dump_bitmap (cgraph_dump_file,
                                       node->clone.args_to_skip);
                        }
                      if (node->clone.args_to_skip)
                        {
                          fprintf (cgraph_dump_file,
                                   " combined_args_to_skip:");
                          dump_bitmap (cgraph_dump_file,
                                       node->clone.combined_args_to_skip);
                        }
                    }
                  cgraph_materialize_clone (node);
                  stabilized = false;
                }
            }
        }
    }
  FOR_EACH_FUNCTION (node)
    if (!node->analyzed && node->callees)
      {
        node->remove_callees ();
        node->remove_all_references ();
      }
    else
      node->clear_stmts_in_references ();
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
#ifdef ENABLE_CHECKING
  cgraph_node::verify_cgraph_nodes ();
#endif
  symtab_remove_unreachable_nodes (false, cgraph_dump_file);
}

#include "gt-cgraphclones.h"