[official-gcc.git] / gcc / cgraphclones.c
1 /* Callgraph clones
2 Copyright (C) 2003-2016 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This module provides facilities for cloning functions, i.e. creating
22 new functions based on existing functions with simple modifications,
23 such as replacement of parameters.
25 To allow whole program optimization without the actual presence of
26 function bodies, an additional infrastructure is provided for so-called
27 virtual clones.
29 A virtual clone in the callgraph is a function that has no
30 associated body, just a description of how to create its body based
31 on a different function (which itself may be a virtual clone).
33 The description of function modifications includes adjustments to
34 the function's signature (which allows, for example, removing or
35 adding function arguments), substitutions to perform on the
36 function body, and, for inlined functions, a pointer to the
37 function that it will be inlined into.
39 It is also possible to redirect any edge of the callgraph from a
40 function to its virtual clone. This implies updating of the call
41 site to adjust for the new function signature.
43 Most of the transformations performed by inter-procedural
44 optimizations can be represented via virtual clones. For
45 instance, a constant propagation pass can produce a virtual clone
46 of the function which replaces one of its arguments by a
47 constant. The inliner can represent its decisions by producing a
48 clone of a function whose body will be later integrated into
49 a given function.
51 Using virtual clones, the program can be easily updated
52 during the Execute stage, solving most of the pass interaction
53 problems that would otherwise occur during Transform.
55 Virtual clones are later materialized in the LTRANS stage and
56 turned into real functions. Passes executed after the virtual
58 clone was introduced also perform their Transform stage
59 on the new functions, so for a pass there is no significant
59 difference between operating on a real function or a virtual
60 clone introduced before its Execute stage.
62 Optimization passes then work on virtual clones introduced before
63 their Execute stage as if they were real functions. The
64 only difference is that clones are not visible during the
65 Generate Summary stage. */
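/* For illustration (a rough sketch with made-up names): an IPA pass such
   as constant propagation may record a virtual clone along these lines:

       original:  int foo (int a, int b)
       clone:     "foo.constprop.0" -- no body yet; its description says
                  "copy foo, replace uses of B by the constant 7 and
                  drop parameter B"

   and then redirect selected call edges from foo to the clone.  The body
   is only built when the clone is materialized (see
   cgraph_materialize_clone and symbol_table::materialize_all_clones
   below); the clone's assembler name comes from clone_function_name.  */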
67 #include "config.h"
68 #include "system.h"
69 #include "coretypes.h"
70 #include "backend.h"
71 #include "target.h"
72 #include "rtl.h"
73 #include "tree.h"
74 #include "gimple.h"
75 #include "stringpool.h"
76 #include "cgraph.h"
77 #include "lto-streamer.h"
78 #include "tree-eh.h"
79 #include "tree-cfg.h"
80 #include "tree-inline.h"
81 #include "tree-dump.h"
82 #include "gimple-pretty-print.h"
84 /* Create a clone of this edge in the callgraph node N, associated with
85 call statement CALL_STMT. */
87 cgraph_edge *
88 cgraph_edge::clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
89 gcov_type count_scale, int freq_scale, bool update_original)
91 cgraph_edge *new_edge;
92 gcov_type gcov_count = apply_probability (count, count_scale);
93 gcov_type freq;
95 /* We do not want to ignore loop nest after frequency drops to 0. */
96 if (!freq_scale)
97 freq_scale = 1;
98 freq = frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
99 if (freq > CGRAPH_FREQ_MAX)
100 freq = CGRAPH_FREQ_MAX;
102 if (indirect_unknown_callee)
104 tree decl;
106 if (call_stmt && (decl = gimple_call_fndecl (call_stmt))
107 /* When the call is speculative, we need to resolve it
108 via cgraph_resolve_speculation and not here. */
109 && !speculative)
111 cgraph_node *callee = cgraph_node::get (decl);
112 gcc_checking_assert (callee);
113 new_edge = n->create_edge (callee, call_stmt, gcov_count, freq);
115 else
117 new_edge = n->create_indirect_edge (call_stmt,
118 indirect_info->ecf_flags,
119 count, freq, false);
120 *new_edge->indirect_info = *indirect_info;
123 else
125 new_edge = n->create_edge (callee, call_stmt, gcov_count, freq);
126 if (indirect_info)
128 new_edge->indirect_info
129 = ggc_cleared_alloc<cgraph_indirect_call_info> ();
130 *new_edge->indirect_info = *indirect_info;
134 new_edge->inline_failed = inline_failed;
135 new_edge->indirect_inlining_edge = indirect_inlining_edge;
136 new_edge->lto_stmt_uid = stmt_uid;
137 /* Manually clone the flags that depend on call_stmt availability. */
138 new_edge->can_throw_external = can_throw_external;
139 new_edge->call_stmt_cannot_inline_p = call_stmt_cannot_inline_p;
140 new_edge->speculative = speculative;
141 new_edge->in_polymorphic_cdtor = in_polymorphic_cdtor;
142 if (update_original)
144 count -= new_edge->count;
145 if (count < 0)
146 count = 0;
148 symtab->call_edge_duplication_hooks (this, new_edge);
149 return new_edge;
152 /* Build variant of function type ORIG_TYPE skipping ARGS_TO_SKIP and the
153 return value if SKIP_RETURN is true. */
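/* For example (illustrative only): given ORIG_TYPE of int (int, float, char)
   and ARGS_TO_SKIP with bit 1 set, the result is a distinct copy of the type
   with signature int (int, char); if SKIP_RETURN is also true, the copy's
   return type becomes void.  */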
155 static tree
156 build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
157 bool skip_return)
159 tree new_type = NULL;
160 tree args, new_args = NULL;
161 tree new_reversed;
162 int i = 0;
164 for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node;
165 args = TREE_CHAIN (args), i++)
166 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
167 new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args);
169 new_reversed = nreverse (new_args);
170 if (args)
172 if (new_reversed)
173 TREE_CHAIN (new_args) = void_list_node;
174 else
175 new_reversed = void_list_node;
178 /* Use copy_node to preserve as much as possible from the original type
179 (debug info, attribute lists etc.).
180 The exception is that METHOD_TYPEs must have a THIS argument.
181 When we are asked to remove it, we need to build a new FUNCTION_TYPE
182 instead. */
183 if (TREE_CODE (orig_type) != METHOD_TYPE
184 || !args_to_skip
185 || !bitmap_bit_p (args_to_skip, 0))
187 new_type = build_distinct_type_copy (orig_type);
188 TYPE_ARG_TYPES (new_type) = new_reversed;
190 else
192 new_type
193 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
194 new_reversed));
195 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
198 if (skip_return)
199 TREE_TYPE (new_type) = void_type_node;
201 return new_type;
204 /* Build variant of function decl ORIG_DECL skipping ARGS_TO_SKIP and the
205 return value if SKIP_RETURN is true.
207 Arguments from DECL_ARGUMENTS list can't be removed now, since they are
208 linked by TREE_CHAIN directly. The caller is responsible for eliminating
209 them when they are being duplicated (i.e. copy_arguments_for_versioning). */
211 static tree
212 build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip,
213 bool skip_return)
215 tree new_decl = copy_node (orig_decl);
216 tree new_type;
218 new_type = TREE_TYPE (orig_decl);
219 if (prototype_p (new_type)
220 || (skip_return && !VOID_TYPE_P (TREE_TYPE (new_type))))
221 new_type
222 = build_function_type_skip_args (new_type, args_to_skip, skip_return);
223 TREE_TYPE (new_decl) = new_type;
225 /* For declarations setting DECL_VINDEX (i.e. methods)
226 we expect the first argument to be the THIS pointer. */
227 if (args_to_skip && bitmap_bit_p (args_to_skip, 0))
228 DECL_VINDEX (new_decl) = NULL_TREE;
230 /* When signature changes, we need to clear builtin info. */
231 if (DECL_BUILT_IN (new_decl)
232 && args_to_skip
233 && !bitmap_empty_p (args_to_skip))
235 DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
236 DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
238 /* The FE might have information and assumptions about the other
239 arguments. */
240 DECL_LANG_SPECIFIC (new_decl) = NULL;
241 return new_decl;
244 /* Set flags of NEW_NODE and its decl. NEW_NODE is a newly created private
245 clone or its thunk. */
247 static void
248 set_new_clone_decl_and_node_flags (cgraph_node *new_node)
250 DECL_EXTERNAL (new_node->decl) = 0;
251 TREE_PUBLIC (new_node->decl) = 0;
252 DECL_COMDAT (new_node->decl) = 0;
253 DECL_WEAK (new_node->decl) = 0;
254 DECL_VIRTUAL_P (new_node->decl) = 0;
255 DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
256 DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;
258 new_node->externally_visible = 0;
259 new_node->local.local = 1;
260 new_node->lowered = true;
263 /* Duplicate thunk THUNK if necessary but make it refer to NODE.
264 ARGS_TO_SKIP, if non-NULL, determines which parameters should be omitted.
265 The function can return NODE if no thunk is necessary, which can happen
266 when the thunk is this_adjusting but we are removing the this parameter. */
268 static cgraph_node *
269 duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
271 cgraph_node *new_thunk, *thunk_of;
272 thunk_of = thunk->callees->callee->ultimate_alias_target ();
274 if (thunk_of->thunk.thunk_p)
275 node = duplicate_thunk_for_node (thunk_of, node);
277 if (!DECL_ARGUMENTS (thunk->decl))
278 thunk->get_untransformed_body ();
280 cgraph_edge *cs;
281 for (cs = node->callers; cs; cs = cs->next_caller)
282 if (cs->caller->thunk.thunk_p
283 && cs->caller->thunk.this_adjusting == thunk->thunk.this_adjusting
284 && cs->caller->thunk.fixed_offset == thunk->thunk.fixed_offset
285 && cs->caller->thunk.virtual_offset_p == thunk->thunk.virtual_offset_p
286 && cs->caller->thunk.virtual_value == thunk->thunk.virtual_value)
287 return cs->caller;
289 tree new_decl;
290 if (!node->clone.args_to_skip)
291 new_decl = copy_node (thunk->decl);
292 else
294 /* We do not need to duplicate this_adjusting thunks if we have removed
295 this. */
296 if (thunk->thunk.this_adjusting
297 && bitmap_bit_p (node->clone.args_to_skip, 0))
298 return node;
300 new_decl = build_function_decl_skip_args (thunk->decl,
301 node->clone.args_to_skip,
302 false);
305 tree *link = &DECL_ARGUMENTS (new_decl);
306 int i = 0;
307 for (tree pd = DECL_ARGUMENTS (thunk->decl); pd; pd = DECL_CHAIN (pd), i++)
309 if (!node->clone.args_to_skip
310 || !bitmap_bit_p (node->clone.args_to_skip, i))
312 tree nd = copy_node (pd);
313 DECL_CONTEXT (nd) = new_decl;
314 *link = nd;
315 link = &DECL_CHAIN (nd);
318 *link = NULL_TREE;
320 gcc_checking_assert (!DECL_STRUCT_FUNCTION (new_decl));
321 gcc_checking_assert (!DECL_INITIAL (new_decl));
322 gcc_checking_assert (!DECL_RESULT (new_decl));
323 gcc_checking_assert (!DECL_RTL_SET_P (new_decl));
325 DECL_NAME (new_decl) = clone_function_name (thunk->decl, "artificial_thunk");
326 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
328 new_thunk = cgraph_node::create (new_decl);
329 set_new_clone_decl_and_node_flags (new_thunk);
330 new_thunk->definition = true;
331 new_thunk->local.can_change_signature = node->local.can_change_signature;
332 new_thunk->thunk = thunk->thunk;
333 new_thunk->unique_name = in_lto_p;
334 new_thunk->former_clone_of = thunk->decl;
335 new_thunk->clone.args_to_skip = node->clone.args_to_skip;
336 new_thunk->clone.combined_args_to_skip = node->clone.combined_args_to_skip;
338 cgraph_edge *e = new_thunk->create_edge (node, NULL, 0,
339 CGRAPH_FREQ_BASE);
340 symtab->call_edge_duplication_hooks (thunk->callees, e);
341 symtab->call_cgraph_duplication_hooks (thunk, new_thunk);
342 return new_thunk;
345 /* If E does not lead to a thunk, simply redirect it to N. Otherwise create
346 one or more equivalent thunks for N and redirect E to the first in the
347 chain. Note that it is then necessary to call
348 n->expand_all_artificial_thunks once all callers are redirected. */
350 void
351 cgraph_edge::redirect_callee_duplicating_thunks (cgraph_node *n)
353 cgraph_node *orig_to = callee->ultimate_alias_target ();
354 if (orig_to->thunk.thunk_p)
355 n = duplicate_thunk_for_node (orig_to, n);
357 redirect_callee (n);
360 /* Call expand_thunk on all callers that are thunks and analyze those
361 nodes that were expanded. */
363 void
364 cgraph_node::expand_all_artificial_thunks ()
366 cgraph_edge *e;
367 for (e = callers; e;)
368 if (e->caller->thunk.thunk_p)
370 cgraph_node *thunk = e->caller;
372 e = e->next_caller;
373 if (thunk->expand_thunk (false, false))
375 thunk->thunk.thunk_p = false;
376 thunk->analyze ();
378 thunk->expand_all_artificial_thunks ();
380 else
381 e = e->next_caller;
384 /* Create a node representing a clone of this node, executed GCOV_COUNT
385 times. Decrease the execution counts of the original node too.
386 The new clone will have its decl set to NEW_DECL, which may or may not
387 be the same as the decl of the original node.
389 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
390 function's profile to reflect the fact that part of the execution is
391 handled by the new node.
392 When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
393 the new clone. Otherwise the caller is responsible for doing so later.
395 If the new node is being inlined into another one, NEW_INLINED_TO should be
396 the outline function the new one is (even indirectly) inlined to. All hooks
397 will see this in the node's global.inlined_to when invoked. It can be NULL
398 if the node is not inlined. */
400 cgraph_node *
401 cgraph_node::create_clone (tree new_decl, gcov_type gcov_count, int freq,
402 bool update_original,
403 vec<cgraph_edge *> redirect_callers,
404 bool call_duplication_hook,
405 cgraph_node *new_inlined_to,
406 bitmap args_to_skip)
408 cgraph_node *new_node = symtab->create_empty ();
409 cgraph_edge *e;
410 gcov_type count_scale;
411 unsigned i;
413 new_node->decl = new_decl;
414 new_node->register_symbol ();
415 new_node->origin = origin;
416 new_node->lto_file_data = lto_file_data;
417 if (new_node->origin)
419 new_node->next_nested = new_node->origin->nested;
420 new_node->origin->nested = new_node;
422 new_node->analyzed = analyzed;
423 new_node->definition = definition;
424 new_node->local = local;
425 new_node->externally_visible = false;
426 new_node->no_reorder = no_reorder;
427 new_node->local.local = true;
428 new_node->global = global;
429 new_node->global.inlined_to = new_inlined_to;
430 new_node->rtl = rtl;
431 new_node->count = count;
432 new_node->frequency = frequency;
433 new_node->tp_first_run = tp_first_run;
434 new_node->tm_clone = tm_clone;
435 new_node->icf_merged = icf_merged;
436 new_node->merged_comdat = merged_comdat;
437 new_node->thunk = thunk;
439 new_node->clone.tree_map = NULL;
440 new_node->clone.args_to_skip = args_to_skip;
441 new_node->split_part = split_part;
442 if (!args_to_skip)
443 new_node->clone.combined_args_to_skip = clone.combined_args_to_skip;
444 else if (clone.combined_args_to_skip)
446 new_node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
447 bitmap_ior (new_node->clone.combined_args_to_skip,
448 clone.combined_args_to_skip, args_to_skip);
450 else
451 new_node->clone.combined_args_to_skip = args_to_skip;
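/* count_scale is the ratio of the new node's count to this node's count,
   capped at REG_BR_PROB_BASE (a ratio of 1.0); it is applied below to the
   counts of the cloned call edges.  */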
453 if (count)
455 if (new_node->count > count)
456 count_scale = REG_BR_PROB_BASE;
457 else
458 count_scale = GCOV_COMPUTE_SCALE (new_node->count, count);
460 else
461 count_scale = 0;
462 if (update_original)
464 count -= gcov_count;
465 if (count < 0)
466 count = 0;
469 FOR_EACH_VEC_ELT (redirect_callers, i, e)
471 /* Redirect calls to the old version node to point to its new
472 version. The only exception is when the edge was proved to
473 be unreachable during the cloning procedure. */
474 if (!e->callee
475 || DECL_BUILT_IN_CLASS (e->callee->decl) != BUILT_IN_NORMAL
476 || DECL_FUNCTION_CODE (e->callee->decl) != BUILT_IN_UNREACHABLE)
477 e->redirect_callee_duplicating_thunks (new_node);
479 new_node->expand_all_artificial_thunks ();
481 for (e = callees;e; e=e->next_callee)
482 e->clone (new_node, e->call_stmt, e->lto_stmt_uid, count_scale,
483 freq, update_original);
485 for (e = indirect_calls; e; e = e->next_callee)
486 e->clone (new_node, e->call_stmt, e->lto_stmt_uid,
487 count_scale, freq, update_original);
488 new_node->clone_references (this);
490 new_node->next_sibling_clone = clones;
491 if (clones)
492 clones->prev_sibling_clone = new_node;
493 clones = new_node;
494 new_node->clone_of = this;
496 if (call_duplication_hook)
497 symtab->call_cgraph_duplication_hooks (this, new_node);
498 return new_node;
501 static GTY(()) unsigned int clone_fn_id_num;
503 /* Return a new assembler name for a clone with SUFFIX of a decl named
504 NAME. */
506 tree
507 clone_function_name_1 (const char *name, const char *suffix)
509 size_t len = strlen (name);
510 char *tmp_name, *prefix;
512 prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
513 memcpy (prefix, name, len);
514 strcpy (prefix + len + 1, suffix);
515 prefix[len] = symbol_table::symbol_suffix_separator ();
516 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
517 return get_identifier (tmp_name);
520 /* Return a new assembler name for a clone of DECL with SUFFIX. */
522 tree
523 clone_function_name (tree decl, const char *suffix)
525 tree name = DECL_ASSEMBLER_NAME (decl);
526 return clone_function_name_1 (IDENTIFIER_POINTER (name), suffix);
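/* For illustration: cloning a decl whose assembler name is "foo" with
   SUFFIX "constprop" typically produces an identifier such as
   "foo.constprop.0"; the exact separator and numbering depend on
   symbol_table::symbol_suffix_separator and ASM_FORMAT_PRIVATE_NAME.  */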
530 /* Create a callgraph node clone with a new declaration. The actual body
531 will be copied later at the compilation stage.
533 TODO: after merging in ipa-sra, use function call notes instead of the
534 args_to_skip bitmap interface. */
536 cgraph_node *
537 cgraph_node::create_virtual_clone (vec<cgraph_edge *> redirect_callers,
538 vec<ipa_replace_map *, va_gc> *tree_map,
539 bitmap args_to_skip, const char * suffix)
541 tree old_decl = decl;
542 cgraph_node *new_node = NULL;
543 tree new_decl;
544 size_t len, i;
545 ipa_replace_map *map;
546 char *name;
548 gcc_checking_assert (local.versionable);
549 gcc_assert (local.can_change_signature || !args_to_skip);
551 /* Make a new FUNCTION_DECL tree node */
552 if (!args_to_skip)
553 new_decl = copy_node (old_decl);
554 else
555 new_decl = build_function_decl_skip_args (old_decl, args_to_skip, false);
557 /* These pointers represent the function body and will be populated only
558 when the clone is materialized. */
559 gcc_assert (new_decl != old_decl);
560 DECL_STRUCT_FUNCTION (new_decl) = NULL;
561 DECL_ARGUMENTS (new_decl) = NULL;
562 DECL_INITIAL (new_decl) = NULL;
563 DECL_RESULT (new_decl) = NULL;
564 /* We cannot do DECL_RESULT (new_decl) = NULL; here because LTO partitioning
565 sometimes stores only the clone decl instead of the original. */
567 /* Generate a new name for the new version. */
568 len = IDENTIFIER_LENGTH (DECL_NAME (old_decl));
569 name = XALLOCAVEC (char, len + strlen (suffix) + 2);
570 memcpy (name, IDENTIFIER_POINTER (DECL_NAME (old_decl)), len);
571 strcpy (name + len + 1, suffix);
572 name[len] = '.';
573 DECL_NAME (new_decl) = get_identifier (name);
574 SET_DECL_ASSEMBLER_NAME (new_decl, clone_function_name (old_decl, suffix));
575 SET_DECL_RTL (new_decl, NULL);
577 new_node = create_clone (new_decl, count, CGRAPH_FREQ_BASE, false,
578 redirect_callers, false, NULL, args_to_skip);
580 /* Update the properties.
581 Make the clone visible only within this translation unit. Make sure
582 that it is also not weak.
583 ??? We cannot use COMDAT linkage because there is no
584 ABI support for this. */
585 set_new_clone_decl_and_node_flags (new_node);
586 new_node->clone.tree_map = tree_map;
587 if (!implicit_section)
588 new_node->set_section (get_section ());
590 /* Clones of global symbols or symbols with unique names are unique. */
591 if ((TREE_PUBLIC (old_decl)
592 && !DECL_EXTERNAL (old_decl)
593 && !DECL_WEAK (old_decl)
594 && !DECL_COMDAT (old_decl))
595 || in_lto_p)
596 new_node->unique_name = true;
597 FOR_EACH_VEC_SAFE_ELT (tree_map, i, map)
598 new_node->maybe_create_reference (map->new_tree, IPA_REF_ADDR, NULL);
600 if (ipa_transforms_to_apply.exists ())
601 new_node->ipa_transforms_to_apply
602 = ipa_transforms_to_apply.copy ();
604 symtab->call_cgraph_duplication_hooks (this, new_node);
606 return new_node;
609 /* The callgraph node is being removed from the symbol table; see if its
610 entry can be replaced by another inline clone. */
611 cgraph_node *
612 cgraph_node::find_replacement (void)
614 cgraph_node *next_inline_clone, *replacement;
616 for (next_inline_clone = clones;
617 next_inline_clone
618 && next_inline_clone->decl != decl;
619 next_inline_clone = next_inline_clone->next_sibling_clone)
622 /* If there is an inline clone of the node being removed, we need
623 to put it into the position of the removed node and reorganize all
624 other clones to be based on it. */
625 if (next_inline_clone)
627 cgraph_node *n;
628 cgraph_node *new_clones;
630 replacement = next_inline_clone;
632 /* Unlink inline clone from the list of clones of removed node. */
633 if (next_inline_clone->next_sibling_clone)
634 next_inline_clone->next_sibling_clone->prev_sibling_clone
635 = next_inline_clone->prev_sibling_clone;
636 if (next_inline_clone->prev_sibling_clone)
638 gcc_assert (clones != next_inline_clone);
639 next_inline_clone->prev_sibling_clone->next_sibling_clone
640 = next_inline_clone->next_sibling_clone;
642 else
644 gcc_assert (clones == next_inline_clone);
645 clones = next_inline_clone->next_sibling_clone;
648 new_clones = clones;
649 clones = NULL;
651 /* Copy clone info. */
652 next_inline_clone->clone = clone;
654 /* Now place it into the clone tree at the same level as NODE. */
655 next_inline_clone->clone_of = clone_of;
656 next_inline_clone->prev_sibling_clone = NULL;
657 next_inline_clone->next_sibling_clone = NULL;
658 if (clone_of)
660 if (clone_of->clones)
661 clone_of->clones->prev_sibling_clone = next_inline_clone;
662 next_inline_clone->next_sibling_clone = clone_of->clones;
663 clone_of->clones = next_inline_clone;
666 /* Merge the clone list. */
667 if (new_clones)
669 if (!next_inline_clone->clones)
670 next_inline_clone->clones = new_clones;
671 else
673 n = next_inline_clone->clones;
674 while (n->next_sibling_clone)
675 n = n->next_sibling_clone;
676 n->next_sibling_clone = new_clones;
677 new_clones->prev_sibling_clone = n;
681 /* Update clone_of pointers. */
682 n = new_clones;
683 while (n)
685 n->clone_of = next_inline_clone;
686 n = n->next_sibling_clone;
688 return replacement;
690 else
691 return NULL;
694 /* Like cgraph_set_call_stmt but walk the clone tree and update all
695 clones sharing the same function body.
696 When UPDATE_SPECULATIVE is true, all three components of a
697 speculative edge get updated. Otherwise we update only the direct
698 call. */
700 void
701 cgraph_node::set_call_stmt_including_clones (gimple *old_stmt,
702 gcall *new_stmt,
703 bool update_speculative)
705 cgraph_node *node;
706 cgraph_edge *edge = get_edge (old_stmt);
708 if (edge)
709 edge->set_call_stmt (new_stmt, update_speculative);
711 node = clones;
712 if (node)
713 while (node != this)
715 cgraph_edge *edge = node->get_edge (old_stmt);
716 if (edge)
718 edge->set_call_stmt (new_stmt, update_speculative);
719 /* If UPDATE_SPECULATIVE is false, it means that we are turning a
720 speculative call into a real code sequence. Update the
721 callgraph edges. */
722 if (edge->speculative && !update_speculative)
724 cgraph_edge *direct, *indirect;
725 ipa_ref *ref;
727 gcc_assert (!edge->indirect_unknown_callee);
728 edge->speculative_call_info (direct, indirect, ref);
729 direct->speculative = false;
730 indirect->speculative = false;
731 ref->speculative = false;
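/* Continue walking the clone tree: descend into children (node->clones)
   first, then advance to the next sibling clone, and climb back up via
   clone_of once a subtree has been exhausted.  */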
734 if (node->clones)
735 node = node->clones;
736 else if (node->next_sibling_clone)
737 node = node->next_sibling_clone;
738 else
740 while (node != this && !node->next_sibling_clone)
741 node = node->clone_of;
742 if (node != this)
743 node = node->next_sibling_clone;
748 /* Like cgraph_create_edge, walk the clone tree and update all clones sharing
749 the same function body. If clones already have an edge for OLD_STMT, only
750 update the edge in the same way as cgraph_set_call_stmt_including_clones does.
752 TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
753 frequencies of the clones. */
755 void
756 cgraph_node::create_edge_including_clones (cgraph_node *callee,
757 gimple *old_stmt, gcall *stmt,
758 gcov_type count,
759 int freq,
760 cgraph_inline_failed_t reason)
762 cgraph_node *node;
763 cgraph_edge *edge;
765 if (!get_edge (stmt))
767 edge = create_edge (callee, stmt, count, freq);
768 edge->inline_failed = reason;
771 node = clones;
772 if (node)
773 while (node != this)
774 /* Thunk clones do not get updated while copying inline function body. */
775 if (!node->thunk.thunk_p)
777 cgraph_edge *edge = node->get_edge (old_stmt);
779 /* It is possible that clones already contain the edge while the
780 master didn't. Either we promoted an indirect call into a direct
781 call in the clone or we are processing clones of an unreachable
782 master whose edges have been removed. */
783 if (edge)
784 edge->set_call_stmt (stmt);
785 else if (! node->get_edge (stmt))
787 edge = node->create_edge (callee, stmt, count, freq);
788 edge->inline_failed = reason;
791 if (node->clones)
792 node = node->clones;
793 else if (node->next_sibling_clone)
794 node = node->next_sibling_clone;
795 else
797 while (node != this && !node->next_sibling_clone)
798 node = node->clone_of;
799 if (node != this)
800 node = node->next_sibling_clone;
805 /* Remove the node from the cgraph and all inline clones inlined into it.
806 However, skip removal of FORBIDDEN_NODE and return true if it needs to be
807 removed. This allows the function to be called from an outer loop walking
808 the clone tree. */
810 bool
811 cgraph_node::remove_symbol_and_inline_clones (cgraph_node *forbidden_node)
813 cgraph_edge *e, *next;
814 bool found = false;
816 if (this == forbidden_node)
818 callers->remove ();
819 return true;
821 for (e = callees; e; e = next)
823 next = e->next_callee;
824 if (!e->inline_failed)
825 found |= e->callee->remove_symbol_and_inline_clones (forbidden_node);
827 remove ();
828 return found;
831 /* The edges representing the callers of the NEW_VERSION node were
832 fixed by cgraph_function_versioning (); now the call_expr in their
833 respective tree code should be updated to call the NEW_VERSION. */
835 static void
836 update_call_expr (cgraph_node *new_version)
838 cgraph_edge *e;
840 gcc_assert (new_version);
842 /* Update the call expr on the edges to call the new version. */
843 for (e = new_version->callers; e; e = e->next_caller)
845 function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
846 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
847 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
852 /* Create a new cgraph node which is the new version of
853 the callgraph node. REDIRECT_CALLERS holds the caller
854 edges which should be redirected to point to
855 NEW_VERSION. All the callee edges of the node
856 are cloned to the new version node. Return the new
857 version node.
859 If non-NULL, BBS_TO_COPY determines which basic blocks
860 were copied, to prevent duplication of calls that are dead
861 in the clone. */
863 cgraph_node *
864 cgraph_node::create_version_clone (tree new_decl,
865 vec<cgraph_edge *> redirect_callers,
866 bitmap bbs_to_copy)
868 cgraph_node *new_version;
869 cgraph_edge *e;
870 unsigned i;
872 new_version = cgraph_node::create (new_decl);
874 new_version->analyzed = analyzed;
875 new_version->definition = definition;
876 new_version->local = local;
877 new_version->externally_visible = false;
878 new_version->no_reorder = no_reorder;
879 new_version->local.local = new_version->definition;
880 new_version->global = global;
881 new_version->rtl = rtl;
882 new_version->count = count;
884 for (e = callees; e; e=e->next_callee)
885 if (!bbs_to_copy
886 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
887 e->clone (new_version, e->call_stmt,
888 e->lto_stmt_uid, REG_BR_PROB_BASE,
889 CGRAPH_FREQ_BASE,
890 true);
891 for (e = indirect_calls; e; e=e->next_callee)
892 if (!bbs_to_copy
893 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
894 e->clone (new_version, e->call_stmt,
895 e->lto_stmt_uid, REG_BR_PROB_BASE,
896 CGRAPH_FREQ_BASE,
897 true);
898 FOR_EACH_VEC_ELT (redirect_callers, i, e)
900 /* Redirect calls to the old version node to point to its new
901 version. */
902 e->redirect_callee (new_version);
905 symtab->call_cgraph_duplication_hooks (this, new_version);
907 return new_version;
910 /* Perform function versioning.
911 Function versioning includes copying of the tree and
912 a callgraph update (creating a new cgraph node and updating
913 its callees and callers).
915 REDIRECT_CALLERS varray includes the edges to be redirected
916 to the new version.
918 TREE_MAP is a mapping of tree nodes we want to replace with
919 new ones (according to results of prior analysis).
921 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
922 from the new version.
923 If SKIP_RETURN is true, the new version will return void.
924 If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
925 If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.
927 Return the new version's cgraph node. */
929 cgraph_node *
930 cgraph_node::create_version_clone_with_body
931 (vec<cgraph_edge *> redirect_callers,
932 vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
933 bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
934 const char *clone_name)
936 tree old_decl = decl;
937 cgraph_node *new_version_node = NULL;
938 tree new_decl;
940 if (!tree_versionable_function_p (old_decl))
941 return NULL;
943 gcc_assert (local.can_change_signature || !args_to_skip);
945 /* Make a new FUNCTION_DECL tree node for the new version. */
946 if (!args_to_skip && !skip_return)
947 new_decl = copy_node (old_decl);
948 else
949 new_decl
950 = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);
952 /* Generate a new name for the new version. */
953 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
954 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
955 SET_DECL_RTL (new_decl, NULL);
957 /* When the old decl was a con-/destructor make sure the clone isn't. */
958 DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
959 DECL_STATIC_DESTRUCTOR (new_decl) = 0;
961 /* Create the new version's call-graph node
962 and update the edges of the new node. */
963 new_version_node = create_version_clone (new_decl, redirect_callers,
964 bbs_to_copy);
966 if (ipa_transforms_to_apply.exists ())
967 new_version_node->ipa_transforms_to_apply
968 = ipa_transforms_to_apply.copy ();
969 /* Copy the OLD_VERSION_NODE function tree to the new version. */
970 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
971 skip_return, bbs_to_copy, new_entry_block);
973 /* Update the new version's properties.
974 Make the new version visible only within this translation unit. Make sure
975 that it is also not weak.
976 ??? We cannot use COMDAT linkage because there is no
977 ABI support for this. */
978 new_version_node->make_decl_local ();
979 DECL_VIRTUAL_P (new_version_node->decl) = 0;
980 new_version_node->externally_visible = 0;
981 new_version_node->local.local = 1;
982 new_version_node->lowered = true;
983 if (!implicit_section)
984 new_version_node->set_section (get_section ());
985 /* Clones of global symbols or symbols with unique names are unique. */
986 if ((TREE_PUBLIC (old_decl)
987 && !DECL_EXTERNAL (old_decl)
988 && !DECL_WEAK (old_decl)
989 && !DECL_COMDAT (old_decl))
990 || in_lto_p)
991 new_version_node->unique_name = true;
993 /* Update the call_expr on the edges to call the new version node. */
994 update_call_expr (new_version_node);
996 symtab->call_cgraph_insertion_hooks (this);
997 return new_version_node;
1000 /* Given a virtual clone, turn it into an actual clone. */
1002 static void
1003 cgraph_materialize_clone (cgraph_node *node)
1005 bitmap_obstack_initialize (NULL);
1006 node->former_clone_of = node->clone_of->decl;
1007 if (node->clone_of->former_clone_of)
1008 node->former_clone_of = node->clone_of->former_clone_of;
1009 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1010 tree_function_versioning (node->clone_of->decl, node->decl,
1011 node->clone.tree_map, true,
1012 node->clone.args_to_skip, false,
1013 NULL, NULL);
1014 if (symtab->dump_file)
1016 dump_function_to_file (node->clone_of->decl, symtab->dump_file,
1017 dump_flags);
1018 dump_function_to_file (node->decl, symtab->dump_file, dump_flags);
1021 /* The function is no longer a clone. */
1022 if (node->next_sibling_clone)
1023 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1024 if (node->prev_sibling_clone)
1025 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1026 else
1027 node->clone_of->clones = node->next_sibling_clone;
1028 node->next_sibling_clone = NULL;
1029 node->prev_sibling_clone = NULL;
1030 if (!node->clone_of->analyzed && !node->clone_of->clones)
1032 node->clone_of->release_body ();
1033 node->clone_of->remove_callees ();
1034 node->clone_of->remove_all_references ();
1036 node->clone_of = NULL;
1037 bitmap_obstack_release (NULL);
1040 /* Once all functions from the compilation unit are in memory, produce all
1041 clones and update all calls. We might also do this on demand if we don't
1042 want to bring all functions into memory prior to compilation, but the
1043 current WHOPR implementation does that and it is a bit easier to keep
1044 everything right in this order. */
1046 void
1047 symbol_table::materialize_all_clones (void)
1049 cgraph_node *node;
1050 bool stabilized = false;
1053 if (symtab->dump_file)
1054 fprintf (symtab->dump_file, "Materializing clones\n");
1056 cgraph_node::checking_verify_cgraph_nodes ();
1058 /* We could also use topological order, but the number of iterations should
1059 be bounded by the number of IPA passes, since a single IPA pass is probably
1060 not going to create clones of clones it created itself. */
1061 while (!stabilized)
1063 stabilized = true;
1064 FOR_EACH_FUNCTION (node)
1066 if (node->clone_of && node->decl != node->clone_of->decl
1067 && !gimple_has_body_p (node->decl))
1069 if (!node->clone_of->clone_of)
1070 node->clone_of->get_untransformed_body ();
1071 if (gimple_has_body_p (node->clone_of->decl))
1073 if (symtab->dump_file)
1075 fprintf (symtab->dump_file, "cloning %s to %s\n",
1076 xstrdup_for_dump (node->clone_of->name ()),
1077 xstrdup_for_dump (node->name ()));
1078 if (node->clone.tree_map)
1080 unsigned int i;
1081 fprintf (symtab->dump_file, " replace map: ");
1082 for (i = 0;
1083 i < vec_safe_length (node->clone.tree_map);
1084 i++)
1086 ipa_replace_map *replace_info;
1087 replace_info = (*node->clone.tree_map)[i];
1088 print_generic_expr (symtab->dump_file, replace_info->old_tree, 0);
1089 fprintf (symtab->dump_file, " -> ");
1090 print_generic_expr (symtab->dump_file, replace_info->new_tree, 0);
1091 fprintf (symtab->dump_file, "%s%s;",
1092 replace_info->replace_p ? "(replace)":"",
1093 replace_info->ref_p ? "(ref)":"");
1095 fprintf (symtab->dump_file, "\n");
1097 if (node->clone.args_to_skip)
1099 fprintf (symtab->dump_file, " args_to_skip: ");
1100 dump_bitmap (symtab->dump_file,
1101 node->clone.args_to_skip);
1103 if (node->clone.combined_args_to_skip)
1105 fprintf (symtab->dump_file, " combined_args_to_skip:");
1106 dump_bitmap (symtab->dump_file, node->clone.combined_args_to_skip);
1109 cgraph_materialize_clone (node);
1110 stabilized = false;
1115 FOR_EACH_FUNCTION (node)
1116 if (!node->analyzed && node->callees)
1118 node->remove_callees ();
1119 node->remove_all_references ();
1121 else
1122 node->clear_stmts_in_references ();
1123 if (symtab->dump_file)
1124 fprintf (symtab->dump_file, "Materialization Call site updates done.\n");
1126 cgraph_node::checking_verify_cgraph_nodes ();
1128 symtab->remove_unreachable_nodes (symtab->dump_file);
1131 #include "gt-cgraphclones.h"