gcc/cgraphclones.c
/* Callgraph clones
   Copyright (C) 2003-2013 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module provides facilities for cloning functions, i.e. creating
   new functions based on existing functions with simple modifications,
   such as replacement of parameters.

   To allow whole program optimization without actual presence of function
   bodies, an additional infrastructure is provided for so-called virtual
   clones.

   A virtual clone in the callgraph is a function that has no
   associated body, just a description of how to create its body based
   on a different function (which itself may be a virtual clone).

   The description of function modifications includes adjustments to
   the function's signature (which allows, for example, removing or
   adding function arguments), substitutions to perform on the
   function body, and, for inlined functions, a pointer to the
   function that it will be inlined into.

   It is also possible to redirect any edge of the callgraph from a
   function to its virtual clone.  This implies updating of the call
   site to adjust for the new function signature.

   Most of the transformations performed by inter-procedural
   optimizations can be represented via virtual clones.  For
   instance, a constant propagation pass can produce a virtual clone
   of the function which replaces one of its arguments by a
   constant.  The inliner can represent its decisions by producing a
   clone of a function whose body will be later integrated into
   a given function.

   Using virtual clones, the program can be easily updated
   during the Execute stage, solving most of the pass interaction
   problems that would otherwise occur during Transform.

   Virtual clones are later materialized in the LTRANS stage and
   turned into real functions.  Passes executed after the virtual
   clone was introduced also perform their Transform stage
   on the new functions, so for a pass there is no significant
   difference between operating on a real function or a virtual
   clone introduced before its Execute stage.

   Optimization passes then work on virtual clones introduced before
   their Execute stage as if they were real functions.  The
   only difference is that clones are not visible during the
   Generate Summary stage.  */
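
/* As an illustrative sketch (not part of the original sources), an IPA pass
   could request a virtual clone of NODE that replaces one value by the
   constant 42 and drops the second argument roughly as follows; NODE and
   OLD_PARM_VALUE are hypothetical and error handling is omitted:

     vec<cgraph_edge_p> redirect_callers = vNULL;
     vec<ipa_replace_map_p, va_gc> *tree_map = NULL;
     struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
     bitmap args_to_skip = BITMAP_GGC_ALLOC ();
     struct cgraph_node *clone;

     map->old_tree = old_parm_value;
     map->new_tree = build_int_cst (integer_type_node, 42);
     map->replace_p = true;
     map->ref_p = false;
     vec_safe_push (tree_map, map);
     bitmap_set_bit (args_to_skip, 1);
     clone = cgraph_create_virtual_clone (node, redirect_callers,
                                          tree_map, args_to_skip,
                                          "constprop");

   The clone gets a body only once cgraph_materialize_all_clones () runs.  */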
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "bitmap.h"
#include "tree-cfg.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "toplev.h"
#include "flags.h"
#include "ggc.h"
#include "debug.h"
#include "target.h"
#include "diagnostic.h"
#include "params.h"
#include "intl.h"
#include "function.h"
#include "ipa-prop.h"
#include "tree-iterator.h"
#include "tree-dump.h"
#include "gimple-pretty-print.h"
#include "coverage.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "lto-streamer.h"
#include "except.h"
/* Create a clone of edge E in the callgraph node N; the new edge will be
   associated with statement CALL_STMT.  */

struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
                   gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
                   int freq_scale, bool update_original)
{
  struct cgraph_edge *new_edge;
  gcov_type count = apply_probability (e->count, count_scale);
  gcov_type freq;

  /* We do not want to ignore loop nest after frequency drops to 0.  */
  if (!freq_scale)
    freq_scale = 1;
  freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;

  if (e->indirect_unknown_callee)
    {
      tree decl;

      if (call_stmt && (decl = gimple_call_fndecl (call_stmt)))
        {
          struct cgraph_node *callee = cgraph_get_node (decl);
          gcc_checking_assert (callee);
          new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq);
        }
      else
        {
          new_edge = cgraph_create_indirect_edge (n, call_stmt,
                                                  e->indirect_info->ecf_flags,
                                                  count, freq);
          *new_edge->indirect_info = *e->indirect_info;
        }
    }
  else
    {
      new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq);
      if (e->indirect_info)
        {
          new_edge->indirect_info
            = ggc_alloc_cleared_cgraph_indirect_call_info ();
          *new_edge->indirect_info = *e->indirect_info;
        }
    }

  new_edge->inline_failed = e->inline_failed;
  new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
  new_edge->lto_stmt_uid = stmt_uid;
  /* Clone flags that depend on call_stmt availability manually.  */
  new_edge->can_throw_external = e->can_throw_external;
  new_edge->call_stmt_cannot_inline_p = e->call_stmt_cannot_inline_p;
  new_edge->speculative = e->speculative;
  if (update_original)
    {
      e->count -= new_edge->count;
      if (e->count < 0)
        e->count = 0;
    }
  cgraph_call_edge_duplication_hooks (e, new_edge);
  return new_edge;
}
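
/* A worked example of the scaling above (illustrative numbers only): if the
   original node has profile count 1000 and its clone receives count 250,
   cgraph_clone_node computes COUNT_SCALE as GCOV_COMPUTE_SCALE (250, 1000),
   i.e. roughly a 25% probability.  An edge with count 400 is then cloned
   with apply_probability (400, count_scale) == 100, and when UPDATE_ORIGINAL
   is true the original edge keeps the remaining 300.  */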
/* Create a node representing a clone of N executed COUNT times.  Decrease
   the execution counts from the original node too.
   The new clone will have decl set to DECL, which may or may not be the same
   as the decl of N.

   When UPDATE_ORIGINAL is true, the counts are subtracted from the original
   function's profile to reflect the fact that part of the execution is
   handled by the new node.
   When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
   the new clone.  Otherwise the caller is responsible for doing so later.

   If the new node is being inlined into another one, NEW_INLINED_TO should be
   the function the new clone is (even indirectly) inlined into.  All hooks
   will see this in the node's global.inlined_to, when invoked.  Can be NULL
   if the node is not inlined.  */

struct cgraph_node *
cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
                   bool update_original,
                   vec<cgraph_edge_p> redirect_callers,
                   bool call_duplication_hook,
                   struct cgraph_node *new_inlined_to)
{
  struct cgraph_node *new_node = cgraph_create_empty_node ();
  struct cgraph_edge *e;
  gcov_type count_scale;
  unsigned i;

  new_node->decl = decl;
  symtab_register_node (new_node);
  new_node->origin = n->origin;
  new_node->lto_file_data = n->lto_file_data;
  if (new_node->origin)
    {
      new_node->next_nested = new_node->origin->nested;
      new_node->origin->nested = new_node;
    }
  new_node->analyzed = n->analyzed;
  new_node->definition = n->definition;
  new_node->local = n->local;
  new_node->externally_visible = false;
  new_node->local.local = true;
  new_node->global = n->global;
  new_node->global.inlined_to = new_inlined_to;
  new_node->rtl = n->rtl;
  new_node->count = count;
  new_node->frequency = n->frequency;
  new_node->clone = n->clone;
  new_node->clone.tree_map = NULL;
  if (n->count)
    {
      if (new_node->count > n->count)
        count_scale = REG_BR_PROB_BASE;
      else
        count_scale = GCOV_COMPUTE_SCALE (new_node->count, n->count);
    }
  else
    count_scale = 0;
  if (update_original)
    {
      n->count -= count;
      if (n->count < 0)
        n->count = 0;
    }

  FOR_EACH_VEC_ELT (redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
         version.  */
      cgraph_redirect_edge_callee (e, new_node);
    }

  for (e = n->callees; e; e = e->next_callee)
    cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
                       count_scale, freq, update_original);

  for (e = n->indirect_calls; e; e = e->next_callee)
    cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
                       count_scale, freq, update_original);
  ipa_clone_references (new_node, &n->ref_list);

  new_node->next_sibling_clone = n->clones;
  if (n->clones)
    n->clones->prev_sibling_clone = new_node;
  n->clones = new_node;
  new_node->clone_of = n;

  if (call_duplication_hook)
    cgraph_call_node_duplication_hooks (n, new_node);
  return new_node;
}
/* Return a new assembler name for a clone of DECL with SUFFIX.  */

static GTY(()) unsigned int clone_fn_id_num;

tree
clone_function_name (tree decl, const char *suffix)
{
  tree name = DECL_ASSEMBLER_NAME (decl);
  size_t len = IDENTIFIER_LENGTH (name);
  char *tmp_name, *prefix;

  prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
  memcpy (prefix, IDENTIFIER_POINTER (name), len);
  strcpy (prefix + len + 1, suffix);
#ifndef NO_DOT_IN_LABEL
  prefix[len] = '.';
#elif !defined NO_DOLLAR_IN_LABEL
  prefix[len] = '$';
#else
  prefix[len] = '_';
#endif
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
  return get_identifier (tmp_name);
}
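
/* For instance, cloning a function whose assembler name is "foo" with
   SUFFIX "constprop" produces names of the form "foo.constprop.N" on
   typical targets; the separator character and the exact private-name
   format are target-dependent, as the conditionals above show.  */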
/* Create a callgraph node clone with a new declaration.  The actual body will
   be copied later, at the compilation stage.

   TODO: after merging in ipa-sra use function call notes instead of the
   args_to_skip bitmap interface.  */

struct cgraph_node *
cgraph_create_virtual_clone (struct cgraph_node *old_node,
                             vec<cgraph_edge_p> redirect_callers,
                             vec<ipa_replace_map_p, va_gc> *tree_map,
                             bitmap args_to_skip,
                             const char * suffix)
{
  tree old_decl = old_node->decl;
  struct cgraph_node *new_node = NULL;
  tree new_decl;
  size_t len, i;
  struct ipa_replace_map *map;
  char *name;

  if (!in_lto_p)
    gcc_checking_assert (tree_versionable_function_p (old_decl));

  gcc_assert (old_node->local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip, false);

  /* These pointers represent the function body and will be populated only
     when the clone is materialized.  */
  gcc_assert (new_decl != old_decl);
  DECL_STRUCT_FUNCTION (new_decl) = NULL;
  DECL_ARGUMENTS (new_decl) = NULL;
  DECL_INITIAL (new_decl) = NULL;
  DECL_RESULT (new_decl) = NULL;
  /* We can not do DECL_RESULT (new_decl) = NULL; here because of LTO
     partitioning sometimes storing only the clone decl instead of the
     original.  */

  /* Generate a new name for the new version.  */
  len = IDENTIFIER_LENGTH (DECL_NAME (old_decl));
  name = XALLOCAVEC (char, len + strlen (suffix) + 2);
  memcpy (name, IDENTIFIER_POINTER (DECL_NAME (old_decl)), len);
  strcpy (name + len + 1, suffix);
  name[len] = '.';
  DECL_NAME (new_decl) = get_identifier (name);
  SET_DECL_ASSEMBLER_NAME (new_decl, clone_function_name (old_decl, suffix));
  SET_DECL_RTL (new_decl, NULL);

  new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
                                CGRAPH_FREQ_BASE, false,
                                redirect_callers, false, NULL);
  /* Update the properties.
     Make the clone visible only within this translation unit.  Make sure
     that it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  DECL_EXTERNAL (new_node->decl) = 0;
  if (DECL_ONE_ONLY (old_decl))
    DECL_SECTION_NAME (new_node->decl) = NULL;
  DECL_COMDAT_GROUP (new_node->decl) = 0;
  TREE_PUBLIC (new_node->decl) = 0;
  DECL_COMDAT (new_node->decl) = 0;
  DECL_WEAK (new_node->decl) = 0;
  DECL_VIRTUAL_P (new_node->decl) = 0;
  DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
  DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;
  new_node->clone.tree_map = tree_map;
  new_node->clone.args_to_skip = args_to_skip;

  /* Clones of global symbols or symbols with unique names are unique.  */
  if ((TREE_PUBLIC (old_decl)
       && !DECL_EXTERNAL (old_decl)
       && !DECL_WEAK (old_decl)
       && !DECL_COMDAT (old_decl))
      || in_lto_p)
    new_node->unique_name = true;
  FOR_EACH_VEC_SAFE_ELT (tree_map, i, map)
    ipa_maybe_record_reference (new_node, map->new_tree,
                                IPA_REF_ADDR, NULL);
  if (!args_to_skip)
    new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
  else if (old_node->clone.combined_args_to_skip)
    {
      int newi = 0, oldi = 0;
      tree arg;
      bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
      struct cgraph_node *orig_node;
      for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
        ;
      for (arg = DECL_ARGUMENTS (orig_node->decl);
           arg; arg = DECL_CHAIN (arg), oldi++)
        {
          if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
            {
              bitmap_set_bit (new_args_to_skip, oldi);
              continue;
            }
          if (bitmap_bit_p (args_to_skip, newi))
            bitmap_set_bit (new_args_to_skip, oldi);
          newi++;
        }
      new_node->clone.combined_args_to_skip = new_args_to_skip;
    }
  else
    new_node->clone.combined_args_to_skip = args_to_skip;
  new_node->externally_visible = 0;
  new_node->local.local = 1;
  new_node->lowered = true;

  cgraph_call_node_duplication_hooks (old_node, new_node);

  return new_node;
}
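
/* A worked example of the bitmap combination above (illustrative only):
   suppose the original function takes (a, b, c, d) and OLD_NODE is already
   a clone whose combined_args_to_skip is {1}, i.e. it takes (a, c, d).
   If the new clone's ARGS_TO_SKIP is {1}, that index refers to "c" in the
   reduced argument list, so the loop translates it back to index 2 of the
   original arguments and the new combined_args_to_skip becomes {1, 2}.  */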
/* NODE is being removed from the symbol table; see if its entry can be
   replaced by another inline clone.  */
struct cgraph_node *
cgraph_find_replacement_node (struct cgraph_node *node)
{
  struct cgraph_node *next_inline_clone, *replacement;

  for (next_inline_clone = node->clones;
       next_inline_clone
       && next_inline_clone->decl != node->decl;
       next_inline_clone = next_inline_clone->next_sibling_clone)
    ;

  /* If there is an inline clone of the node being removed, we need
     to put it into the position of the removed node and reorganize all
     other clones to be based on it.  */
  if (next_inline_clone)
    {
      struct cgraph_node *n;
      struct cgraph_node *new_clones;

      replacement = next_inline_clone;

      /* Unlink the inline clone from the list of clones of the removed
         node.  */
      if (next_inline_clone->next_sibling_clone)
        next_inline_clone->next_sibling_clone->prev_sibling_clone
          = next_inline_clone->prev_sibling_clone;
      if (next_inline_clone->prev_sibling_clone)
        {
          gcc_assert (node->clones != next_inline_clone);
          next_inline_clone->prev_sibling_clone->next_sibling_clone
            = next_inline_clone->next_sibling_clone;
        }
      else
        {
          gcc_assert (node->clones == next_inline_clone);
          node->clones = next_inline_clone->next_sibling_clone;
        }

      new_clones = node->clones;
      node->clones = NULL;

      /* Copy clone info.  */
      next_inline_clone->clone = node->clone;

      /* Now place it into the clone tree at the same level as NODE.  */
      next_inline_clone->clone_of = node->clone_of;
      next_inline_clone->prev_sibling_clone = NULL;
      next_inline_clone->next_sibling_clone = NULL;
      if (node->clone_of)
        {
          if (node->clone_of->clones)
            node->clone_of->clones->prev_sibling_clone = next_inline_clone;
          next_inline_clone->next_sibling_clone = node->clone_of->clones;
          node->clone_of->clones = next_inline_clone;
        }

      /* Merge the clone list.  */
      if (new_clones)
        {
          if (!next_inline_clone->clones)
            next_inline_clone->clones = new_clones;
          else
            {
              n = next_inline_clone->clones;
              while (n->next_sibling_clone)
                n = n->next_sibling_clone;
              n->next_sibling_clone = new_clones;
              new_clones->prev_sibling_clone = n;
            }
        }

      /* Update clone_of pointers.  */
      n = new_clones;
      while (n)
        {
          n->clone_of = next_inline_clone;
          n = n->next_sibling_clone;
        }
      return replacement;
    }
  else
    return NULL;
}
/* Like cgraph_set_call_stmt, but walk the clone tree and update all
   clones sharing the same function body.
   When UPDATE_SPECULATIVE is true, all three components of a
   speculative edge get updated.  Otherwise we update only the direct
   call.  */

void
cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
                                       gimple old_stmt, gimple new_stmt,
                                       bool update_speculative)
{
  struct cgraph_node *node;
  struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);

  if (edge)
    cgraph_set_call_stmt (edge, new_stmt, update_speculative);

  node = orig->clones;
  if (node)
    while (node != orig)
      {
        struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
        if (edge)
          {
            cgraph_set_call_stmt (edge, new_stmt, update_speculative);
            /* If UPDATE_SPECULATIVE is false, it means that we are turning
               the speculative call into a real code sequence.  Update the
               callgraph edges.  */
            if (edge->speculative && !update_speculative)
              {
                struct cgraph_edge *direct, *indirect;
                struct ipa_ref *ref;

                gcc_assert (!edge->indirect_unknown_callee);
                cgraph_speculative_call_info (edge, direct, indirect, ref);
                direct->speculative = false;
                indirect->speculative = false;
                ref->speculative = false;
              }
          }
        if (node->clones)
          node = node->clones;
        else if (node->next_sibling_clone)
          node = node->next_sibling_clone;
        else
          {
            while (node != orig && !node->next_sibling_clone)
              node = node->clone_of;
            if (node != orig)
              node = node->next_sibling_clone;
          }
      }
}
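
/* The walk above visits the whole clone tree rooted at ORIG in pre-order.
   For example (hypothetical nodes): if ORIG has clones B and C, and B itself
   has clone D, the nodes are visited as B, D, C; the inner while loop climbs
   back via clone_of once a subtree and its siblings are exhausted.  */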
/* Like cgraph_create_edge, but walk the clone tree and update all clones
   sharing the same function body.  If clones already have an edge for
   OLD_STMT, only update the edge the same way
   cgraph_set_call_stmt_including_clones does.

   TODO: COUNT and LOOP_DEPTH should be properly distributed based on the
   relative frequencies of the clones.  */

void
cgraph_create_edge_including_clones (struct cgraph_node *orig,
                                     struct cgraph_node *callee,
                                     gimple old_stmt,
                                     gimple stmt, gcov_type count,
                                     int freq,
                                     cgraph_inline_failed_t reason)
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;

  if (!cgraph_edge (orig, stmt))
    {
      edge = cgraph_create_edge (orig, callee, stmt, count, freq);
      edge->inline_failed = reason;
    }

  node = orig->clones;
  if (node)
    while (node != orig)
      {
        struct cgraph_edge *edge = cgraph_edge (node, old_stmt);

        /* It is possible that the clones already contain the edge while
           the master didn't.  Either we promoted an indirect call into a
           direct call in the clone, or we are processing clones of an
           unreachable master where edges have been removed.  */
        if (edge)
          cgraph_set_call_stmt (edge, stmt);
        else if (!cgraph_edge (node, stmt))
          {
            edge = cgraph_create_edge (node, callee, stmt, count, freq);
            edge->inline_failed = reason;
          }

        if (node->clones)
          node = node->clones;
        else if (node->next_sibling_clone)
          node = node->next_sibling_clone;
        else
          {
            while (node != orig && !node->next_sibling_clone)
              node = node->clone_of;
            if (node != orig)
              node = node->next_sibling_clone;
          }
      }
}
/* Remove the node from the cgraph together with all inline clones inlined
   into it.  However, skip removal of FORBIDDEN_NODE and return true if it
   needs to be removed.  This allows the function to be called from an outer
   loop walking the clone tree.  */

bool
cgraph_remove_node_and_inline_clones (struct cgraph_node *node, struct cgraph_node *forbidden_node)
{
  struct cgraph_edge *e, *next;
  bool found = false;

  if (node == forbidden_node)
    {
      cgraph_remove_edge (node->callers);
      return true;
    }
  for (e = node->callees; e; e = next)
    {
      next = e->next_callee;
      if (!e->inline_failed)
        found |= cgraph_remove_node_and_inline_clones (e->callee, forbidden_node);
    }
  cgraph_remove_node (node);
  return found;
}
/* The edges representing the callers of the NEW_VERSION node were
   fixed by cgraph_function_versioning (); now the call_expr in their
   respective tree code should be updated to call NEW_VERSION.  */

static void
update_call_expr (struct cgraph_node *new_version)
{
  struct cgraph_edge *e;

  gcc_assert (new_version);

  /* Update the call expr on the edges to call the new version.  */
  for (e = new_version->callers; e; e = e->next_caller)
    {
      struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
      gimple_call_set_fndecl (e->call_stmt, new_version->decl);
      maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
    }
}
/* Create a new cgraph node which is the new version of the
   OLD_VERSION node.  REDIRECT_CALLERS holds the caller
   edges which should be redirected to point to
   NEW_VERSION.  All the callee edges of OLD_VERSION
   are cloned to the new version node.  Return the new
   version node.

   If BBS_TO_COPY is non-NULL, it determines which basic blocks
   were copied, to prevent duplicating calls that are dead
   in the clone.  */

struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
                                 tree new_decl,
                                 vec<cgraph_edge_p> redirect_callers,
                                 bitmap bbs_to_copy)
{
  struct cgraph_node *new_version;
  struct cgraph_edge *e;
  unsigned i;

  gcc_assert (old_version);

  new_version = cgraph_create_node (new_decl);

  new_version->analyzed = old_version->analyzed;
  new_version->definition = old_version->definition;
  new_version->local = old_version->local;
  new_version->externally_visible = false;
  new_version->local.local = new_version->definition;
  new_version->global = old_version->global;
  new_version->rtl = old_version->rtl;
  new_version->count = old_version->count;

  for (e = old_version->callees; e; e = e->next_callee)
    if (!bbs_to_copy
        || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
                         e->lto_stmt_uid, REG_BR_PROB_BASE,
                         CGRAPH_FREQ_BASE,
                         true);
  for (e = old_version->indirect_calls; e; e = e->next_callee)
    if (!bbs_to_copy
        || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
                         e->lto_stmt_uid, REG_BR_PROB_BASE,
                         CGRAPH_FREQ_BASE,
                         true);
  FOR_EACH_VEC_ELT (redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
         version.  */
      cgraph_redirect_edge_callee (e, new_version);
    }

  cgraph_call_node_duplication_hooks (old_version, new_version);

  return new_version;
}
/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.

   If ARGS_TO_SKIP is non-NULL, it determines which function parameters
   to remove from the new version.
   If SKIP_RETURN is true, the new version will return void.
   If BBS_TO_COPY is non-NULL, it determines which basic blocks to copy.
   If NEW_ENTRY_BLOCK is non-NULL, it determines the new entry BB of the
   clone.

   Return the new version's cgraph node.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
                            vec<cgraph_edge_p> redirect_callers,
                            vec<ipa_replace_map_p, va_gc> *tree_map,
                            bitmap args_to_skip,
                            bool skip_return,
                            bitmap bbs_to_copy,
                            basic_block new_entry_block,
                            const char *clone_name)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node for the new version.  */
  if (!args_to_skip && !skip_return)
    new_decl = copy_node (old_decl);
  else
    new_decl
      = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  /* When the old decl was a con-/destructor make sure the clone isn't.  */
  DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
  DECL_STATIC_DESTRUCTOR (new_decl) = 0;

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
                                     redirect_callers, bbs_to_copy);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
                            skip_return, bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.  Make
     sure that it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  symtab_make_decl_local (new_version_node->decl);
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;
  /* Clones of global symbols or symbols with unique names are unique.  */
  if ((TREE_PUBLIC (old_decl)
       && !DECL_EXTERNAL (old_decl)
       && !DECL_WEAK (old_decl)
       && !DECL_COMDAT (old_decl))
      || in_lto_p)
    new_version_node->unique_name = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}
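
/* Illustrative sketch (hypothetical caller, not part of this file): a pass
   that wants a plain copy of NODE with no signature changes could call

     struct cgraph_node *copy
       = cgraph_function_versioning (node, vNULL, NULL, NULL,
                                     false, NULL, NULL, "my_copy");

   passing no callers to redirect, no replacement map, keeping all
   arguments and the return value, copying all basic blocks and keeping
   the original entry block.  "my_copy" is just an example suffix.  */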
/* Given a virtual clone, turn it into an actual clone.  */

static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  node->former_clone_of = node->clone_of->decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
                            node->clone.tree_map, true,
                            node->clone.args_to_skip, false,
                            NULL, NULL);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* The function is no longer a clone.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      cgraph_release_function_body (node->clone_of);
      cgraph_node_remove_callees (node->clone_of);
      ipa_remove_all_references (&node->clone_of->ref_list);
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}
/* Once all functions from the compilation unit are in memory, produce all
   clones and update all calls.  We might also do this on demand if we don't
   want to bring all functions to memory prior to compilation, but the
   current WHOPR implementation does that and it is a bit easier to keep
   everything right in this order.  */

void
cgraph_materialize_all_clones (void)
{
  struct cgraph_node *node;
  bool stabilized = false;

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Materializing clones\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* We could also use topological order, but the number of iterations
     should be bounded by the number of IPA passes since a single IPA pass
     is probably not going to create clones of clones it created itself.  */
  while (!stabilized)
    {
      stabilized = true;
      FOR_EACH_FUNCTION (node)
        {
          if (node->clone_of && node->decl != node->clone_of->decl
              && !gimple_has_body_p (node->decl))
            {
              if (!node->clone_of->clone_of)
                cgraph_get_body (node->clone_of);
              if (gimple_has_body_p (node->clone_of->decl))
                {
                  if (cgraph_dump_file)
                    {
                      fprintf (cgraph_dump_file, "cloning %s to %s\n",
                               xstrdup (cgraph_node_name (node->clone_of)),
                               xstrdup (cgraph_node_name (node)));
                      if (node->clone.tree_map)
                        {
                          unsigned int i;
                          fprintf (cgraph_dump_file, "   replace map: ");
                          for (i = 0;
                               i < vec_safe_length (node->clone.tree_map);
                               i++)
                            {
                              struct ipa_replace_map *replace_info;
                              replace_info = (*node->clone.tree_map)[i];
                              print_generic_expr (cgraph_dump_file,
                                                  replace_info->old_tree, 0);
                              fprintf (cgraph_dump_file, " -> ");
                              print_generic_expr (cgraph_dump_file,
                                                  replace_info->new_tree, 0);
                              fprintf (cgraph_dump_file, "%s%s;",
                                       replace_info->replace_p
                                       ? "(replace)" : "",
                                       replace_info->ref_p ? "(ref)" : "");
                            }
                          fprintf (cgraph_dump_file, "\n");
                        }
                      if (node->clone.args_to_skip)
                        {
                          fprintf (cgraph_dump_file, "   args_to_skip: ");
                          dump_bitmap (cgraph_dump_file,
                                       node->clone.args_to_skip);
                        }
                      if (node->clone.args_to_skip)
                        {
                          fprintf (cgraph_dump_file,
                                   "   combined_args_to_skip:");
                          dump_bitmap (cgraph_dump_file,
                                       node->clone.combined_args_to_skip);
                        }
                    }
                  cgraph_materialize_clone (node);
                  stabilized = false;
                }
            }
        }
    }
  FOR_EACH_FUNCTION (node)
    if (!node->analyzed && node->callees)
      {
        cgraph_node_remove_callees (node);
        ipa_remove_all_references (&node->ref_list);
      }
    else
      ipa_clear_stmts_in_references (node);
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  symtab_remove_unreachable_nodes (false, cgraph_dump_file);
}

#include "gt-cgraphclones.h"