/* gcc/cgraphclones.c  */

/* Callgraph clones
   Copyright (C) 2003-2018 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module provides facilities for cloning functions, i.e. creating
   new functions based on existing functions with simple modifications,
   such as replacement of parameters.

   To allow whole program optimization without actual presence of function
   bodies, an additional infrastructure is provided for so-called virtual
   clones.

   A virtual clone in the callgraph is a function that has no
   associated body, just a description of how to create its body based
   on a different function (which itself may be a virtual clone).

   The description of function modifications includes adjustments to
   the function's signature (which allows, for example, removing or
   adding function arguments), substitutions to perform on the
   function body, and, for inlined functions, a pointer to the
   function that it will be inlined into.

   It is also possible to redirect any edge of the callgraph from a
   function to its virtual clone.  This implies updating of the call
   site to adjust for the new function signature.

   Most of the transformations performed by inter-procedural
   optimizations can be represented via virtual clones.  For
   instance, a constant propagation pass can produce a virtual clone
   of the function which replaces one of its arguments by a
   constant.  The inliner can represent its decisions by producing a
   clone of a function whose body will be later integrated into
   a given function.

   Using virtual clones, the program can be easily updated
   during the Execute stage, solving most of the pass interaction
   problems that would otherwise occur during Transform.

   Virtual clones are later materialized in the LTRANS stage and
   turned into real functions.  Passes executed after the virtual
   clone was introduced also perform their Transform stage
   on new functions, so for a pass there is no significant
   difference between operating on a real function or a virtual
   clone introduced before its Execute stage.

   Optimization passes then work on virtual clones introduced before
   their Execute stage as if they were real functions.  The
   only difference is that clones are not visible during the
   Generate Summary stage.  */

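/* Editor's note -- illustrative sketch, not part of the original source;
   the function names below are hypothetical.  At the source level, a
   virtual clone produced by constant propagation behaves as if the
   following specialization had been written by hand:

     int foo (int a, int b)        // original function
     {
       return a * b;
     }

     int foo_constprop_0 (int a)   // clone: parameter B replaced by 7 and
     {                             // removed from the signature
       return a * 7;
     }

     // A call site redirected to the clone drops the constant argument:
     //   foo (x, 7)  becomes  foo_constprop_0 (x)

   Until materialization, only the description (replace B by 7, skip
   argument 1) is stored; no GIMPLE body exists for the clone.  */
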
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "stringpool.h"
#include "cgraph.h"
#include "lto-streamer.h"
#include "tree-eh.h"
#include "tree-cfg.h"
#include "tree-inline.h"
#include "dumpfile.h"
#include "gimple-pretty-print.h"

/* Create a clone of this edge in the callgraph node N, represented by
   CALL_STMT (with LTO statement uid STMT_UID).  The clone's count is this
   edge's count scaled by NUM/DEN; when UPDATE_ORIGINAL is true, the cloned
   IPA count is subtracted from the original edge.  */

cgraph_edge *
cgraph_edge::clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
                    profile_count num, profile_count den,
                    bool update_original)
{
  cgraph_edge *new_edge;
  profile_count::adjust_for_ipa_scaling (&num, &den);
  profile_count prof_count = count.apply_scale (num, den);

  if (indirect_unknown_callee)
    {
      tree decl;

      if (call_stmt && (decl = gimple_call_fndecl (call_stmt))
          /* When the call is speculative, we need to resolve it
             via cgraph_resolve_speculation and not here.  */
          && !speculative)
        {
          cgraph_node *callee = cgraph_node::get (decl);
          gcc_checking_assert (callee);
          new_edge = n->create_edge (callee, call_stmt, prof_count);
        }
      else
        {
          new_edge = n->create_indirect_edge (call_stmt,
                                              indirect_info->ecf_flags,
                                              prof_count, false);
          *new_edge->indirect_info = *indirect_info;
        }
    }
  else
    {
      new_edge = n->create_edge (callee, call_stmt, prof_count);
      if (indirect_info)
        {
          new_edge->indirect_info
            = ggc_cleared_alloc<cgraph_indirect_call_info> ();
          *new_edge->indirect_info = *indirect_info;
        }
    }

  new_edge->inline_failed = inline_failed;
  new_edge->indirect_inlining_edge = indirect_inlining_edge;
  new_edge->lto_stmt_uid = stmt_uid;
  /* Clone flags that depend on call_stmt availability manually.  */
  new_edge->can_throw_external = can_throw_external;
  new_edge->call_stmt_cannot_inline_p = call_stmt_cannot_inline_p;
  new_edge->speculative = speculative;
  new_edge->in_polymorphic_cdtor = in_polymorphic_cdtor;

  /* Update IPA profile.  Local profiles need no updating in original.  */
  if (update_original)
    count = count.combine_with_ipa_count (count.ipa ()
                                          - new_edge->count.ipa ());
  symtab->call_edge_duplication_hooks (this, new_edge);
  return new_edge;
}

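/* Editor's note -- worked example, not part of the original source.
   Suppose this edge has an IPA count of 100 and the clone receives
   NUM/DEN = 1/4 of the profile.  apply_scale gives the new edge a count
   of 25, and with UPDATE_ORIGINAL the original edge keeps 100 - 25 = 75
   in its IPA component, so the two edges together still account for the
   measured executions.  */
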
/* Build a variant of function type ORIG_TYPE skipping ARGS_TO_SKIP and the
   return value if SKIP_RETURN is true.  */

tree
cgraph_build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
                                      bool skip_return)
{
  tree new_type = NULL;
  tree args, new_args = NULL;
  tree new_reversed;
  int i = 0;

  for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node;
       args = TREE_CHAIN (args), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args);

  new_reversed = nreverse (new_args);
  if (args)
    {
      if (new_reversed)
        TREE_CHAIN (new_args) = void_list_node;
      else
        new_reversed = void_list_node;
    }

  /* Use copy_node to preserve as much as possible from the original type
     (debug info, attribute lists etc.).
     The exception is that METHOD_TYPEs must have a THIS argument.
     When we are asked to remove it, we need to build a new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || !args_to_skip
      || !bitmap_bit_p (args_to_skip, 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
        = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
                                                         new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
    }

  if (skip_return)
    TREE_TYPE (new_type) = void_type_node;

  return new_type;
}

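/* Editor's note -- illustrative example, not part of the original source.
   For an ORIG_TYPE equivalent to

     int (int a, double b, char c)

   a call with ARGS_TO_SKIP = {1} yields a type equivalent to

     int (int a, char c)

   and additionally passing SKIP_RETURN = true yields

     void (int a, char c)

   For a METHOD_TYPE whose THIS argument (index 0) is skipped, a plain
   FUNCTION_TYPE is built instead, as the comment above explains.  */
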
/* Build a variant of function decl ORIG_DECL skipping ARGS_TO_SKIP and the
   return value if SKIP_RETURN is true.

   Arguments from the DECL_ARGUMENTS list can't be removed now, since they are
   linked by TREE_CHAIN directly.  The caller is responsible for eliminating
   them when they are being duplicated (i.e. copy_arguments_for_versioning).  */

static tree
build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip,
                               bool skip_return)
{
  tree new_decl = copy_node (orig_decl);
  tree new_type;

  new_type = TREE_TYPE (orig_decl);
  if (prototype_p (new_type)
      || (skip_return && !VOID_TYPE_P (TREE_TYPE (new_type))))
    new_type
      = cgraph_build_function_type_skip_args (new_type, args_to_skip,
                                              skip_return);
  TREE_TYPE (new_decl) = new_type;

  /* For declarations setting DECL_VINDEX (i.e. methods)
     we expect the first argument to be the THIS pointer.  */
  if (args_to_skip && bitmap_bit_p (args_to_skip, 0))
    DECL_VINDEX (new_decl) = NULL_TREE;

  /* When the signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (new_decl)
      && args_to_skip
      && !bitmap_empty_p (args_to_skip))
    {
      DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
    }
  /* The FE might have information and assumptions about the other
     arguments.  */
  DECL_LANG_SPECIFIC (new_decl) = NULL;
  return new_decl;
}

/* Set flags of NEW_NODE and its decl.  NEW_NODE is a newly created private
   clone or its thunk.  */

static void
set_new_clone_decl_and_node_flags (cgraph_node *new_node)
{
  DECL_EXTERNAL (new_node->decl) = 0;
  TREE_PUBLIC (new_node->decl) = 0;
  DECL_COMDAT (new_node->decl) = 0;
  DECL_WEAK (new_node->decl) = 0;
  DECL_VIRTUAL_P (new_node->decl) = 0;
  DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
  DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;

  new_node->externally_visible = 0;
  new_node->local.local = 1;
  new_node->lowered = true;
}

/* Duplicate thunk THUNK if necessary but make it refer to NODE.
   ARGS_TO_SKIP, if non-NULL, determines which parameters should be omitted.
   The function can return NODE if no thunk is necessary, which can happen
   when the thunk is this_adjusting but we are removing the this parameter.  */

static cgraph_node *
duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
{
  cgraph_node *new_thunk, *thunk_of;
  thunk_of = thunk->callees->callee->ultimate_alias_target ();

  if (thunk_of->thunk.thunk_p)
    node = duplicate_thunk_for_node (thunk_of, node);

  if (!DECL_ARGUMENTS (thunk->decl))
    thunk->get_untransformed_body ();

  cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (cs->caller->thunk.thunk_p
        && cs->caller->thunk.this_adjusting == thunk->thunk.this_adjusting
        && cs->caller->thunk.fixed_offset == thunk->thunk.fixed_offset
        && cs->caller->thunk.virtual_offset_p == thunk->thunk.virtual_offset_p
        && cs->caller->thunk.virtual_value == thunk->thunk.virtual_value)
      return cs->caller;

  tree new_decl;
  if (!node->clone.args_to_skip)
    new_decl = copy_node (thunk->decl);
  else
    {
      /* We do not need to duplicate this_adjusting thunks if we have removed
         this.  */
      if (thunk->thunk.this_adjusting
          && bitmap_bit_p (node->clone.args_to_skip, 0))
        return node;

      new_decl = build_function_decl_skip_args (thunk->decl,
                                                node->clone.args_to_skip,
                                                false);
    }

  tree *link = &DECL_ARGUMENTS (new_decl);
  int i = 0;
  for (tree pd = DECL_ARGUMENTS (thunk->decl); pd; pd = DECL_CHAIN (pd), i++)
    {
      if (!node->clone.args_to_skip
          || !bitmap_bit_p (node->clone.args_to_skip, i))
        {
          tree nd = copy_node (pd);
          DECL_CONTEXT (nd) = new_decl;
          *link = nd;
          link = &DECL_CHAIN (nd);
        }
    }
  *link = NULL_TREE;

  gcc_checking_assert (!DECL_STRUCT_FUNCTION (new_decl));
  gcc_checking_assert (!DECL_INITIAL (new_decl));
  gcc_checking_assert (!DECL_RESULT (new_decl));
  gcc_checking_assert (!DECL_RTL_SET_P (new_decl));

  DECL_NAME (new_decl) = clone_function_name (thunk->decl, "artificial_thunk");
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));

  new_thunk = cgraph_node::create (new_decl);
  set_new_clone_decl_and_node_flags (new_thunk);
  new_thunk->definition = true;
  new_thunk->local.can_change_signature = node->local.can_change_signature;
  new_thunk->thunk = thunk->thunk;
  new_thunk->unique_name = in_lto_p;
  new_thunk->former_clone_of = thunk->decl;
  new_thunk->clone.args_to_skip = node->clone.args_to_skip;
  new_thunk->clone.combined_args_to_skip = node->clone.combined_args_to_skip;

  cgraph_edge *e = new_thunk->create_edge (node, NULL, new_thunk->count);
  symtab->call_edge_duplication_hooks (thunk->callees, e);
  symtab->call_cgraph_duplication_hooks (thunk, new_thunk);
  return new_thunk;
}

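/* Editor's note -- conceptual sketch, not part of the original source;
   the names "thunk" and "target" are hypothetical.  A this_adjusting
   thunk duplicated here is conceptually equivalent to:

     int
     thunk (void *this_ptr, int arg)
     {
       return target ((char *) this_ptr + fixed_offset, arg);
     }

   (possibly with an extra load through the vtable when virtual_offset_p
   is set).  Duplicating it for NODE only retargets TARGET and possibly
   drops skipped arguments; the offset adjustment itself is unchanged,
   which is why an existing caller thunk with the same fixed_offset,
   virtual_value and virtual_offset_p can simply be reused above.  */
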
/* If E does not lead to a thunk, simply redirect it to N.  Otherwise create
   one or more equivalent thunks for N and redirect E to the first in the
   chain.  Note that it is then necessary to call
   n->expand_all_artificial_thunks once all callers are redirected.  */

void
cgraph_edge::redirect_callee_duplicating_thunks (cgraph_node *n)
{
  cgraph_node *orig_to = callee->ultimate_alias_target ();
  if (orig_to->thunk.thunk_p)
    n = duplicate_thunk_for_node (orig_to, n);

  redirect_callee (n);
}

/* Call expand_thunk on all callers that are thunks and analyze those nodes
   that were expanded.  */

void
cgraph_node::expand_all_artificial_thunks ()
{
  cgraph_edge *e;
  for (e = callers; e;)
    if (e->caller->thunk.thunk_p)
      {
        cgraph_node *thunk = e->caller;

        e = e->next_caller;
        if (thunk->expand_thunk (false, false))
          {
            thunk->thunk.thunk_p = false;
            thunk->analyze ();
          }
        thunk->expand_all_artificial_thunks ();
      }
    else
      e = e->next_caller;
}

/* If the ipa-clones dump file is open, record that ORIGINAL has been cloned
   into CLONE with suffix SUFFIX, including the source locations of both
   nodes.  */

void
dump_callgraph_transformation (const cgraph_node *original,
                               const cgraph_node *clone,
                               const char *suffix)
{
  if (symtab->ipa_clones_dump_file)
    {
      fprintf (symtab->ipa_clones_dump_file,
               "Callgraph clone;%s;%d;%s;%d;%d;%s;%d;%s;%d;%d;%s\n",
               original->asm_name (), original->order,
               DECL_SOURCE_FILE (original->decl),
               DECL_SOURCE_LINE (original->decl),
               DECL_SOURCE_COLUMN (original->decl), clone->asm_name (),
               clone->order, DECL_SOURCE_FILE (clone->decl),
               DECL_SOURCE_LINE (clone->decl), DECL_SOURCE_COLUMN (clone->decl),
               suffix);

      symtab->cloned_nodes.add (original);
      symtab->cloned_nodes.add (clone);
    }
}

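/* Editor's note -- hypothetical example of the record emitted above, not
   part of the original source.  The ';'-separated fields are: original
   assembler name, order, file, line, column, then the same five fields
   for the clone, then the suffix:

     Callgraph clone;foo;12;foo.c;3;5;foo.constprop;34;foo.c;3;5;constprop  */
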
/* Create a node representing a clone of N executed COUNT times.  Decrease
   the execution counts from the original node too.
   The new clone will have decl set to DECL, which may or may not be the same
   as the decl of N.

   When UPDATE_ORIGINAL is true, the counts are subtracted from the original
   function's profile to reflect the fact that part of the execution is
   handled by the clone.
   When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
   the new clone.  Otherwise the caller is responsible for doing so later.

   If the new node is being inlined into another one, NEW_INLINED_TO should be
   the outline function the new one is (even indirectly) inlined to.  All hooks
   will see this in the node's global.inlined_to, when invoked.  Can be NULL if
   the node is not inlined.  */

cgraph_node *
cgraph_node::create_clone (tree new_decl, profile_count prof_count,
                           bool update_original,
                           vec<cgraph_edge *> redirect_callers,
                           bool call_duplication_hook,
                           cgraph_node *new_inlined_to,
                           bitmap args_to_skip, const char *suffix)
{
  cgraph_node *new_node = symtab->create_empty ();
  cgraph_edge *e;
  unsigned i;
  profile_count old_count = count;

  if (new_inlined_to)
    dump_callgraph_transformation (this, new_inlined_to, "inlining to");

  /* When inlining we scale precisely to prof_count; when cloning we can
     preserve the local profile.  */
  if (!new_inlined_to)
    prof_count = count.combine_with_ipa_count (prof_count);
  new_node->count = prof_count;

  /* Update IPA profile.  Local profiles need no updating in original.  */
  if (update_original)
    count = count.combine_with_ipa_count (count.ipa () - prof_count.ipa ());
  new_node->decl = new_decl;
  new_node->register_symbol ();
  new_node->origin = origin;
  new_node->lto_file_data = lto_file_data;
  if (new_node->origin)
    {
      new_node->next_nested = new_node->origin->nested;
      new_node->origin->nested = new_node;
    }
  new_node->analyzed = analyzed;
  new_node->definition = definition;
  new_node->local = local;
  new_node->externally_visible = false;
  new_node->no_reorder = no_reorder;
  new_node->local.local = true;
  new_node->global = global;
  new_node->global.inlined_to = new_inlined_to;
  new_node->rtl = rtl;
  new_node->frequency = frequency;
  new_node->tp_first_run = tp_first_run;
  new_node->tm_clone = tm_clone;
  new_node->icf_merged = icf_merged;
  new_node->merged_comdat = merged_comdat;
  new_node->thunk = thunk;

  new_node->clone.tree_map = NULL;
  new_node->clone.args_to_skip = args_to_skip;
  new_node->split_part = split_part;
  if (!args_to_skip)
    new_node->clone.combined_args_to_skip = clone.combined_args_to_skip;
  else if (clone.combined_args_to_skip)
    {
      new_node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
      bitmap_ior (new_node->clone.combined_args_to_skip,
                  clone.combined_args_to_skip, args_to_skip);
    }
  else
    new_node->clone.combined_args_to_skip = args_to_skip;

  FOR_EACH_VEC_ELT (redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
         version.  The only exception is when the edge was proved to
         be unreachable during the cloning procedure.  */
      if (!e->callee
          || DECL_BUILT_IN_CLASS (e->callee->decl) != BUILT_IN_NORMAL
          || DECL_FUNCTION_CODE (e->callee->decl) != BUILT_IN_UNREACHABLE)
        e->redirect_callee_duplicating_thunks (new_node);
    }
  new_node->expand_all_artificial_thunks ();

  for (e = callees; e; e = e->next_callee)
    e->clone (new_node, e->call_stmt, e->lto_stmt_uid, new_node->count,
              old_count, update_original);

  for (e = indirect_calls; e; e = e->next_callee)
    e->clone (new_node, e->call_stmt, e->lto_stmt_uid,
              new_node->count, old_count, update_original);
  new_node->clone_references (this);

  new_node->next_sibling_clone = clones;
  if (clones)
    clones->prev_sibling_clone = new_node;
  clones = new_node;
  new_node->clone_of = this;

  if (call_duplication_hook)
    symtab->call_cgraph_duplication_hooks (this, new_node);

  if (!new_inlined_to)
    dump_callgraph_transformation (this, new_node, suffix);

  return new_node;
}

static GTY(()) unsigned int clone_fn_id_num;

/* Return a new assembler name for a clone with SUFFIX of a decl named
   NAME.  */

tree
clone_function_name_1 (const char *name, const char *suffix)
{
  size_t len = strlen (name);
  char *tmp_name, *prefix;

  prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
  memcpy (prefix, name, len);
  strcpy (prefix + len + 1, suffix);
  prefix[len] = symbol_table::symbol_suffix_separator ();
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
  return get_identifier (tmp_name);
}

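/* Editor's note -- not part of the original source.  With the usual '.'
   suffix separator and the default private-name formatting, the names
   produced here typically look like "foo.constprop.0" or "bar.isra.3".
   The trailing counter comes from ASM_FORMAT_PRIVATE_NAME and is target
   dependent; targets that do not allow '.' in identifiers use '$' or '_'
   as the separator instead.  */
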
/* Return a new assembler name for a clone of DECL with SUFFIX.  */

tree
clone_function_name (tree decl, const char *suffix)
{
  tree name = DECL_ASSEMBLER_NAME (decl);
  return clone_function_name_1 (IDENTIFIER_POINTER (name), suffix);
}

/* Create a callgraph node clone with a new declaration.  The actual body
   will be copied later, at compilation stage.

   TODO: after merging in ipa-sra use function call notes instead of the
   args_to_skip bitmap interface.  */

cgraph_node *
cgraph_node::create_virtual_clone (vec<cgraph_edge *> redirect_callers,
                                   vec<ipa_replace_map *, va_gc> *tree_map,
                                   bitmap args_to_skip, const char * suffix)
{
  tree old_decl = decl;
  cgraph_node *new_node = NULL;
  tree new_decl;
  size_t len, i;
  ipa_replace_map *map;
  char *name;

  gcc_checking_assert (local.versionable);
  gcc_assert (local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip, false);

  /* These pointers represent the function body and will be populated only
     when the clone is materialized.  */
  gcc_assert (new_decl != old_decl);
  DECL_STRUCT_FUNCTION (new_decl) = NULL;
  DECL_ARGUMENTS (new_decl) = NULL;
  DECL_INITIAL (new_decl) = NULL;
  DECL_RESULT (new_decl) = NULL;
  /* We cannot do DECL_RESULT (new_decl) = NULL; here because of LTO
     partitioning sometimes storing only the clone decl instead of the
     original.  */

  /* Generate a new name for the new version.  */
  len = IDENTIFIER_LENGTH (DECL_NAME (old_decl));
  name = XALLOCAVEC (char, len + strlen (suffix) + 2);
  memcpy (name, IDENTIFIER_POINTER (DECL_NAME (old_decl)), len);
  strcpy (name + len + 1, suffix);
  name[len] = '.';
  DECL_NAME (new_decl) = get_identifier (name);
  SET_DECL_ASSEMBLER_NAME (new_decl, clone_function_name (old_decl, suffix));
  SET_DECL_RTL (new_decl, NULL);

  new_node = create_clone (new_decl, count, false,
                           redirect_callers, false, NULL, args_to_skip, suffix);

  /* Update the properties.
     Make the clone visible only within this translation unit.  Make sure
     that it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  set_new_clone_decl_and_node_flags (new_node);
  new_node->clone.tree_map = tree_map;
  if (!implicit_section)
    new_node->set_section (get_section ());

  /* Clones of global symbols or symbols with unique names are unique.  */
  if ((TREE_PUBLIC (old_decl)
       && !DECL_EXTERNAL (old_decl)
       && !DECL_WEAK (old_decl)
       && !DECL_COMDAT (old_decl))
      || in_lto_p)
    new_node->unique_name = true;
  FOR_EACH_VEC_SAFE_ELT (tree_map, i, map)
    new_node->maybe_create_reference (map->new_tree, NULL);

  if (ipa_transforms_to_apply.exists ())
    new_node->ipa_transforms_to_apply
      = ipa_transforms_to_apply.copy ();

  symtab->call_cgraph_duplication_hooks (this, new_node);

  return new_node;
}

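/* Editor's note -- illustrative sketch, not part of the original source.
   A caller such as IPA-CP builds the replacement descriptors before
   calling create_virtual_clone; schematically (names and values are
   hypothetical, and the exact fields a pass fills in may differ):

     ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
     map->old_tree = parm;                                 // PARM_DECL being specialized
     map->new_tree = build_int_cst (TREE_TYPE (parm), 7);  // replacement value
     map->replace_p = true;
     map->ref_p = false;
     vec_safe_push (tree_map, map);

   The maps are only recorded here (clone.tree_map); the actual
   substitution happens when the clone is materialized via
   tree_function_versioning.  */
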
/* A callgraph node is being removed from the symbol table; see if its entry
   can be replaced by another inline clone.  */

cgraph_node *
cgraph_node::find_replacement (void)
{
  cgraph_node *next_inline_clone, *replacement;

  for (next_inline_clone = clones;
       next_inline_clone
       && next_inline_clone->decl != decl;
       next_inline_clone = next_inline_clone->next_sibling_clone)
    ;

  /* If there is an inline clone of the node being removed, we need
     to put it into the position of the removed node and reorganize all
     other clones to be based on it.  */
  if (next_inline_clone)
    {
      cgraph_node *n;
      cgraph_node *new_clones;

      replacement = next_inline_clone;

      /* Unlink the inline clone from the list of clones of the removed
         node.  */
      if (next_inline_clone->next_sibling_clone)
        next_inline_clone->next_sibling_clone->prev_sibling_clone
          = next_inline_clone->prev_sibling_clone;
      if (next_inline_clone->prev_sibling_clone)
        {
          gcc_assert (clones != next_inline_clone);
          next_inline_clone->prev_sibling_clone->next_sibling_clone
            = next_inline_clone->next_sibling_clone;
        }
      else
        {
          gcc_assert (clones == next_inline_clone);
          clones = next_inline_clone->next_sibling_clone;
        }

      new_clones = clones;
      clones = NULL;

      /* Copy clone info.  */
      next_inline_clone->clone = clone;

      /* Now place it into the clone tree at the same level as NODE.  */
      next_inline_clone->clone_of = clone_of;
      next_inline_clone->prev_sibling_clone = NULL;
      next_inline_clone->next_sibling_clone = NULL;
      if (clone_of)
        {
          if (clone_of->clones)
            clone_of->clones->prev_sibling_clone = next_inline_clone;
          next_inline_clone->next_sibling_clone = clone_of->clones;
          clone_of->clones = next_inline_clone;
        }

      /* Merge the clone list.  */
      if (new_clones)
        {
          if (!next_inline_clone->clones)
            next_inline_clone->clones = new_clones;
          else
            {
              n = next_inline_clone->clones;
              while (n->next_sibling_clone)
                n = n->next_sibling_clone;
              n->next_sibling_clone = new_clones;
              new_clones->prev_sibling_clone = n;
            }
        }

      /* Update clone_of pointers.  */
      n = new_clones;
      while (n)
        {
          n->clone_of = next_inline_clone;
          n = n->next_sibling_clone;
        }
      return replacement;
    }
  else
    return NULL;
}

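/* Editor's note -- illustrative diagram, not part of the original source.
   Removing node N when it has an inline clone C that shares N's decl:

     before:  N -- clones: C, X, Y        after:  C -- clones: X, Y
              C.clone_of = N                      C.clone_of = N.clone_of
              X.clone_of = Y.clone_of = N         X.clone_of = Y.clone_of = C

   C takes N's place in the clone tree and adopts N's remaining clones.  */
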
/* Like cgraph_set_call_stmt but walk the clone tree and update all
   clones sharing the same function body.
   When UPDATE_SPECULATIVE is true, all three components of a
   speculative edge get updated.  Otherwise we update only the direct
   call.  */

void
cgraph_node::set_call_stmt_including_clones (gimple *old_stmt,
                                             gcall *new_stmt,
                                             bool update_speculative)
{
  cgraph_node *node;
  cgraph_edge *edge = get_edge (old_stmt);

  if (edge)
    edge->set_call_stmt (new_stmt, update_speculative);

  node = clones;
  if (node)
    while (node != this)
      {
        cgraph_edge *edge = node->get_edge (old_stmt);
        if (edge)
          {
            edge->set_call_stmt (new_stmt, update_speculative);
            /* If UPDATE_SPECULATIVE is false, it means that we are turning
               a speculative call into a real code sequence.  Update the
               callgraph edges.  */
            if (edge->speculative && !update_speculative)
              {
                cgraph_edge *direct, *indirect;
                ipa_ref *ref;

                gcc_assert (!edge->indirect_unknown_callee);
                edge->speculative_call_info (direct, indirect, ref);
                direct->speculative = false;
                indirect->speculative = false;
                ref->speculative = false;
              }
          }
        if (node->clones)
          node = node->clones;
        else if (node->next_sibling_clone)
          node = node->next_sibling_clone;
        else
          {
            while (node != this && !node->next_sibling_clone)
              node = node->clone_of;
            if (node != this)
              node = node->next_sibling_clone;
          }
      }
}

/* Like cgraph_create_edge, but walk the clone tree and update all clones
   sharing the same function body.  If clones already have an edge for
   OLD_STMT, only update the edge in the same way as
   cgraph_set_call_stmt_including_clones does.

   TODO: COUNT and LOOP_DEPTH should be properly distributed based on the
   relative frequencies of the clones.  */

void
cgraph_node::create_edge_including_clones (cgraph_node *callee,
                                           gimple *old_stmt, gcall *stmt,
                                           profile_count count,
                                           cgraph_inline_failed_t reason)
{
  cgraph_node *node;
  cgraph_edge *edge;

  if (!get_edge (stmt))
    {
      edge = create_edge (callee, stmt, count);
      edge->inline_failed = reason;
    }

  node = clones;
  if (node)
    while (node != this)
      /* Thunk clones do not get updated while copying inline function body.  */
      if (!node->thunk.thunk_p)
        {
          cgraph_edge *edge = node->get_edge (old_stmt);

          /* It is possible that the clones already contain the edge while
             the master didn't.  Either we promoted an indirect call into a
             direct call in the clone or we are processing clones of an
             unreachable master where edges have been removed.  */
          if (edge)
            edge->set_call_stmt (stmt);
          else if (! node->get_edge (stmt))
            {
              edge = node->create_edge (callee, stmt, count);
              edge->inline_failed = reason;
            }

          if (node->clones)
            node = node->clones;
          else if (node->next_sibling_clone)
            node = node->next_sibling_clone;
          else
            {
              while (node != this && !node->next_sibling_clone)
                node = node->clone_of;
              if (node != this)
                node = node->next_sibling_clone;
            }
        }
}

/* Remove the node from the cgraph and all inline clones inlined into it.
   However, skip removal of FORBIDDEN_NODE and return true if it needs to be
   removed.  This allows the function to be called from an outer loop walking
   the clone tree.  */

bool
cgraph_node::remove_symbol_and_inline_clones (cgraph_node *forbidden_node)
{
  cgraph_edge *e, *next;
  bool found = false;

  if (this == forbidden_node)
    {
      callers->remove ();
      return true;
    }
  for (e = callees; e; e = next)
    {
      next = e->next_callee;
      if (!e->inline_failed)
        found |= e->callee->remove_symbol_and_inline_clones (forbidden_node);
    }
  remove ();
  return found;
}

/* The edges representing the callers of the NEW_VERSION node were
   fixed by cgraph_function_versioning (); now the call_expr in their
   respective tree code should be updated to call the NEW_VERSION.  */

static void
update_call_expr (cgraph_node *new_version)
{
  cgraph_edge *e;

  gcc_assert (new_version);

  /* Update the call expr on the edges to call the new version.  */
  for (e = new_version->callers; e; e = e->next_caller)
    {
      function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
      gimple_call_set_fndecl (e->call_stmt, new_version->decl);
      maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
    }
}

/* Create a new cgraph node which is the new version of
   the callgraph node.  REDIRECT_CALLERS holds the caller
   edges which should be redirected to point to
   NEW_VERSION.  All the callee edges of the node
   are cloned to the new version node.  Return the new
   version node.

   If non-NULL, BBS_TO_COPY determines which basic blocks
   are copied, to prevent duplication of calls that are dead
   in the clone.  */

cgraph_node *
cgraph_node::create_version_clone (tree new_decl,
                                   vec<cgraph_edge *> redirect_callers,
                                   bitmap bbs_to_copy,
                                   const char *suffix)
{
  cgraph_node *new_version;
  cgraph_edge *e;
  unsigned i;

  new_version = cgraph_node::create (new_decl);

  new_version->analyzed = analyzed;
  new_version->definition = definition;
  new_version->local = local;
  new_version->externally_visible = false;
  new_version->no_reorder = no_reorder;
  new_version->local.local = new_version->definition;
  new_version->global = global;
  new_version->rtl = rtl;
  new_version->count = count;

  for (e = callees; e; e = e->next_callee)
    if (!bbs_to_copy
        || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      e->clone (new_version, e->call_stmt,
                e->lto_stmt_uid, count, count,
                true);
  for (e = indirect_calls; e; e = e->next_callee)
    if (!bbs_to_copy
        || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      e->clone (new_version, e->call_stmt,
                e->lto_stmt_uid, count, count,
                true);
  FOR_EACH_VEC_ELT (redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
         version.  */
      e->redirect_callee (new_version);
    }

  symtab->call_cgraph_duplication_hooks (this, new_version);

  dump_callgraph_transformation (this, new_version, suffix);

  return new_version;
}

/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   The REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).

   If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
   from the new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
   If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.

   Return the new version's cgraph node.  */

cgraph_node *
cgraph_node::create_version_clone_with_body
  (vec<cgraph_edge *> redirect_callers,
   vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
   bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
   const char *suffix)
{
  tree old_decl = decl;
  cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  gcc_assert (local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node for the new version.  */
  if (!args_to_skip && !skip_return)
    new_decl = copy_node (old_decl);
  else
    new_decl
      = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name (old_decl, suffix);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  /* When the old decl was a con-/destructor make sure the clone isn't.  */
  DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
  DECL_STATIC_DESTRUCTOR (new_decl) = 0;

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node = create_version_clone (new_decl, redirect_callers,
                                           bbs_to_copy, suffix);

  if (ipa_transforms_to_apply.exists ())
    new_version_node->ipa_transforms_to_apply
      = ipa_transforms_to_apply.copy ();
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
                            skip_return, bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.  Make
     sure that it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  new_version_node->make_decl_local ();
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;
  if (!implicit_section)
    new_version_node->set_section (get_section ());
  /* Clones of global symbols or symbols with unique names are unique.  */
  if ((TREE_PUBLIC (old_decl)
       && !DECL_EXTERNAL (old_decl)
       && !DECL_WEAK (old_decl)
       && !DECL_COMDAT (old_decl))
      || in_lto_p)
    new_version_node->unique_name = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  symtab->call_cgraph_insertion_hooks (new_version_node);
  return new_version_node;
}

/* Given a virtual clone, turn it into an actual clone.  */

static void
cgraph_materialize_clone (cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  node->former_clone_of = node->clone_of->decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
                            node->clone.tree_map, true,
                            node->clone.args_to_skip, false,
                            NULL, NULL);
  if (symtab->dump_file)
    {
      dump_function_to_file (node->clone_of->decl, symtab->dump_file,
                             dump_flags);
      dump_function_to_file (node->decl, symtab->dump_file, dump_flags);
    }

  /* The function is no longer a clone.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      node->clone_of->release_body ();
      node->clone_of->remove_callees ();
      node->clone_of->remove_all_references ();
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}

/* Once all functions from the compilation unit are in memory, produce all
   clones and update all calls.  We might also do this on demand if we don't
   want to bring all functions to memory prior to compilation, but the current
   WHOPR implementation does that and it is a bit easier to keep everything
   right in this order.  */

void
symbol_table::materialize_all_clones (void)
{
  cgraph_node *node;
  bool stabilized = false;

  if (symtab->dump_file)
    fprintf (symtab->dump_file, "Materializing clones\n");

  cgraph_node::checking_verify_cgraph_nodes ();

  /* We could also use topological order, but the number of iterations should
     be bounded by the number of IPA passes, since a single IPA pass is
     probably not going to create clones of clones it created itself.  */
  while (!stabilized)
    {
      stabilized = true;
      FOR_EACH_FUNCTION (node)
	{
	  if (node->clone_of && node->decl != node->clone_of->decl
	      && !gimple_has_body_p (node->decl))
	    {
	      if (!node->clone_of->clone_of)
		node->clone_of->get_untransformed_body ();
	      if (gimple_has_body_p (node->clone_of->decl))
		{
		  if (symtab->dump_file)
		    {
		      fprintf (symtab->dump_file, "cloning %s to %s\n",
			       xstrdup_for_dump (node->clone_of->name ()),
			       xstrdup_for_dump (node->name ()));
		      if (node->clone.tree_map)
			{
			  unsigned int i;
			  fprintf (symtab->dump_file, "   replace map: ");
			  for (i = 0;
			       i < vec_safe_length (node->clone.tree_map);
			       i++)
			    {
			      ipa_replace_map *replace_info;
			      replace_info = (*node->clone.tree_map)[i];
			      print_generic_expr (symtab->dump_file,
						  replace_info->old_tree);
			      fprintf (symtab->dump_file, " -> ");
			      print_generic_expr (symtab->dump_file,
						  replace_info->new_tree);
			      fprintf (symtab->dump_file, "%s%s;",
				       replace_info->replace_p ? "(replace)":"",
				       replace_info->ref_p ? "(ref)":"");
			    }
			  fprintf (symtab->dump_file, "\n");
			}
		      if (node->clone.args_to_skip)
			{
			  fprintf (symtab->dump_file, "   args_to_skip: ");
			  dump_bitmap (symtab->dump_file,
				       node->clone.args_to_skip);
			}
		      if (node->clone.args_to_skip)
			{
			  fprintf (symtab->dump_file, " combined_args_to_skip:");
			  dump_bitmap (symtab->dump_file,
				       node->clone.combined_args_to_skip);
			}
		    }
		  cgraph_materialize_clone (node);
		  stabilized = false;
		}
	    }
	}
    }
  FOR_EACH_FUNCTION (node)
    if (!node->analyzed && node->callees)
      {
	node->remove_callees ();
	node->remove_all_references ();
      }
    else
      node->clear_stmts_in_references ();
  if (symtab->dump_file)
    fprintf (symtab->dump_file, "Materialization Call site updates done.\n");

  cgraph_node::checking_verify_cgraph_nodes ();

  symtab->remove_unreachable_nodes (symtab->dump_file);
}

#include "gt-cgraphclones.h"