/* Callgraph clones

   Copyright (C) 2003-2018 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This module provides facilities for cloning functions, i.e. creating
   new functions based on existing functions with simple modifications,
   such as replacement of parameters.

   To allow whole program optimization without actual presence of function
   bodies, an additional infrastructure is provided for so-called virtual
   clones.

   A virtual clone in the callgraph is a function that has no
   associated body, just a description of how to create its body based
   on a different function (which itself may be a virtual clone).

   The description of function modifications includes adjustments to
   the function's signature (which allows, for example, removing or
   adding function arguments), substitutions to perform on the
   function body, and, for inlined functions, a pointer to the
   function that it will be inlined into.

   It is also possible to redirect any edge of the callgraph from a
   function to its virtual clone.  This implies updating of the call
   site to adjust for the new function signature.

   Most of the transformations performed by inter-procedural
   optimizations can be represented via virtual clones.  For
   instance, a constant propagation pass can produce a virtual clone
   of the function which replaces one of its arguments by a
   constant.  The inliner can represent its decisions by producing a
   clone of a function whose body will be later integrated into
   a given function.

   Using virtual clones, the program can be easily updated
   during the Execute stage, solving most of the pass interaction
   problems that would otherwise occur during Transform.

   Virtual clones are later materialized in the LTRANS stage and
   turned into real functions.  Passes executed after the virtual
   clone was introduced also perform their Transform stage
   on new functions, so for a pass there is no significant
   difference between operating on a real function or a virtual
   clone introduced before its Execute stage.

   Optimization passes then work on virtual clones introduced before
   their Execute stage as if they were real functions.  The
   only difference is that clones are not visible during the
   Generate Summary stage.  */
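/* Example (a hedged sketch, not a definitive recipe): an IPA pass such as
   constant propagation might create a virtual clone that substitutes a
   constant for one parameter roughly as below.  The exact ipa_replace_map
   fields and the use of collect_callers are assumptions based on how other
   IPA passes use the interfaces defined in this file:

     // Edges that should call the specialized copy.
     auto_vec<cgraph_edge *> callers = node->collect_callers ();

     // Describe "replace parameter number 2 by the constant 42".
     ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
     map->old_tree = NULL_TREE;
     map->parm_num = 2;
     map->new_tree = build_int_cst (integer_type_node, 42);
     map->replace_p = true;
     map->ref_p = false;
     vec<ipa_replace_map *, va_gc> *tree_map = NULL;
     vec_safe_push (tree_map, map);

     // No body is copied here; the clone stays virtual until it is
     // materialized by symbol_table::materialize_all_clones.
     cgraph_node *clone
       = node->create_virtual_clone (callers, tree_map, NULL, "constprop");
*/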
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "stringpool.h"
#include "cgraph.h"
#include "lto-streamer.h"
#include "tree-eh.h"
#include "tree-cfg.h"
#include "tree-inline.h"
#include "dumpfile.h"
#include "gimple-pretty-print.h"
/* Create a clone of the edge in node N represented by CALL_STMT, with the
   profile count scaled by NUM/DEN.  */

cgraph_edge *
cgraph_edge::clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
                    profile_count num, profile_count den,
                    bool update_original)
{
  cgraph_edge *new_edge;
  profile_count::adjust_for_ipa_scaling (&num, &den);
  profile_count prof_count = count.apply_scale (num, den);

  if (indirect_unknown_callee)
    {
      tree decl;

      if (call_stmt && (decl = gimple_call_fndecl (call_stmt))
          /* When the call is speculative, we need to resolve it
             via cgraph_resolve_speculation and not here.  */
          && !speculative)
        {
          cgraph_node *callee = cgraph_node::get (decl);
          gcc_checking_assert (callee);
          new_edge = n->create_edge (callee, call_stmt, prof_count);
        }
      else
        {
          new_edge = n->create_indirect_edge (call_stmt,
                                              indirect_info->ecf_flags,
                                              prof_count, false);
          *new_edge->indirect_info = *indirect_info;
        }
    }
  else
    {
      new_edge = n->create_edge (callee, call_stmt, prof_count);
      if (indirect_info)
        {
          new_edge->indirect_info
            = ggc_cleared_alloc<cgraph_indirect_call_info> ();
          *new_edge->indirect_info = *indirect_info;
        }
    }

  new_edge->inline_failed = inline_failed;
  new_edge->indirect_inlining_edge = indirect_inlining_edge;
  new_edge->lto_stmt_uid = stmt_uid;
  /* Clone flags that depend on call_stmt availability manually.  */
  new_edge->can_throw_external = can_throw_external;
  new_edge->call_stmt_cannot_inline_p = call_stmt_cannot_inline_p;
  new_edge->speculative = speculative;
  new_edge->in_polymorphic_cdtor = in_polymorphic_cdtor;

  /* Update IPA profile.  Local profiles need no updating in original.  */
  if (update_original)
    count = count.combine_with_ipa_count (count.ipa ()
                                          - new_edge->count.ipa ());
  symtab->call_edge_duplication_hooks (this, new_edge);
  return new_edge;
}
/* Build variant of function type ORIG_TYPE skipping ARGS_TO_SKIP and the
   return value if SKIP_RETURN is true.  */

tree
cgraph_build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
                                      bool skip_return)
{
  tree new_type = NULL;
  tree args, new_args = NULL;
  tree new_reversed;
  int i = 0;

  for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node;
       args = TREE_CHAIN (args), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args);

  new_reversed = nreverse (new_args);
  if (args)
    {
      if (new_reversed)
        TREE_CHAIN (new_args) = void_list_node;
      else
        new_reversed = void_list_node;
    }

  /* Use copy_node to preserve as much as possible from the original type
     (debug info, attribute lists etc.).
     The exception is that METHOD_TYPEs must have a THIS argument.
     When we are asked to remove it, we need to build a new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || !args_to_skip
      || !bitmap_bit_p (args_to_skip, 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
        = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
                                                         new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
    }

  if (skip_return)
    TREE_TYPE (new_type) = void_type_node;

  return new_type;
}
/* Build variant of function decl ORIG_DECL skipping ARGS_TO_SKIP and the
   return value if SKIP_RETURN is true.

   Arguments from the DECL_ARGUMENTS list can't be removed now, since they are
   linked by TREE_CHAIN directly.  The caller is responsible for eliminating
   them when they are being duplicated (i.e. copy_arguments_for_versioning).  */

static tree
build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip,
                               bool skip_return)
{
  tree new_decl = copy_node (orig_decl);
  tree new_type;

  new_type = TREE_TYPE (orig_decl);
  if (prototype_p (new_type)
      || (skip_return && !VOID_TYPE_P (TREE_TYPE (new_type))))
    new_type
      = cgraph_build_function_type_skip_args (new_type, args_to_skip,
                                              skip_return);
  TREE_TYPE (new_decl) = new_type;

  /* For declarations setting DECL_VINDEX (i.e. methods)
     we expect the first argument to be the THIS pointer.  */
  if (args_to_skip && bitmap_bit_p (args_to_skip, 0))
    DECL_VINDEX (new_decl) = NULL_TREE;

  /* When the signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (new_decl)
      && args_to_skip
      && !bitmap_empty_p (args_to_skip))
    {
      DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
    }
  /* The FE might have information and assumptions about the other
     arguments.  */
  DECL_LANG_SPECIFIC (new_decl) = NULL;
  return new_decl;
}
/* Set flags of NEW_NODE and its decl.  NEW_NODE is a newly created private
   clone or its thunk.  */

static void
set_new_clone_decl_and_node_flags (cgraph_node *new_node)
{
  DECL_EXTERNAL (new_node->decl) = 0;
  TREE_PUBLIC (new_node->decl) = 0;
  DECL_COMDAT (new_node->decl) = 0;
  DECL_WEAK (new_node->decl) = 0;
  DECL_VIRTUAL_P (new_node->decl) = 0;
  DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
  DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;

  new_node->externally_visible = 0;
  new_node->local.local = 1;
  new_node->lowered = true;
}
/* Duplicate thunk THUNK if necessary but make it refer to NODE.
   ARGS_TO_SKIP, if non-NULL, determines which parameters should be omitted.
   The function can return NODE if no thunk is necessary, which can happen
   when the thunk is this_adjusting but we are removing the this parameter.  */

static cgraph_node *
duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
{
  cgraph_node *new_thunk, *thunk_of;
  thunk_of = thunk->callees->callee->ultimate_alias_target ();

  if (thunk_of->thunk.thunk_p)
    node = duplicate_thunk_for_node (thunk_of, node);

  if (!DECL_ARGUMENTS (thunk->decl))
    thunk->get_untransformed_body ();

  cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (cs->caller->thunk.thunk_p
        && cs->caller->thunk.this_adjusting == thunk->thunk.this_adjusting
        && cs->caller->thunk.fixed_offset == thunk->thunk.fixed_offset
        && cs->caller->thunk.virtual_offset_p == thunk->thunk.virtual_offset_p
        && cs->caller->thunk.virtual_value == thunk->thunk.virtual_value)
      return cs->caller;

  tree new_decl;
  if (!node->clone.args_to_skip)
    new_decl = copy_node (thunk->decl);
  else
    {
      /* We do not need to duplicate this_adjusting thunks if we have removed
         this.  */
      if (thunk->thunk.this_adjusting
          && bitmap_bit_p (node->clone.args_to_skip, 0))
        return node;

      new_decl = build_function_decl_skip_args (thunk->decl,
                                                node->clone.args_to_skip,
                                                false);
    }

  tree *link = &DECL_ARGUMENTS (new_decl);
  int i = 0;
  for (tree pd = DECL_ARGUMENTS (thunk->decl); pd; pd = DECL_CHAIN (pd), i++)
    if (!node->clone.args_to_skip
        || !bitmap_bit_p (node->clone.args_to_skip, i))
      {
        tree nd = copy_node (pd);
        DECL_CONTEXT (nd) = new_decl;
        *link = nd;
        link = &DECL_CHAIN (nd);
      }
  *link = NULL_TREE;

  gcc_checking_assert (!DECL_STRUCT_FUNCTION (new_decl));
  gcc_checking_assert (!DECL_INITIAL (new_decl));
  gcc_checking_assert (!DECL_RESULT (new_decl));
  gcc_checking_assert (!DECL_RTL_SET_P (new_decl));

  DECL_NAME (new_decl) = clone_function_name (thunk->decl, "artificial_thunk");
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));

  new_thunk = cgraph_node::create (new_decl);
  set_new_clone_decl_and_node_flags (new_thunk);
  new_thunk->definition = true;
  new_thunk->local.can_change_signature = node->local.can_change_signature;
  new_thunk->thunk = thunk->thunk;
  new_thunk->unique_name = in_lto_p;
  new_thunk->former_clone_of = thunk->decl;
  new_thunk->clone.args_to_skip = node->clone.args_to_skip;
  new_thunk->clone.combined_args_to_skip = node->clone.combined_args_to_skip;

  cgraph_edge *e = new_thunk->create_edge (node, NULL, new_thunk->count);
  symtab->call_edge_duplication_hooks (thunk->callees, e);
  symtab->call_cgraph_duplication_hooks (thunk, new_thunk);
  return new_thunk;
}
/* If E does not lead to a thunk, simply redirect it to N.  Otherwise create
   one or more equivalent thunks for N and redirect E to the first in the
   chain.  Note that it is then necessary to call
   n->expand_all_artificial_thunks once all callers are redirected.  */

void
cgraph_edge::redirect_callee_duplicating_thunks (cgraph_node *n)
{
  cgraph_node *orig_to = callee->ultimate_alias_target ();
  if (orig_to->thunk.thunk_p)
    n = duplicate_thunk_for_node (orig_to, n);

  redirect_callee (n);
}
/* Call expand_thunk on all callers that are thunks and analyze those
   nodes that were expanded.  */

void
cgraph_node::expand_all_artificial_thunks ()
{
  cgraph_edge *e;
  for (e = callers; e;)
    if (e->caller->thunk.thunk_p)
      {
        cgraph_node *thunk = e->caller;

        e = e->next_caller;
        if (thunk->expand_thunk (false, false))
          {
            thunk->thunk.thunk_p = false;
            thunk->analyze ();
          }
        thunk->expand_all_artificial_thunks ();
      }
    else
      e = e->next_caller;
}
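/* The two functions above are meant to be used together: first every
   relevant caller edge is redirected (possibly creating artificial thunks
   for the target), and only afterwards the collected thunks are expanded.
   A sketch of the calling pattern, assuming REDIRECT_CALLERS and NEW_NODE
   were prepared by the pass (this mirrors what create_clone does below):

     unsigned i;
     cgraph_edge *e;
     FOR_EACH_VEC_ELT (redirect_callers, i, e)
       e->redirect_callee_duplicating_thunks (new_node);
     new_node->expand_all_artificial_thunks ();
*/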
/* Record the transformation from ORIGINAL to CLONE, tagged with SUFFIX,
   in the ipa-clones dump file if it is enabled.  */

void
dump_callgraph_transformation (const cgraph_node *original,
                               const cgraph_node *clone,
                               const char *suffix)
{
  if (symtab->ipa_clones_dump_file)
    {
      fprintf (symtab->ipa_clones_dump_file,
               "Callgraph clone;%s;%d;%s;%d;%d;%s;%d;%s;%d;%d;%s\n",
               original->asm_name (), original->order,
               DECL_SOURCE_FILE (original->decl),
               DECL_SOURCE_LINE (original->decl),
               DECL_SOURCE_COLUMN (original->decl), clone->asm_name (),
               clone->order, DECL_SOURCE_FILE (clone->decl),
               DECL_SOURCE_LINE (clone->decl), DECL_SOURCE_COLUMN (clone->decl),
               suffix);

      symtab->cloned_nodes.add (original);
      symtab->cloned_nodes.add (clone);
    }
}
/* Create node representing clone of N executed COUNT times.  Decrease
   the execution counts from original node too.
   The new clone will have decl set to DECL that may or may not be the same
   as the decl of N.

   When UPDATE_ORIGINAL is true, the counts are subtracted from the original
   function's profile to reflect the fact that part of the execution is
   handled by the new node.
   When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
   the new clone.  Otherwise the caller is responsible for doing so later.

   If the new node is being inlined into another one, NEW_INLINED_TO should be
   the outline function the new one is (even indirectly) inlined to.  All hooks
   will see this in node's global.inlined_to, when invoked.  Can be NULL if the
   node is not inlined.  */

cgraph_node *
cgraph_node::create_clone (tree new_decl, profile_count prof_count,
                           bool update_original,
                           vec<cgraph_edge *> redirect_callers,
                           bool call_duplication_hook,
                           cgraph_node *new_inlined_to,
                           bitmap args_to_skip, const char *suffix)
{
  cgraph_node *new_node = symtab->create_empty ();
  cgraph_edge *e;
  unsigned i;
  profile_count old_count = count;

  if (new_inlined_to)
    dump_callgraph_transformation (this, new_inlined_to, "inlining to");

  /* When inlining we scale precisely to prof_count, when cloning we can
     preserve local profile.  */
  if (!new_inlined_to)
    prof_count = count.combine_with_ipa_count (prof_count);
  new_node->count = prof_count;

  /* Update IPA profile.  Local profiles need no updating in original.  */
  if (update_original)
    count = count.combine_with_ipa_count (count.ipa () - prof_count.ipa ());
  new_node->decl = new_decl;
  new_node->register_symbol ();
  new_node->origin = origin;
  new_node->lto_file_data = lto_file_data;
  if (new_node->origin)
    {
      new_node->next_nested = new_node->origin->nested;
      new_node->origin->nested = new_node;
    }
  new_node->analyzed = analyzed;
  new_node->definition = definition;
  new_node->local = local;
  new_node->externally_visible = false;
  new_node->no_reorder = no_reorder;
  new_node->local.local = true;
  new_node->global = global;
  new_node->global.inlined_to = new_inlined_to;
  new_node->frequency = frequency;
  new_node->tp_first_run = tp_first_run;
  new_node->tm_clone = tm_clone;
  new_node->icf_merged = icf_merged;
  new_node->merged_comdat = merged_comdat;
  new_node->thunk = thunk;

  new_node->clone.tree_map = NULL;
  new_node->clone.args_to_skip = args_to_skip;
  new_node->split_part = split_part;
  if (!args_to_skip)
    new_node->clone.combined_args_to_skip = clone.combined_args_to_skip;
  else if (clone.combined_args_to_skip)
    {
      new_node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
      bitmap_ior (new_node->clone.combined_args_to_skip,
                  clone.combined_args_to_skip, args_to_skip);
    }
  else
    new_node->clone.combined_args_to_skip = args_to_skip;

  FOR_EACH_VEC_ELT (redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
         version.  The only exception is when the edge was proved to
         be unreachable during the cloning procedure.  */
      if (!e->callee
          || DECL_BUILT_IN_CLASS (e->callee->decl) != BUILT_IN_NORMAL
          || DECL_FUNCTION_CODE (e->callee->decl) != BUILT_IN_UNREACHABLE)
        e->redirect_callee_duplicating_thunks (new_node);
    }
  new_node->expand_all_artificial_thunks ();

  for (e = callees; e; e = e->next_callee)
    e->clone (new_node, e->call_stmt, e->lto_stmt_uid, new_node->count,
              old_count, update_original);

  for (e = indirect_calls; e; e = e->next_callee)
    e->clone (new_node, e->call_stmt, e->lto_stmt_uid,
              new_node->count, old_count, update_original);
  new_node->clone_references (this);

  new_node->next_sibling_clone = clones;
  if (clones)
    clones->prev_sibling_clone = new_node;
  clones = new_node;
  new_node->clone_of = this;

  if (call_duplication_hook)
    symtab->call_cgraph_duplication_hooks (this, new_node);

  if (!new_inlined_to)
    dump_callgraph_transformation (this, new_node, suffix);

  return new_node;
}
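/* A hedged sketch of how the inliner side of this interface looks
   (simplified; the real logic lives in ipa-inline-transform.c): when a call
   edge E is inlined, the callee is cloned with the outermost function it
   ends up inlined into passed as NEW_INLINED_TO:

     cgraph_node *inlining_into
       = e->caller->global.inlined_to ? e->caller->global.inlined_to
                                      : e->caller;
     cgraph_node *inline_copy
       = e->callee->create_clone (e->callee->decl, e->count,
                                  true,           // update_original
                                  vNULL,          // no callers redirected here
                                  false,          // hooks run by the caller
                                  inlining_into,  // global.inlined_to of copy
                                  NULL, "inline");

   The argument values are illustrative assumptions only; what matters is
   that NEW_INLINED_TO propagates to global.inlined_to of the new node, as
   documented above.  */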
/* Counter used to make clone assembler names unique.  */
static GTY(()) unsigned int clone_fn_id_num;

/* Return a new assembler name for a clone with SUFFIX of a decl named
   NAME.  */

tree
clone_function_name_1 (const char *name, const char *suffix)
{
  size_t len = strlen (name);
  char *tmp_name, *prefix;

  prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
  memcpy (prefix, name, len);
  strcpy (prefix + len + 1, suffix);
  prefix[len] = symbol_table::symbol_suffix_separator ();
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
  return get_identifier (tmp_name);
}

/* Return a new assembler name for a clone of DECL with SUFFIX.  */

tree
clone_function_name (tree decl, const char *suffix)
{
  tree name = DECL_ASSEMBLER_NAME (decl);
  return clone_function_name_1 (IDENTIFIER_POINTER (name), suffix);
}
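/* On a typical ELF target symbol_suffix_separator is '.' and
   ASM_FORMAT_PRIVATE_NAME appends a unique number, so the two functions
   above produce assembler names such as (illustrative values):

     clone_function_name_1 ("foo", "constprop")  -> "foo.constprop.0"
     clone_function_name_1 ("bar", "part")       -> "bar.part.1"

   Each call consumes one value of clone_fn_id_num, which keeps clone
   assembler names unique within the translation unit.  */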
/* Create callgraph node clone with new declaration.  The actual body will
   be copied later at compilation stage.

   TODO: after merging in ipa-sra use function call notes instead of the
   args_to_skip bitmap interface.  */

cgraph_node *
cgraph_node::create_virtual_clone (vec<cgraph_edge *> redirect_callers,
                                   vec<ipa_replace_map *, va_gc> *tree_map,
                                   bitmap args_to_skip, const char *suffix)
{
  tree old_decl = decl;
  cgraph_node *new_node = NULL;
  tree new_decl;
  size_t len, i;
  ipa_replace_map *map;
  char *name;

  gcc_checking_assert (local.versionable);
  gcc_assert (local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip, false);

  /* These pointers represent the function body and will be populated only
     when the clone is materialized.  */
  gcc_assert (new_decl != old_decl);
  DECL_STRUCT_FUNCTION (new_decl) = NULL;
  DECL_ARGUMENTS (new_decl) = NULL;
  DECL_INITIAL (new_decl) = NULL;
  DECL_RESULT (new_decl) = NULL;
  /* We can not do DECL_RESULT (new_decl) = NULL; here because of LTO
     partitioning sometimes storing only the clone decl instead of the
     original.  */

  /* Generate a new name for the new version.  */
  len = IDENTIFIER_LENGTH (DECL_NAME (old_decl));
  name = XALLOCAVEC (char, len + strlen (suffix) + 2);
  memcpy (name, IDENTIFIER_POINTER (DECL_NAME (old_decl)), len);
  strcpy (name + len + 1, suffix);
  name[len] = '.';
  DECL_NAME (new_decl) = get_identifier (name);
  SET_DECL_ASSEMBLER_NAME (new_decl, clone_function_name (old_decl, suffix));
  SET_DECL_RTL (new_decl, NULL);

  new_node = create_clone (new_decl, count, false,
                           redirect_callers, false, NULL, args_to_skip, suffix);

  /* Update the properties.
     Make the clone visible only within this translation unit.  Make sure
     that it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  set_new_clone_decl_and_node_flags (new_node);
  new_node->clone.tree_map = tree_map;
  if (!implicit_section)
    new_node->set_section (get_section ());

  /* Clones of global symbols or symbols with unique names are unique.  */
  if ((TREE_PUBLIC (old_decl)
       && !DECL_EXTERNAL (old_decl)
       && !DECL_WEAK (old_decl)
       && !DECL_COMDAT (old_decl))
      || in_lto_p)
    new_node->unique_name = true;
  FOR_EACH_VEC_SAFE_ELT (tree_map, i, map)
    new_node->maybe_create_reference (map->new_tree, NULL);

  if (ipa_transforms_to_apply.exists ())
    new_node->ipa_transforms_to_apply
      = ipa_transforms_to_apply.copy ();

  symtab->call_cgraph_duplication_hooks (this, new_node);

  return new_node;
}
/* A callgraph node is being removed from the symbol table; see if its entry
   can be replaced by another inline clone.  */

cgraph_node *
cgraph_node::find_replacement (void)
{
  cgraph_node *next_inline_clone, *replacement;

  for (next_inline_clone = clones;
       next_inline_clone
       && next_inline_clone->decl != decl;
       next_inline_clone = next_inline_clone->next_sibling_clone)
    ;

  /* If there is an inline clone of the node being removed, we need
     to put it into the position of the removed node and reorganize all
     other clones to be based on it.  */
  if (next_inline_clone)
    {
      cgraph_node *n;
      cgraph_node *new_clones;

      replacement = next_inline_clone;

      /* Unlink the inline clone from the list of clones of the removed
         node.  */
      if (next_inline_clone->next_sibling_clone)
        next_inline_clone->next_sibling_clone->prev_sibling_clone
          = next_inline_clone->prev_sibling_clone;
      if (next_inline_clone->prev_sibling_clone)
        {
          gcc_assert (clones != next_inline_clone);
          next_inline_clone->prev_sibling_clone->next_sibling_clone
            = next_inline_clone->next_sibling_clone;
        }
      else
        {
          gcc_assert (clones == next_inline_clone);
          clones = next_inline_clone->next_sibling_clone;
        }

      new_clones = clones;
      clones = NULL;

      /* Copy clone info.  */
      next_inline_clone->clone = clone;

      /* Now place it into the clone tree at the same level as NODE.  */
      next_inline_clone->clone_of = clone_of;
      next_inline_clone->prev_sibling_clone = NULL;
      next_inline_clone->next_sibling_clone = NULL;
      if (clone_of)
        {
          if (clone_of->clones)
            clone_of->clones->prev_sibling_clone = next_inline_clone;
          next_inline_clone->next_sibling_clone = clone_of->clones;
          clone_of->clones = next_inline_clone;
        }

      /* Merge the clone list.  */
      if (new_clones)
        {
          if (!next_inline_clone->clones)
            next_inline_clone->clones = new_clones;
          else
            {
              n = next_inline_clone->clones;
              while (n->next_sibling_clone)
                n = n->next_sibling_clone;
              n->next_sibling_clone = new_clones;
              new_clones->prev_sibling_clone = n;
            }
        }

      /* Update clone_of pointers.  */
      n = new_clones;
      while (n)
        {
          n->clone_of = next_inline_clone;
          n = n->next_sibling_clone;
        }
      return replacement;
    }
  else
    return NULL;
}
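/* The clone tree manipulated above and below is threaded through four
   cgraph_node fields; a summary of the invariants this file relies on:

     clone_of            node this clone was created from (NULL for originals)
     clones              first node in the list of direct clones
     next_sibling_clone  next clone sharing the same clone_of
     prev_sibling_clone  previous clone sharing the same clone_of

   So for a node N, every direct clone C has C->clone_of == N, and all such
   clones form a doubly linked list starting at N->clones.  */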
/* Like cgraph_set_call_stmt but walk the clone tree and update all
   clones sharing the same function body.
   When UPDATE_SPECULATIVE is true, all three components of a
   speculative edge get updated.  Otherwise we update only the direct
   call.  */

void
cgraph_node::set_call_stmt_including_clones (gimple *old_stmt,
                                             gcall *new_stmt,
                                             bool update_speculative)
{
  cgraph_node *node;
  cgraph_edge *edge = get_edge (old_stmt);

  if (edge)
    edge->set_call_stmt (new_stmt, update_speculative);

  node = clones;
  if (node)
    while (node != this)
      {
        cgraph_edge *edge = node->get_edge (old_stmt);
        if (edge)
          {
            edge->set_call_stmt (new_stmt, update_speculative);
            /* If UPDATE_SPECULATIVE is false, it means that we are turning
               a speculative call into a real code sequence.  Update the
               callgraph edges.  */
            if (edge->speculative && !update_speculative)
              {
                cgraph_edge *direct, *indirect;
                ipa_ref *ref;

                gcc_assert (!edge->indirect_unknown_callee);
                edge->speculative_call_info (direct, indirect, ref);
                direct->speculative = false;
                indirect->speculative = false;
                ref->speculative = false;
              }
          }
        if (node->clones)
          node = node->clones;
        else if (node->next_sibling_clone)
          node = node->next_sibling_clone;
        else
          {
            while (node != this && !node->next_sibling_clone)
              node = node->clone_of;
            if (node != this)
              node = node->next_sibling_clone;
          }
      }
}
/* Like cgraph_create_edge walk the clone tree and update all clones sharing
   the same function body.  If clones already have an edge for OLD_STMT, only
   update the edge the same way cgraph_set_call_stmt_including_clones does.

   TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
   frequencies of the clones.  */

void
cgraph_node::create_edge_including_clones (cgraph_node *callee,
                                           gimple *old_stmt, gcall *stmt,
                                           profile_count count,
                                           cgraph_inline_failed_t reason)
{
  cgraph_node *node;
  cgraph_edge *edge;

  if (!get_edge (stmt))
    {
      edge = create_edge (callee, stmt, count);
      edge->inline_failed = reason;
    }

  node = clones;
  if (node)
    while (node != this)
      {
        /* Thunk clones do not get updated while copying the inline function
           body.  */
        if (!node->thunk.thunk_p)
          {
            cgraph_edge *edge = node->get_edge (old_stmt);

            /* It is possible that clones already contain the edge while the
               master didn't.  Either we promoted an indirect call into a
               direct call in the clone or we are processing clones of an
               unreachable master where edges have been removed.  */
            if (edge)
              edge->set_call_stmt (stmt);
            else if (! node->get_edge (stmt))
              {
                edge = node->create_edge (callee, stmt, count);
                edge->inline_failed = reason;
              }
          }
        if (node->clones)
          node = node->clones;
        else if (node->next_sibling_clone)
          node = node->next_sibling_clone;
        else
          {
            while (node != this && !node->next_sibling_clone)
              node = node->clone_of;
            if (node != this)
              node = node->next_sibling_clone;
          }
      }
}
/* Remove the node from cgraph and all inline clones inlined into it.
   However, skip removal of FORBIDDEN_NODE and return true if it needs to be
   removed.  This allows calling the function from an outer loop walking the
   clone tree.  */

bool
cgraph_node::remove_symbol_and_inline_clones (cgraph_node *forbidden_node)
{
  cgraph_edge *e, *next;
  bool found = false;

  if (this == forbidden_node)
    {
      callers->remove ();
      return true;
    }
  for (e = callees; e; e = next)
    {
      next = e->next_callee;
      if (!e->inline_failed)
        found |= e->callee->remove_symbol_and_inline_clones (forbidden_node);
    }
  remove ();
  return found;
}
/* The edges representing the callers of the NEW_VERSION node were
   fixed by cgraph_function_versioning (); now the call_expr in their
   respective tree code should be updated to call the NEW_VERSION.  */

static void
update_call_expr (cgraph_node *new_version)
{
  cgraph_edge *e;

  gcc_assert (new_version);

  /* Update the call expr on the edges to call the new version.  */
  for (e = new_version->callers; e; e = e->next_caller)
    {
      function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
      gimple_call_set_fndecl (e->call_stmt, new_version->decl);
      maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
    }
}
/* Create a new cgraph node which is the new version of the
   callgraph node.  REDIRECT_CALLERS holds the caller
   edges which should be redirected to point to
   NEW_VERSION.  All the callee edges of the node
   are cloned to the new version node.  Return the new
   version node.

   If non-NULL, BBS_TO_COPY determines which basic blocks
   are copied, to prevent duplication of calls that are dead
   in the clone.  */

cgraph_node *
cgraph_node::create_version_clone (tree new_decl,
                                   vec<cgraph_edge *> redirect_callers,
                                   bitmap bbs_to_copy,
                                   const char *suffix)
{
  cgraph_node *new_version;
  cgraph_edge *e;
  unsigned i;

  new_version = cgraph_node::create (new_decl);

  new_version->analyzed = analyzed;
  new_version->definition = definition;
  new_version->local = local;
  new_version->externally_visible = false;
  new_version->no_reorder = no_reorder;
  new_version->local.local = new_version->definition;
  new_version->global = global;
  new_version->rtl = rtl;
  new_version->count = count;

  for (e = callees; e; e = e->next_callee)
    if (!bbs_to_copy
        || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      e->clone (new_version, e->call_stmt,
                e->lto_stmt_uid, count, count,
                true);
  for (e = indirect_calls; e; e = e->next_callee)
    if (!bbs_to_copy
        || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      e->clone (new_version, e->call_stmt,
                e->lto_stmt_uid, count, count,
                true);
  FOR_EACH_VEC_ELT (redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
         version.  */
      e->redirect_callee (new_version);
    }

  symtab->call_cgraph_duplication_hooks (this, new_version);

  dump_callgraph_transformation (this, new_version, suffix);

  return new_version;
}
/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).

   If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
   from the new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
   If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.

   Return the new version's cgraph node.  */

cgraph_node *
cgraph_node::create_version_clone_with_body
  (vec<cgraph_edge *> redirect_callers,
   vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
   bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
   const char *suffix)
{
  tree old_decl = decl;
  cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  gcc_assert (local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node for the new version.  */
  if (!args_to_skip && !skip_return)
    new_decl = copy_node (old_decl);
  else
    new_decl
      = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name (old_decl, suffix);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  /* When the old decl was a con-/destructor make sure the clone isn't.  */
  DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
  DECL_STATIC_DESTRUCTOR (new_decl) = 0;

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node = create_version_clone (new_decl, redirect_callers,
                                           bbs_to_copy, suffix);

  if (ipa_transforms_to_apply.exists ())
    new_version_node->ipa_transforms_to_apply
      = ipa_transforms_to_apply.copy ();
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
                            skip_return, bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.  Make
     sure that it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  new_version_node->make_decl_local ();
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;
  if (!implicit_section)
    new_version_node->set_section (get_section ());
  /* Clones of global symbols or symbols with unique names are unique.  */
  if ((TREE_PUBLIC (old_decl)
       && !DECL_EXTERNAL (old_decl)
       && !DECL_WEAK (old_decl)
       && !DECL_COMDAT (old_decl))
      || in_lto_p)
    new_version_node->unique_name = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  symtab->call_cgraph_insertion_hooks (new_version_node);
  return new_version_node;
}
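/* A minimal usage sketch (hypothetical caller; the argument values are only
   for illustration): create an out-of-line copy that returns void and drops
   the first parameter, without redirecting any existing callers:

     auto_vec<cgraph_edge *> no_callers;
     bitmap skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (skip, 0);
     cgraph_node *copy
       = node->create_version_clone_with_body (no_callers, NULL, skip,
                                               true,  // skip_return
                                               NULL, NULL, "my_version");
     BITMAP_FREE (skip);

   Unlike create_virtual_clone, the body is copied immediately by
   tree_function_versioning, so the result is a real, materialized
   function.  */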
/* Given virtual clone, turn it into actual clone.  */

static void
cgraph_materialize_clone (cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  node->former_clone_of = node->clone_of->decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
                            node->clone.tree_map, true,
                            node->clone.args_to_skip, false,
                            NULL, NULL);
  if (symtab->dump_file)
    {
      dump_function_to_file (node->clone_of->decl, symtab->dump_file,
                             dump_flags);
      dump_function_to_file (node->decl, symtab->dump_file, dump_flags);
    }

  /* Function is no longer a clone.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      node->clone_of->release_body ();
      node->clone_of->remove_callees ();
      node->clone_of->remove_all_references ();
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}
/* Once all functions from the compilation unit are in memory, produce all
   clones and update all calls.  We might also do this on demand if we don't
   want to bring all functions to memory prior to compilation, but the
   current WHOPR implementation does that and it is a bit easier to keep
   everything right in this order.  */

void
symbol_table::materialize_all_clones (void)
{
  cgraph_node *node;
  bool stabilized = false;

  if (symtab->dump_file)
    fprintf (symtab->dump_file, "Materializing clones\n");

  cgraph_node::checking_verify_cgraph_nodes ();

  /* We can also do topological order, but the number of iterations should be
     bounded by the number of IPA passes since a single IPA pass is probably
     not going to create clones of clones it created itself.  */
  while (!stabilized)
    {
      stabilized = true;
      FOR_EACH_FUNCTION (node)
        {
          if (node->clone_of && node->decl != node->clone_of->decl
              && !gimple_has_body_p (node->decl))
            {
              if (!node->clone_of->clone_of)
                node->clone_of->get_untransformed_body ();
              if (gimple_has_body_p (node->clone_of->decl))
                {
                  if (symtab->dump_file)
                    {
                      fprintf (symtab->dump_file, "cloning %s to %s\n",
                               xstrdup_for_dump (node->clone_of->name ()),
                               xstrdup_for_dump (node->name ()));
                      if (node->clone.tree_map)
                        {
                          unsigned int i;
                          fprintf (symtab->dump_file, "   replace map: ");
                          for (i = 0;
                               i < vec_safe_length (node->clone.tree_map);
                               i++)
                            {
                              ipa_replace_map *replace_info;
                              replace_info = (*node->clone.tree_map)[i];
                              print_generic_expr (symtab->dump_file,
                                                  replace_info->old_tree);
                              fprintf (symtab->dump_file, " -> ");
                              print_generic_expr (symtab->dump_file,
                                                  replace_info->new_tree);
                              fprintf (symtab->dump_file, "%s%s;",
                                       replace_info->replace_p
                                       ? "(replace)" : "",
                                       replace_info->ref_p ? "(ref)" : "");
                            }
                          fprintf (symtab->dump_file, "\n");
                        }
                      if (node->clone.args_to_skip)
                        {
                          fprintf (symtab->dump_file, "   args_to_skip: ");
                          dump_bitmap (symtab->dump_file,
                                       node->clone.args_to_skip);
                        }
                      if (node->clone.combined_args_to_skip)
                        {
                          fprintf (symtab->dump_file,
                                   "   combined_args_to_skip:");
                          dump_bitmap (symtab->dump_file,
                                       node->clone.combined_args_to_skip);
                        }
                    }
                  cgraph_materialize_clone (node);
                  stabilized = false;
                }
            }
        }
    }
  FOR_EACH_FUNCTION (node)
    if (!node->analyzed && node->callees)
      {
        node->remove_callees ();
        node->remove_all_references ();
      }
    else
      node->clear_stmts_in_references ();
  if (symtab->dump_file)
    fprintf (symtab->dump_file, "Materialization Call site updates done.\n");

  cgraph_node::checking_verify_cgraph_nodes ();

  symtab->remove_unreachable_nodes (symtab->dump_file);
}

#include "gt-cgraphclones.h"