2 Copyright (C) 2003-2017 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This module provides facilities for cloning functions, i.e. creating
22 new functions based on existing functions with simple modifications,
23 such as replacement of parameters.
25 To allow whole program optimization without actual presence of function
26 bodies, an additional infrastructure is provided for so-called virtual
29 A virtual clone in the callgraph is a function that has no
30 associated body, just a description of how to create its body based
31 on a different function (which itself may be a virtual clone).
33 The description of function modifications includes adjustments to
34 the function's signature (which allows, for example, removing or
35 adding function arguments), substitutions to perform on the
36 function body, and, for inlined functions, a pointer to the
37 function that it will be inlined into.
39 It is also possible to redirect any edge of the callgraph from a
40 function to its virtual clone. This implies updating of the call
41 site to adjust for the new function signature.
43 Most of the transformations performed by inter-procedural
44 optimizations can be represented via virtual clones. For
45 instance, a constant propagation pass can produce a virtual clone
46 of the function which replaces one of its arguments by a
47 constant. The inliner can represent its decisions by producing a
48 clone of a function whose body will be later integrated into
51 Using virtual clones, the program can be easily updated
52 during the Execute stage, solving most of pass interactions
53 problems that would otherwise occur during Transform.
55 Virtual clones are later materialized in the LTRANS stage and
56 turned into real functions. Passes executed after the virtual
57 clone were introduced also perform their Transform stage
58 on new functions, so for a pass there is no significant
59 difference between operating on a real function or a virtual
60 clone introduced before its Execute stage.
62 Optimization passes then work on virtual clones introduced before
63 their Execute stage as if they were real functions. The
64 only difference is that clones are not visible during the
65 Generate Summary stage. */
69 #include "coretypes.h"
75 #include "stringpool.h"
77 #include "lto-streamer.h"
80 #include "tree-inline.h"
82 #include "gimple-pretty-print.h"
84 /* Create clone of edge in the node N represented by CALL_EXPR
88 cgraph_edge::clone (cgraph_node
*n
, gcall
*call_stmt
, unsigned stmt_uid
,
89 profile_count num
, profile_count den
,
92 cgraph_edge
*new_edge
;
93 profile_count::adjust_for_ipa_scaling (&num
, &den
);
94 profile_count prof_count
= count
.apply_scale (num
, den
);
96 if (indirect_unknown_callee
)
100 if (call_stmt
&& (decl
= gimple_call_fndecl (call_stmt
))
101 /* When the call is speculative, we need to resolve it
102 via cgraph_resolve_speculation and not here. */
105 cgraph_node
*callee
= cgraph_node::get (decl
);
106 gcc_checking_assert (callee
);
107 new_edge
= n
->create_edge (callee
, call_stmt
, prof_count
);
111 new_edge
= n
->create_indirect_edge (call_stmt
,
112 indirect_info
->ecf_flags
,
114 *new_edge
->indirect_info
= *indirect_info
;
119 new_edge
= n
->create_edge (callee
, call_stmt
, prof_count
);
122 new_edge
->indirect_info
123 = ggc_cleared_alloc
<cgraph_indirect_call_info
> ();
124 *new_edge
->indirect_info
= *indirect_info
;
128 new_edge
->inline_failed
= inline_failed
;
129 new_edge
->indirect_inlining_edge
= indirect_inlining_edge
;
130 new_edge
->lto_stmt_uid
= stmt_uid
;
131 /* Clone flags that depend on call_stmt availability manually. */
132 new_edge
->can_throw_external
= can_throw_external
;
133 new_edge
->call_stmt_cannot_inline_p
= call_stmt_cannot_inline_p
;
134 new_edge
->speculative
= speculative
;
135 new_edge
->in_polymorphic_cdtor
= in_polymorphic_cdtor
;
137 /* Update IPA profile. Local profiles need no updating in original. */
139 count
= count
.combine_with_ipa_count (count
.ipa ()
140 - new_edge
->count
.ipa ());
141 symtab
->call_edge_duplication_hooks (this, new_edge
);
145 /* Build variant of function type ORIG_TYPE skipping ARGS_TO_SKIP and the
146 return value if SKIP_RETURN is true. */
149 cgraph_build_function_type_skip_args (tree orig_type
, bitmap args_to_skip
,
152 tree new_type
= NULL
;
153 tree args
, new_args
= NULL
;
157 for (args
= TYPE_ARG_TYPES (orig_type
); args
&& args
!= void_list_node
;
158 args
= TREE_CHAIN (args
), i
++)
159 if (!args_to_skip
|| !bitmap_bit_p (args_to_skip
, i
))
160 new_args
= tree_cons (NULL_TREE
, TREE_VALUE (args
), new_args
);
162 new_reversed
= nreverse (new_args
);
166 TREE_CHAIN (new_args
) = void_list_node
;
168 new_reversed
= void_list_node
;
171 /* Use copy_node to preserve as much as possible from original type
172 (debug info, attribute lists etc.)
173 Exception is METHOD_TYPEs must have THIS argument.
174 When we are asked to remove it, we need to build new FUNCTION_TYPE
176 if (TREE_CODE (orig_type
) != METHOD_TYPE
178 || !bitmap_bit_p (args_to_skip
, 0))
180 new_type
= build_distinct_type_copy (orig_type
);
181 TYPE_ARG_TYPES (new_type
) = new_reversed
;
186 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type
),
188 TYPE_CONTEXT (new_type
) = TYPE_CONTEXT (orig_type
);
192 TREE_TYPE (new_type
) = void_type_node
;
197 /* Build variant of function decl ORIG_DECL skipping ARGS_TO_SKIP and the
198 return value if SKIP_RETURN is true.
200 Arguments from DECL_ARGUMENTS list can't be removed now, since they are
201 linked by TREE_CHAIN directly. The caller is responsible for eliminating
202 them when they are being duplicated (i.e. copy_arguments_for_versioning). */
205 build_function_decl_skip_args (tree orig_decl
, bitmap args_to_skip
,
208 tree new_decl
= copy_node (orig_decl
);
211 new_type
= TREE_TYPE (orig_decl
);
212 if (prototype_p (new_type
)
213 || (skip_return
&& !VOID_TYPE_P (TREE_TYPE (new_type
))))
215 = cgraph_build_function_type_skip_args (new_type
, args_to_skip
,
217 TREE_TYPE (new_decl
) = new_type
;
219 /* For declarations setting DECL_VINDEX (i.e. methods)
220 we expect first argument to be THIS pointer. */
221 if (args_to_skip
&& bitmap_bit_p (args_to_skip
, 0))
222 DECL_VINDEX (new_decl
) = NULL_TREE
;
224 /* When signature changes, we need to clear builtin info. */
225 if (DECL_BUILT_IN (new_decl
)
227 && !bitmap_empty_p (args_to_skip
))
229 DECL_BUILT_IN_CLASS (new_decl
) = NOT_BUILT_IN
;
230 DECL_FUNCTION_CODE (new_decl
) = (enum built_in_function
) 0;
232 /* The FE might have information and assumptions about the other
234 DECL_LANG_SPECIFIC (new_decl
) = NULL
;
238 /* Set flags of NEW_NODE and its decl. NEW_NODE is a newly created private
239 clone or its thunk. */
242 set_new_clone_decl_and_node_flags (cgraph_node
*new_node
)
244 DECL_EXTERNAL (new_node
->decl
) = 0;
245 TREE_PUBLIC (new_node
->decl
) = 0;
246 DECL_COMDAT (new_node
->decl
) = 0;
247 DECL_WEAK (new_node
->decl
) = 0;
248 DECL_VIRTUAL_P (new_node
->decl
) = 0;
249 DECL_STATIC_CONSTRUCTOR (new_node
->decl
) = 0;
250 DECL_STATIC_DESTRUCTOR (new_node
->decl
) = 0;
252 new_node
->externally_visible
= 0;
253 new_node
->local
.local
= 1;
254 new_node
->lowered
= true;
257 /* Duplicate thunk THUNK if necessary but make it to refer to NODE.
258 ARGS_TO_SKIP, if non-NULL, determines which parameters should be omitted.
259 Function can return NODE if no thunk is necessary, which can happen when
260 thunk is this_adjusting but we are removing this parameter. */
263 duplicate_thunk_for_node (cgraph_node
*thunk
, cgraph_node
*node
)
265 cgraph_node
*new_thunk
, *thunk_of
;
266 thunk_of
= thunk
->callees
->callee
->ultimate_alias_target ();
268 if (thunk_of
->thunk
.thunk_p
)
269 node
= duplicate_thunk_for_node (thunk_of
, node
);
271 if (!DECL_ARGUMENTS (thunk
->decl
))
272 thunk
->get_untransformed_body ();
275 for (cs
= node
->callers
; cs
; cs
= cs
->next_caller
)
276 if (cs
->caller
->thunk
.thunk_p
277 && cs
->caller
->thunk
.this_adjusting
== thunk
->thunk
.this_adjusting
278 && cs
->caller
->thunk
.fixed_offset
== thunk
->thunk
.fixed_offset
279 && cs
->caller
->thunk
.virtual_offset_p
== thunk
->thunk
.virtual_offset_p
280 && cs
->caller
->thunk
.virtual_value
== thunk
->thunk
.virtual_value
)
284 if (!node
->clone
.args_to_skip
)
285 new_decl
= copy_node (thunk
->decl
);
288 /* We do not need to duplicate this_adjusting thunks if we have removed
290 if (thunk
->thunk
.this_adjusting
291 && bitmap_bit_p (node
->clone
.args_to_skip
, 0))
294 new_decl
= build_function_decl_skip_args (thunk
->decl
,
295 node
->clone
.args_to_skip
,
299 tree
*link
= &DECL_ARGUMENTS (new_decl
);
301 for (tree pd
= DECL_ARGUMENTS (thunk
->decl
); pd
; pd
= DECL_CHAIN (pd
), i
++)
303 if (!node
->clone
.args_to_skip
304 || !bitmap_bit_p (node
->clone
.args_to_skip
, i
))
306 tree nd
= copy_node (pd
);
307 DECL_CONTEXT (nd
) = new_decl
;
309 link
= &DECL_CHAIN (nd
);
314 gcc_checking_assert (!DECL_STRUCT_FUNCTION (new_decl
));
315 gcc_checking_assert (!DECL_INITIAL (new_decl
));
316 gcc_checking_assert (!DECL_RESULT (new_decl
));
317 gcc_checking_assert (!DECL_RTL_SET_P (new_decl
));
319 DECL_NAME (new_decl
) = clone_function_name (thunk
->decl
, "artificial_thunk");
320 SET_DECL_ASSEMBLER_NAME (new_decl
, DECL_NAME (new_decl
));
322 new_thunk
= cgraph_node::create (new_decl
);
323 set_new_clone_decl_and_node_flags (new_thunk
);
324 new_thunk
->definition
= true;
325 new_thunk
->local
.can_change_signature
= node
->local
.can_change_signature
;
326 new_thunk
->thunk
= thunk
->thunk
;
327 new_thunk
->unique_name
= in_lto_p
;
328 new_thunk
->former_clone_of
= thunk
->decl
;
329 new_thunk
->clone
.args_to_skip
= node
->clone
.args_to_skip
;
330 new_thunk
->clone
.combined_args_to_skip
= node
->clone
.combined_args_to_skip
;
332 cgraph_edge
*e
= new_thunk
->create_edge (node
, NULL
, new_thunk
->count
);
333 symtab
->call_edge_duplication_hooks (thunk
->callees
, e
);
334 symtab
->call_cgraph_duplication_hooks (thunk
, new_thunk
);
338 /* If E does not lead to a thunk, simply redirect it to N. Otherwise create
339 one or more equivalent thunks for N and redirect E to the first in the
340 chain. Note that it is then necessary to call
341 n->expand_all_artificial_thunks once all callers are redirected. */
344 cgraph_edge::redirect_callee_duplicating_thunks (cgraph_node
*n
)
346 cgraph_node
*orig_to
= callee
->ultimate_alias_target ();
347 if (orig_to
->thunk
.thunk_p
)
348 n
= duplicate_thunk_for_node (orig_to
, n
);
353 /* Call expand_thunk on all callers that are thunks and if analyze those nodes
354 that were expanded. */
357 cgraph_node::expand_all_artificial_thunks ()
360 for (e
= callers
; e
;)
361 if (e
->caller
->thunk
.thunk_p
)
363 cgraph_node
*thunk
= e
->caller
;
366 if (thunk
->expand_thunk (false, false))
368 thunk
->thunk
.thunk_p
= false;
371 thunk
->expand_all_artificial_thunks ();
378 dump_callgraph_transformation (const cgraph_node
*original
,
379 const cgraph_node
*clone
,
382 if (symtab
->ipa_clones_dump_file
)
384 fprintf (symtab
->ipa_clones_dump_file
,
385 "Callgraph clone;%s;%d;%s;%d;%d;%s;%d;%s;%d;%d;%s\n",
386 original
->asm_name (), original
->order
,
387 DECL_SOURCE_FILE (original
->decl
),
388 DECL_SOURCE_LINE (original
->decl
),
389 DECL_SOURCE_COLUMN (original
->decl
), clone
->asm_name (),
390 clone
->order
, DECL_SOURCE_FILE (clone
->decl
),
391 DECL_SOURCE_LINE (clone
->decl
), DECL_SOURCE_COLUMN (clone
->decl
),
394 symtab
->cloned_nodes
.add (original
);
395 symtab
->cloned_nodes
.add (clone
);
399 /* Create node representing clone of N executed COUNT times. Decrease
400 the execution counts from original node too.
401 The new clone will have decl set to DECL that may or may not be the same
404 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
405 function's profile to reflect the fact that part of execution is handled
407 When CALL_DUPLICATOIN_HOOK is true, the ipa passes are acknowledged about
408 the new clone. Otherwise the caller is responsible for doing so later.
410 If the new node is being inlined into another one, NEW_INLINED_TO should be
411 the outline function the new one is (even indirectly) inlined to. All hooks
412 will see this in node's global.inlined_to, when invoked. Can be NULL if the
413 node is not inlined. */
416 cgraph_node::create_clone (tree new_decl
, profile_count prof_count
,
417 bool update_original
,
418 vec
<cgraph_edge
*> redirect_callers
,
419 bool call_duplication_hook
,
420 cgraph_node
*new_inlined_to
,
421 bitmap args_to_skip
, const char *suffix
)
423 cgraph_node
*new_node
= symtab
->create_empty ();
426 profile_count old_count
= count
;
429 dump_callgraph_transformation (this, new_inlined_to
, "inlining to");
431 prof_count
= count
.combine_with_ipa_count (prof_count
);
432 new_node
->count
= prof_count
;
434 /* Update IPA profile. Local profiles need no updating in original. */
436 count
= count
.combine_with_ipa_count (count
.ipa () - prof_count
.ipa ());
437 new_node
->decl
= new_decl
;
438 new_node
->register_symbol ();
439 new_node
->origin
= origin
;
440 new_node
->lto_file_data
= lto_file_data
;
441 if (new_node
->origin
)
443 new_node
->next_nested
= new_node
->origin
->nested
;
444 new_node
->origin
->nested
= new_node
;
446 new_node
->analyzed
= analyzed
;
447 new_node
->definition
= definition
;
448 new_node
->local
= local
;
449 new_node
->externally_visible
= false;
450 new_node
->no_reorder
= no_reorder
;
451 new_node
->local
.local
= true;
452 new_node
->global
= global
;
453 new_node
->global
.inlined_to
= new_inlined_to
;
455 new_node
->frequency
= frequency
;
456 new_node
->tp_first_run
= tp_first_run
;
457 new_node
->tm_clone
= tm_clone
;
458 new_node
->icf_merged
= icf_merged
;
459 new_node
->merged_comdat
= merged_comdat
;
460 new_node
->thunk
= thunk
;
462 new_node
->clone
.tree_map
= NULL
;
463 new_node
->clone
.args_to_skip
= args_to_skip
;
464 new_node
->split_part
= split_part
;
466 new_node
->clone
.combined_args_to_skip
= clone
.combined_args_to_skip
;
467 else if (clone
.combined_args_to_skip
)
469 new_node
->clone
.combined_args_to_skip
= BITMAP_GGC_ALLOC ();
470 bitmap_ior (new_node
->clone
.combined_args_to_skip
,
471 clone
.combined_args_to_skip
, args_to_skip
);
474 new_node
->clone
.combined_args_to_skip
= args_to_skip
;
476 FOR_EACH_VEC_ELT (redirect_callers
, i
, e
)
478 /* Redirect calls to the old version node to point to its new
479 version. The only exception is when the edge was proved to
480 be unreachable during the clonning procedure. */
482 || DECL_BUILT_IN_CLASS (e
->callee
->decl
) != BUILT_IN_NORMAL
483 || DECL_FUNCTION_CODE (e
->callee
->decl
) != BUILT_IN_UNREACHABLE
)
484 e
->redirect_callee_duplicating_thunks (new_node
);
486 new_node
->expand_all_artificial_thunks ();
488 for (e
= callees
;e
; e
=e
->next_callee
)
489 e
->clone (new_node
, e
->call_stmt
, e
->lto_stmt_uid
, new_node
->count
, old_count
,
492 for (e
= indirect_calls
; e
; e
= e
->next_callee
)
493 e
->clone (new_node
, e
->call_stmt
, e
->lto_stmt_uid
,
494 new_node
->count
, old_count
, update_original
);
495 new_node
->clone_references (this);
497 new_node
->next_sibling_clone
= clones
;
499 clones
->prev_sibling_clone
= new_node
;
501 new_node
->clone_of
= this;
503 if (call_duplication_hook
)
504 symtab
->call_cgraph_duplication_hooks (this, new_node
);
507 dump_callgraph_transformation (this, new_node
, suffix
);
512 static GTY(()) unsigned int clone_fn_id_num
;
514 /* Return a new assembler name for a clone with SUFFIX of a decl named
518 clone_function_name_1 (const char *name
, const char *suffix
)
520 size_t len
= strlen (name
);
521 char *tmp_name
, *prefix
;
523 prefix
= XALLOCAVEC (char, len
+ strlen (suffix
) + 2);
524 memcpy (prefix
, name
, len
);
525 strcpy (prefix
+ len
+ 1, suffix
);
526 prefix
[len
] = symbol_table::symbol_suffix_separator ();
527 ASM_FORMAT_PRIVATE_NAME (tmp_name
, prefix
, clone_fn_id_num
++);
528 return get_identifier (tmp_name
);
531 /* Return a new assembler name for a clone of DECL with SUFFIX. */
534 clone_function_name (tree decl
, const char *suffix
)
536 tree name
= DECL_ASSEMBLER_NAME (decl
);
537 return clone_function_name_1 (IDENTIFIER_POINTER (name
), suffix
);
541 /* Create callgraph node clone with new declaration. The actual body will
542 be copied later at compilation stage.
544 TODO: after merging in ipa-sra use function call notes instead of args_to_skip
548 cgraph_node::create_virtual_clone (vec
<cgraph_edge
*> redirect_callers
,
549 vec
<ipa_replace_map
*, va_gc
> *tree_map
,
550 bitmap args_to_skip
, const char * suffix
)
552 tree old_decl
= decl
;
553 cgraph_node
*new_node
= NULL
;
556 ipa_replace_map
*map
;
559 gcc_checking_assert (local
.versionable
);
560 gcc_assert (local
.can_change_signature
|| !args_to_skip
);
562 /* Make a new FUNCTION_DECL tree node */
564 new_decl
= copy_node (old_decl
);
566 new_decl
= build_function_decl_skip_args (old_decl
, args_to_skip
, false);
568 /* These pointers represent function body and will be populated only when clone
570 gcc_assert (new_decl
!= old_decl
);
571 DECL_STRUCT_FUNCTION (new_decl
) = NULL
;
572 DECL_ARGUMENTS (new_decl
) = NULL
;
573 DECL_INITIAL (new_decl
) = NULL
;
574 DECL_RESULT (new_decl
) = NULL
;
575 /* We can not do DECL_RESULT (new_decl) = NULL; here because of LTO partitioning
576 sometimes storing only clone decl instead of original. */
578 /* Generate a new name for the new version. */
579 len
= IDENTIFIER_LENGTH (DECL_NAME (old_decl
));
580 name
= XALLOCAVEC (char, len
+ strlen (suffix
) + 2);
581 memcpy (name
, IDENTIFIER_POINTER (DECL_NAME (old_decl
)), len
);
582 strcpy (name
+ len
+ 1, suffix
);
584 DECL_NAME (new_decl
) = get_identifier (name
);
585 SET_DECL_ASSEMBLER_NAME (new_decl
, clone_function_name (old_decl
, suffix
));
586 SET_DECL_RTL (new_decl
, NULL
);
588 new_node
= create_clone (new_decl
, count
, false,
589 redirect_callers
, false, NULL
, args_to_skip
, suffix
);
591 /* Update the properties.
592 Make clone visible only within this translation unit. Make sure
593 that is not weak also.
594 ??? We cannot use COMDAT linkage because there is no
595 ABI support for this. */
596 set_new_clone_decl_and_node_flags (new_node
);
597 new_node
->clone
.tree_map
= tree_map
;
598 if (!implicit_section
)
599 new_node
->set_section (get_section ());
601 /* Clones of global symbols or symbols with unique names are unique. */
602 if ((TREE_PUBLIC (old_decl
)
603 && !DECL_EXTERNAL (old_decl
)
604 && !DECL_WEAK (old_decl
)
605 && !DECL_COMDAT (old_decl
))
607 new_node
->unique_name
= true;
608 FOR_EACH_VEC_SAFE_ELT (tree_map
, i
, map
)
609 new_node
->maybe_create_reference (map
->new_tree
, NULL
);
611 if (ipa_transforms_to_apply
.exists ())
612 new_node
->ipa_transforms_to_apply
613 = ipa_transforms_to_apply
.copy ();
615 symtab
->call_cgraph_duplication_hooks (this, new_node
);
620 /* callgraph node being removed from symbol table; see if its entry can be
621 replaced by other inline clone. */
623 cgraph_node::find_replacement (void)
625 cgraph_node
*next_inline_clone
, *replacement
;
627 for (next_inline_clone
= clones
;
629 && next_inline_clone
->decl
!= decl
;
630 next_inline_clone
= next_inline_clone
->next_sibling_clone
)
633 /* If there is inline clone of the node being removed, we need
634 to put it into the position of removed node and reorganize all
635 other clones to be based on it. */
636 if (next_inline_clone
)
639 cgraph_node
*new_clones
;
641 replacement
= next_inline_clone
;
643 /* Unlink inline clone from the list of clones of removed node. */
644 if (next_inline_clone
->next_sibling_clone
)
645 next_inline_clone
->next_sibling_clone
->prev_sibling_clone
646 = next_inline_clone
->prev_sibling_clone
;
647 if (next_inline_clone
->prev_sibling_clone
)
649 gcc_assert (clones
!= next_inline_clone
);
650 next_inline_clone
->prev_sibling_clone
->next_sibling_clone
651 = next_inline_clone
->next_sibling_clone
;
655 gcc_assert (clones
== next_inline_clone
);
656 clones
= next_inline_clone
->next_sibling_clone
;
662 /* Copy clone info. */
663 next_inline_clone
->clone
= clone
;
665 /* Now place it into clone tree at same level at NODE. */
666 next_inline_clone
->clone_of
= clone_of
;
667 next_inline_clone
->prev_sibling_clone
= NULL
;
668 next_inline_clone
->next_sibling_clone
= NULL
;
671 if (clone_of
->clones
)
672 clone_of
->clones
->prev_sibling_clone
= next_inline_clone
;
673 next_inline_clone
->next_sibling_clone
= clone_of
->clones
;
674 clone_of
->clones
= next_inline_clone
;
677 /* Merge the clone list. */
680 if (!next_inline_clone
->clones
)
681 next_inline_clone
->clones
= new_clones
;
684 n
= next_inline_clone
->clones
;
685 while (n
->next_sibling_clone
)
686 n
= n
->next_sibling_clone
;
687 n
->next_sibling_clone
= new_clones
;
688 new_clones
->prev_sibling_clone
= n
;
692 /* Update clone_of pointers. */
696 n
->clone_of
= next_inline_clone
;
697 n
= n
->next_sibling_clone
;
705 /* Like cgraph_set_call_stmt but walk the clone tree and update all
706 clones sharing the same function body.
707 When WHOLE_SPECULATIVE_EDGES is true, all three components of
708 speculative edge gets updated. Otherwise we update only direct
712 cgraph_node::set_call_stmt_including_clones (gimple
*old_stmt
,
714 bool update_speculative
)
717 cgraph_edge
*edge
= get_edge (old_stmt
);
720 edge
->set_call_stmt (new_stmt
, update_speculative
);
726 cgraph_edge
*edge
= node
->get_edge (old_stmt
);
729 edge
->set_call_stmt (new_stmt
, update_speculative
);
730 /* If UPDATE_SPECULATIVE is false, it means that we are turning
731 speculative call into a real code sequence. Update the
733 if (edge
->speculative
&& !update_speculative
)
735 cgraph_edge
*direct
, *indirect
;
738 gcc_assert (!edge
->indirect_unknown_callee
);
739 edge
->speculative_call_info (direct
, indirect
, ref
);
740 direct
->speculative
= false;
741 indirect
->speculative
= false;
742 ref
->speculative
= false;
747 else if (node
->next_sibling_clone
)
748 node
= node
->next_sibling_clone
;
751 while (node
!= this && !node
->next_sibling_clone
)
752 node
= node
->clone_of
;
754 node
= node
->next_sibling_clone
;
759 /* Like cgraph_create_edge walk the clone tree and update all clones sharing
760 same function body. If clones already have edge for OLD_STMT; only
761 update the edge same way as cgraph_set_call_stmt_including_clones does.
763 TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
764 frequencies of the clones. */
767 cgraph_node::create_edge_including_clones (cgraph_node
*callee
,
768 gimple
*old_stmt
, gcall
*stmt
,
770 cgraph_inline_failed_t reason
)
775 if (!get_edge (stmt
))
777 edge
= create_edge (callee
, stmt
, count
);
778 edge
->inline_failed
= reason
;
784 /* Thunk clones do not get updated while copying inline function body. */
785 if (!node
->thunk
.thunk_p
)
787 cgraph_edge
*edge
= node
->get_edge (old_stmt
);
789 /* It is possible that clones already contain the edge while
790 master didn't. Either we promoted indirect call into direct
791 call in the clone or we are processing clones of unreachable
792 master where edges has been removed. */
794 edge
->set_call_stmt (stmt
);
795 else if (! node
->get_edge (stmt
))
797 edge
= node
->create_edge (callee
, stmt
, count
);
798 edge
->inline_failed
= reason
;
803 else if (node
->next_sibling_clone
)
804 node
= node
->next_sibling_clone
;
807 while (node
!= this && !node
->next_sibling_clone
)
808 node
= node
->clone_of
;
810 node
= node
->next_sibling_clone
;
815 /* Remove the node from cgraph and all inline clones inlined into it.
816 Skip however removal of FORBIDDEN_NODE and return true if it needs to be
817 removed. This allows to call the function from outer loop walking clone
821 cgraph_node::remove_symbol_and_inline_clones (cgraph_node
*forbidden_node
)
823 cgraph_edge
*e
, *next
;
826 if (this == forbidden_node
)
831 for (e
= callees
; e
; e
= next
)
833 next
= e
->next_callee
;
834 if (!e
->inline_failed
)
835 found
|= e
->callee
->remove_symbol_and_inline_clones (forbidden_node
);
841 /* The edges representing the callers of the NEW_VERSION node were
842 fixed by cgraph_function_versioning (), now the call_expr in their
843 respective tree code should be updated to call the NEW_VERSION. */
846 update_call_expr (cgraph_node
*new_version
)
850 gcc_assert (new_version
);
852 /* Update the call expr on the edges to call the new version. */
853 for (e
= new_version
->callers
; e
; e
= e
->next_caller
)
855 function
*inner_function
= DECL_STRUCT_FUNCTION (e
->caller
->decl
);
856 gimple_call_set_fndecl (e
->call_stmt
, new_version
->decl
);
857 maybe_clean_eh_stmt_fn (inner_function
, e
->call_stmt
);
862 /* Create a new cgraph node which is the new version of
863 callgraph node. REDIRECT_CALLERS holds the callers
864 edges which should be redirected to point to
865 NEW_VERSION. ALL the callees edges of the node
866 are cloned to the new version node. Return the new
869 If non-NULL BLOCK_TO_COPY determine what basic blocks
870 was copied to prevent duplications of calls that are dead
874 cgraph_node::create_version_clone (tree new_decl
,
875 vec
<cgraph_edge
*> redirect_callers
,
879 cgraph_node
*new_version
;
883 new_version
= cgraph_node::create (new_decl
);
885 new_version
->analyzed
= analyzed
;
886 new_version
->definition
= definition
;
887 new_version
->local
= local
;
888 new_version
->externally_visible
= false;
889 new_version
->no_reorder
= no_reorder
;
890 new_version
->local
.local
= new_version
->definition
;
891 new_version
->global
= global
;
892 new_version
->rtl
= rtl
;
893 new_version
->count
= count
;
895 for (e
= callees
; e
; e
=e
->next_callee
)
897 || bitmap_bit_p (bbs_to_copy
, gimple_bb (e
->call_stmt
)->index
))
898 e
->clone (new_version
, e
->call_stmt
,
899 e
->lto_stmt_uid
, count
, count
,
901 for (e
= indirect_calls
; e
; e
=e
->next_callee
)
903 || bitmap_bit_p (bbs_to_copy
, gimple_bb (e
->call_stmt
)->index
))
904 e
->clone (new_version
, e
->call_stmt
,
905 e
->lto_stmt_uid
, count
, count
,
907 FOR_EACH_VEC_ELT (redirect_callers
, i
, e
)
909 /* Redirect calls to the old version node to point to its new
911 e
->redirect_callee (new_version
);
914 symtab
->call_cgraph_duplication_hooks (this, new_version
);
916 dump_callgraph_transformation (this, new_version
, suffix
);
921 /* Perform function versioning.
922 Function versioning includes copying of the tree and
923 a callgraph update (creating a new cgraph node and updating
924 its callees and callers).
926 REDIRECT_CALLERS varray includes the edges to be redirected
929 TREE_MAP is a mapping of tree nodes we want to replace with
930 new ones (according to results of prior analysis).
932 If non-NULL ARGS_TO_SKIP determine function parameters to remove
934 If SKIP_RETURN is true, the new version will return void.
935 If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
936 If non_NULL NEW_ENTRY determine new entry BB of the clone.
938 Return the new version's cgraph node. */
941 cgraph_node::create_version_clone_with_body
942 (vec
<cgraph_edge
*> redirect_callers
,
943 vec
<ipa_replace_map
*, va_gc
> *tree_map
, bitmap args_to_skip
,
944 bool skip_return
, bitmap bbs_to_copy
, basic_block new_entry_block
,
947 tree old_decl
= decl
;
948 cgraph_node
*new_version_node
= NULL
;
951 if (!tree_versionable_function_p (old_decl
))
954 gcc_assert (local
.can_change_signature
|| !args_to_skip
);
956 /* Make a new FUNCTION_DECL tree node for the new version. */
957 if (!args_to_skip
&& !skip_return
)
958 new_decl
= copy_node (old_decl
);
961 = build_function_decl_skip_args (old_decl
, args_to_skip
, skip_return
);
963 /* Generate a new name for the new version. */
964 DECL_NAME (new_decl
) = clone_function_name (old_decl
, suffix
);
965 SET_DECL_ASSEMBLER_NAME (new_decl
, DECL_NAME (new_decl
));
966 SET_DECL_RTL (new_decl
, NULL
);
968 /* When the old decl was a con-/destructor make sure the clone isn't. */
969 DECL_STATIC_CONSTRUCTOR (new_decl
) = 0;
970 DECL_STATIC_DESTRUCTOR (new_decl
) = 0;
972 /* Create the new version's call-graph node.
973 and update the edges of the new node. */
974 new_version_node
= create_version_clone (new_decl
, redirect_callers
,
975 bbs_to_copy
, suffix
);
977 if (ipa_transforms_to_apply
.exists ())
978 new_version_node
->ipa_transforms_to_apply
979 = ipa_transforms_to_apply
.copy ();
980 /* Copy the OLD_VERSION_NODE function tree to the new version. */
981 tree_function_versioning (old_decl
, new_decl
, tree_map
, false, args_to_skip
,
982 skip_return
, bbs_to_copy
, new_entry_block
);
984 /* Update the new version's properties.
985 Make The new version visible only within this translation unit. Make sure
986 that is not weak also.
987 ??? We cannot use COMDAT linkage because there is no
988 ABI support for this. */
989 new_version_node
->make_decl_local ();
990 DECL_VIRTUAL_P (new_version_node
->decl
) = 0;
991 new_version_node
->externally_visible
= 0;
992 new_version_node
->local
.local
= 1;
993 new_version_node
->lowered
= true;
994 if (!implicit_section
)
995 new_version_node
->set_section (get_section ());
996 /* Clones of global symbols or symbols with unique names are unique. */
997 if ((TREE_PUBLIC (old_decl
)
998 && !DECL_EXTERNAL (old_decl
)
999 && !DECL_WEAK (old_decl
)
1000 && !DECL_COMDAT (old_decl
))
1002 new_version_node
->unique_name
= true;
1004 /* Update the call_expr on the edges to call the new version node. */
1005 update_call_expr (new_version_node
);
1007 symtab
->call_cgraph_insertion_hooks (this);
1008 return new_version_node
;
1011 /* Given virtual clone, turn it into actual clone. */
1014 cgraph_materialize_clone (cgraph_node
*node
)
1016 bitmap_obstack_initialize (NULL
);
1017 node
->former_clone_of
= node
->clone_of
->decl
;
1018 if (node
->clone_of
->former_clone_of
)
1019 node
->former_clone_of
= node
->clone_of
->former_clone_of
;
1020 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1021 tree_function_versioning (node
->clone_of
->decl
, node
->decl
,
1022 node
->clone
.tree_map
, true,
1023 node
->clone
.args_to_skip
, false,
1025 if (symtab
->dump_file
)
1027 dump_function_to_file (node
->clone_of
->decl
, symtab
->dump_file
,
1029 dump_function_to_file (node
->decl
, symtab
->dump_file
, dump_flags
);
1032 /* Function is no longer clone. */
1033 if (node
->next_sibling_clone
)
1034 node
->next_sibling_clone
->prev_sibling_clone
= node
->prev_sibling_clone
;
1035 if (node
->prev_sibling_clone
)
1036 node
->prev_sibling_clone
->next_sibling_clone
= node
->next_sibling_clone
;
1038 node
->clone_of
->clones
= node
->next_sibling_clone
;
1039 node
->next_sibling_clone
= NULL
;
1040 node
->prev_sibling_clone
= NULL
;
1041 if (!node
->clone_of
->analyzed
&& !node
->clone_of
->clones
)
1043 node
->clone_of
->release_body ();
1044 node
->clone_of
->remove_callees ();
1045 node
->clone_of
->remove_all_references ();
1047 node
->clone_of
= NULL
;
1048 bitmap_obstack_release (NULL
);
1051 /* Once all functions from compilation unit are in memory, produce all clones
1052 and update all calls. We might also do this on demand if we don't want to
1053 bring all functions to memory prior compilation, but current WHOPR
1054 implementation does that and it is a bit easier to keep everything right in
1058 symbol_table::materialize_all_clones (void)
1061 bool stabilized
= false;
1064 if (symtab
->dump_file
)
1065 fprintf (symtab
->dump_file
, "Materializing clones\n");
1067 cgraph_node::checking_verify_cgraph_nodes ();
1069 /* We can also do topological order, but number of iterations should be
1070 bounded by number of IPA passes since single IPA pass is probably not
1071 going to create clones of clones it created itself. */
1075 FOR_EACH_FUNCTION (node
)
1077 if (node
->clone_of
&& node
->decl
!= node
->clone_of
->decl
1078 && !gimple_has_body_p (node
->decl
))
1080 if (!node
->clone_of
->clone_of
)
1081 node
->clone_of
->get_untransformed_body ();
1082 if (gimple_has_body_p (node
->clone_of
->decl
))
1084 if (symtab
->dump_file
)
1086 fprintf (symtab
->dump_file
, "cloning %s to %s\n",
1087 xstrdup_for_dump (node
->clone_of
->name ()),
1088 xstrdup_for_dump (node
->name ()));
1089 if (node
->clone
.tree_map
)
1092 fprintf (symtab
->dump_file
, " replace map: ");
1094 i
< vec_safe_length (node
->clone
.tree_map
);
1097 ipa_replace_map
*replace_info
;
1098 replace_info
= (*node
->clone
.tree_map
)[i
];
1099 print_generic_expr (symtab
->dump_file
,
1100 replace_info
->old_tree
);
1101 fprintf (symtab
->dump_file
, " -> ");
1102 print_generic_expr (symtab
->dump_file
,
1103 replace_info
->new_tree
);
1104 fprintf (symtab
->dump_file
, "%s%s;",
1105 replace_info
->replace_p
? "(replace)":"",
1106 replace_info
->ref_p
? "(ref)":"");
1108 fprintf (symtab
->dump_file
, "\n");
1110 if (node
->clone
.args_to_skip
)
1112 fprintf (symtab
->dump_file
, " args_to_skip: ");
1113 dump_bitmap (symtab
->dump_file
,
1114 node
->clone
.args_to_skip
);
1116 if (node
->clone
.args_to_skip
)
1118 fprintf (symtab
->dump_file
, " combined_args_to_skip:");
1119 dump_bitmap (symtab
->dump_file
, node
->clone
.combined_args_to_skip
);
1122 cgraph_materialize_clone (node
);
1128 FOR_EACH_FUNCTION (node
)
1129 if (!node
->analyzed
&& node
->callees
)
1131 node
->remove_callees ();
1132 node
->remove_all_references ();
1135 node
->clear_stmts_in_references ();
1136 if (symtab
->dump_file
)
1137 fprintf (symtab
->dump_file
, "Materialization Call site updates done.\n");
1139 cgraph_node::checking_verify_cgraph_nodes ();
1141 symtab
->remove_unreachable_nodes (symtab
->dump_file
);
1144 #include "gt-cgraphclones.h"