1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains basic routines manipulating call graph
26 The call-graph is data structure designed for intra-procedural optimization
27 but it is also used in non-unit-at-a-time compilation to allow easier code
30 The call-graph consist of nodes and edges represented via linked lists.
31 Each function (external or not) corresponds to the unique node.
33 The mapping from declarations to call-graph nodes is done using hash table
34 based on DECL_UID. The call-graph nodes are created lazily using
35 cgraph_node function when called for unknown declaration.
37 The callgraph at the moment does not represent all indirect calls or calls
38 from other compilation units. Flag NEEDED is set for each node that may be
39 accessed in such an invisible way and it shall be considered an entry point
42 On the other hand, the callgraph currently does contain some edges for
43 indirect calls with unknown callees which can be accessed through
44 indirect_calls field of a node. It should be noted however that at the
45 moment only calls which are potential candidates for indirect inlining are
48 Interprocedural information:
50 Callgraph is place to store data needed for interprocedural optimization.
51 All data structures are divided into three components: local_info that
52 is produced while analyzing the function, global_info that is result
53 of global walking of the callgraph on the end of compilation and
54 rtl_info used by RTL backend to propagate data from already compiled
55 functions to their callers.
57 Moreover, each node has a uid which can be used to keep information in
58 on-the-side arrays. UIDs are reused and therefore reasonably dense.
62 The function inlining information is decided in advance and maintained
63 in the callgraph as so called inline plan.
64 For each inlined call, the callee's node is cloned to represent the
65 new function copy produced by inliner.
66 Each inlined call gets a unique corresponding clone node of the callee
67 and the data structure is updated while inlining is performed, so
68 the clones are eliminated and their callee edges redirected to the
71 Each edge has "inline_failed" field. When the field is set to NULL,
72 the call will be inlined. When it is non-NULL it contains a reason
73 why inlining wasn't performed. */
77 #include "coretypes.h"
80 #include "tree-inline.h"
81 #include "langhooks.h"
88 #include "basic-block.h"
93 #include "tree-dump.h"
94 #include "tree-flow.h"
95 #include "value-prof.h"
97 #include "diagnostic-core.h"
99 #include "ipa-utils.h"
/* Forward declarations of static helpers that are used before their
   definitions below.  */
static void cgraph_node_remove_callers (struct cgraph_node *node);
static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
105 /* Hash table used to convert declarations into nodes. */
106 static GTY((param_is (struct cgraph_node
))) htab_t cgraph_hash
;
107 /* Hash table used to convert assembler names into nodes. */
108 static GTY((param_is (struct cgraph_node
))) htab_t assembler_name_hash
;
110 /* The linked list of cgraph nodes. */
111 struct cgraph_node
*cgraph_nodes
;
113 /* Queue of cgraph nodes scheduled to be lowered. */
114 struct cgraph_node
*cgraph_nodes_queue
;
116 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
117 secondary queue used during optimization to accommodate passes that
118 may generate new functions that need to be optimized and expanded. */
119 struct cgraph_node
*cgraph_new_nodes
;
121 /* Number of nodes in existence. */
124 /* Maximal uid used in cgraph nodes. */
127 /* Maximal uid used in cgraph edges. */
128 int cgraph_edge_max_uid
;
130 /* Maximal pid used for profiling */
133 /* Set when whole unit has been analyzed so we can access global info. */
134 bool cgraph_global_info_ready
= false;
136 /* What state callgraph is in right now. */
137 enum cgraph_state cgraph_state
= CGRAPH_STATE_CONSTRUCTION
;
139 /* Set when the cgraph is fully build and the basic flags are computed. */
140 bool cgraph_function_flags_ready
= false;
142 /* Linked list of cgraph asm nodes. */
143 struct cgraph_asm_node
*cgraph_asm_nodes
;
145 /* Last node in cgraph_asm_nodes. */
146 static GTY(()) struct cgraph_asm_node
*cgraph_asm_last_node
;
148 /* The order index of the next cgraph node to be created. This is
149 used so that we can sort the cgraph nodes in order by when we saw
150 them, to support -fno-toplevel-reorder. */
153 /* List of hooks trigerred on cgraph_edge events. */
154 struct cgraph_edge_hook_list
{
155 cgraph_edge_hook hook
;
157 struct cgraph_edge_hook_list
*next
;
160 /* List of hooks trigerred on cgraph_node events. */
161 struct cgraph_node_hook_list
{
162 cgraph_node_hook hook
;
164 struct cgraph_node_hook_list
*next
;
167 /* List of hooks trigerred on events involving two cgraph_edges. */
168 struct cgraph_2edge_hook_list
{
169 cgraph_2edge_hook hook
;
171 struct cgraph_2edge_hook_list
*next
;
174 /* List of hooks trigerred on events involving two cgraph_nodes. */
175 struct cgraph_2node_hook_list
{
176 cgraph_2node_hook hook
;
178 struct cgraph_2node_hook_list
*next
;
181 /* List of hooks triggered when an edge is removed. */
182 struct cgraph_edge_hook_list
*first_cgraph_edge_removal_hook
;
183 /* List of hooks triggered when a node is removed. */
184 struct cgraph_node_hook_list
*first_cgraph_node_removal_hook
;
185 /* List of hooks triggered when an edge is duplicated. */
186 struct cgraph_2edge_hook_list
*first_cgraph_edge_duplicated_hook
;
187 /* List of hooks triggered when a node is duplicated. */
188 struct cgraph_2node_hook_list
*first_cgraph_node_duplicated_hook
;
189 /* List of hooks triggered when an function is inserted. */
190 struct cgraph_node_hook_list
*first_cgraph_function_insertion_hook
;
192 /* Head of a linked list of unused (freed) call graph nodes.
193 Do not GTY((delete)) this list so UIDs gets reliably recycled. */
194 static GTY(()) struct cgraph_node
*free_nodes
;
195 /* Head of a linked list of unused (freed) call graph edges.
196 Do not GTY((delete)) this list so UIDs gets reliably recycled. */
197 static GTY(()) struct cgraph_edge
*free_edges
;
199 /* Macros to access the next item in the list of free cgraph nodes and
201 #define NEXT_FREE_NODE(NODE) (NODE)->next
202 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
204 /* Register HOOK to be called with DATA on each removed edge. */
205 struct cgraph_edge_hook_list
*
206 cgraph_add_edge_removal_hook (cgraph_edge_hook hook
, void *data
)
208 struct cgraph_edge_hook_list
*entry
;
209 struct cgraph_edge_hook_list
**ptr
= &first_cgraph_edge_removal_hook
;
211 entry
= (struct cgraph_edge_hook_list
*) xmalloc (sizeof (*entry
));
221 /* Remove ENTRY from the list of hooks called on removing edges. */
223 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list
*entry
)
225 struct cgraph_edge_hook_list
**ptr
= &first_cgraph_edge_removal_hook
;
227 while (*ptr
!= entry
)
233 /* Call all edge removal hooks. */
235 cgraph_call_edge_removal_hooks (struct cgraph_edge
*e
)
237 struct cgraph_edge_hook_list
*entry
= first_cgraph_edge_removal_hook
;
240 entry
->hook (e
, entry
->data
);
245 /* Register HOOK to be called with DATA on each removed node. */
246 struct cgraph_node_hook_list
*
247 cgraph_add_node_removal_hook (cgraph_node_hook hook
, void *data
)
249 struct cgraph_node_hook_list
*entry
;
250 struct cgraph_node_hook_list
**ptr
= &first_cgraph_node_removal_hook
;
252 entry
= (struct cgraph_node_hook_list
*) xmalloc (sizeof (*entry
));
262 /* Remove ENTRY from the list of hooks called on removing nodes. */
264 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list
*entry
)
266 struct cgraph_node_hook_list
**ptr
= &first_cgraph_node_removal_hook
;
268 while (*ptr
!= entry
)
274 /* Call all node removal hooks. */
276 cgraph_call_node_removal_hooks (struct cgraph_node
*node
)
278 struct cgraph_node_hook_list
*entry
= first_cgraph_node_removal_hook
;
281 entry
->hook (node
, entry
->data
);
286 /* Register HOOK to be called with DATA on each inserted node. */
287 struct cgraph_node_hook_list
*
288 cgraph_add_function_insertion_hook (cgraph_node_hook hook
, void *data
)
290 struct cgraph_node_hook_list
*entry
;
291 struct cgraph_node_hook_list
**ptr
= &first_cgraph_function_insertion_hook
;
293 entry
= (struct cgraph_node_hook_list
*) xmalloc (sizeof (*entry
));
303 /* Remove ENTRY from the list of hooks called on inserted nodes. */
305 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list
*entry
)
307 struct cgraph_node_hook_list
**ptr
= &first_cgraph_function_insertion_hook
;
309 while (*ptr
!= entry
)
315 /* Call all node insertion hooks. */
317 cgraph_call_function_insertion_hooks (struct cgraph_node
*node
)
319 struct cgraph_node_hook_list
*entry
= first_cgraph_function_insertion_hook
;
322 entry
->hook (node
, entry
->data
);
327 /* Register HOOK to be called with DATA on each duplicated edge. */
328 struct cgraph_2edge_hook_list
*
329 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook
, void *data
)
331 struct cgraph_2edge_hook_list
*entry
;
332 struct cgraph_2edge_hook_list
**ptr
= &first_cgraph_edge_duplicated_hook
;
334 entry
= (struct cgraph_2edge_hook_list
*) xmalloc (sizeof (*entry
));
344 /* Remove ENTRY from the list of hooks called on duplicating edges. */
346 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list
*entry
)
348 struct cgraph_2edge_hook_list
**ptr
= &first_cgraph_edge_duplicated_hook
;
350 while (*ptr
!= entry
)
356 /* Call all edge duplication hooks. */
358 cgraph_call_edge_duplication_hooks (struct cgraph_edge
*cs1
,
359 struct cgraph_edge
*cs2
)
361 struct cgraph_2edge_hook_list
*entry
= first_cgraph_edge_duplicated_hook
;
364 entry
->hook (cs1
, cs2
, entry
->data
);
369 /* Register HOOK to be called with DATA on each duplicated node. */
370 struct cgraph_2node_hook_list
*
371 cgraph_add_node_duplication_hook (cgraph_2node_hook hook
, void *data
)
373 struct cgraph_2node_hook_list
*entry
;
374 struct cgraph_2node_hook_list
**ptr
= &first_cgraph_node_duplicated_hook
;
376 entry
= (struct cgraph_2node_hook_list
*) xmalloc (sizeof (*entry
));
386 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
388 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list
*entry
)
390 struct cgraph_2node_hook_list
**ptr
= &first_cgraph_node_duplicated_hook
;
392 while (*ptr
!= entry
)
398 /* Call all node duplication hooks. */
400 cgraph_call_node_duplication_hooks (struct cgraph_node
*node1
,
401 struct cgraph_node
*node2
)
403 struct cgraph_2node_hook_list
*entry
= first_cgraph_node_duplicated_hook
;
406 entry
->hook (node1
, node2
, entry
->data
);
411 /* Returns a hash code for P. */
414 hash_node (const void *p
)
416 const struct cgraph_node
*n
= (const struct cgraph_node
*) p
;
417 return (hashval_t
) DECL_UID (n
->decl
);
421 /* Returns nonzero if P1 and P2 are equal. */
424 eq_node (const void *p1
, const void *p2
)
426 const struct cgraph_node
*n1
= (const struct cgraph_node
*) p1
;
427 const struct cgraph_node
*n2
= (const struct cgraph_node
*) p2
;
428 return DECL_UID (n1
->decl
) == DECL_UID (n2
->decl
);
431 /* Allocate new callgraph node. */
433 static inline struct cgraph_node
*
434 cgraph_allocate_node (void)
436 struct cgraph_node
*node
;
441 free_nodes
= NEXT_FREE_NODE (node
);
445 node
= ggc_alloc_cleared_cgraph_node ();
446 node
->uid
= cgraph_max_uid
++;
452 /* Allocate new callgraph node and insert it into basic data structures. */
454 static struct cgraph_node
*
455 cgraph_create_node (void)
457 struct cgraph_node
*node
= cgraph_allocate_node ();
459 node
->next
= cgraph_nodes
;
461 node
->order
= cgraph_order
++;
463 cgraph_nodes
->previous
= node
;
464 node
->previous
= NULL
;
465 node
->global
.estimated_growth
= INT_MIN
;
466 node
->frequency
= NODE_FREQUENCY_NORMAL
;
467 ipa_empty_ref_list (&node
->ref_list
);
473 /* Return cgraph node assigned to DECL. Create new one when needed. */
476 cgraph_node (tree decl
)
478 struct cgraph_node key
, *node
, **slot
;
480 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
);
483 cgraph_hash
= htab_create_ggc (10, hash_node
, eq_node
, NULL
);
487 slot
= (struct cgraph_node
**) htab_find_slot (cgraph_hash
, &key
, INSERT
);
492 if (node
->same_body_alias
)
493 node
= node
->same_body
;
497 node
= cgraph_create_node ();
500 if (DECL_CONTEXT (decl
) && TREE_CODE (DECL_CONTEXT (decl
)) == FUNCTION_DECL
)
502 node
->origin
= cgraph_node (DECL_CONTEXT (decl
));
503 node
->next_nested
= node
->origin
->nested
;
504 node
->origin
->nested
= node
;
506 if (assembler_name_hash
)
509 tree name
= DECL_ASSEMBLER_NAME (decl
);
511 aslot
= htab_find_slot_with_hash (assembler_name_hash
, name
,
512 decl_assembler_name_hash (name
),
514 /* We can have multiple declarations with same assembler name. For C++
515 it is __builtin_strlen and strlen, for instance. Do we need to
516 record them all? Original implementation marked just first one
517 so lets hope for the best. */
524 /* Mark ALIAS as an alias to DECL. */
526 static struct cgraph_node
*
527 cgraph_same_body_alias_1 (tree alias
, tree decl
)
529 struct cgraph_node key
, *alias_node
, *decl_node
, **slot
;
531 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
);
532 gcc_assert (TREE_CODE (alias
) == FUNCTION_DECL
);
533 decl_node
= cgraph_node (decl
);
537 slot
= (struct cgraph_node
**) htab_find_slot (cgraph_hash
, &key
, INSERT
);
539 /* If the cgraph_node has been already created, fail. */
543 alias_node
= cgraph_allocate_node ();
544 alias_node
->decl
= alias
;
545 alias_node
->same_body_alias
= 1;
546 alias_node
->same_body
= decl_node
;
547 alias_node
->previous
= NULL
;
548 if (decl_node
->same_body
)
549 decl_node
->same_body
->previous
= alias_node
;
550 alias_node
->next
= decl_node
->same_body
;
551 alias_node
->thunk
.alias
= decl
;
552 decl_node
->same_body
= alias_node
;
557 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
558 Same body aliases are output whenever the body of DECL is output,
559 and cgraph_node (ALIAS) transparently returns cgraph_node (DECL). */
562 cgraph_same_body_alias (tree alias
, tree decl
)
564 #ifndef ASM_OUTPUT_DEF
565 /* If aliases aren't supported by the assembler, fail. */
569 /*gcc_assert (!assembler_name_hash);*/
571 return cgraph_same_body_alias_1 (alias
, decl
) != NULL
;
575 cgraph_add_thunk (tree alias
, tree decl
, bool this_adjusting
,
576 HOST_WIDE_INT fixed_offset
, HOST_WIDE_INT virtual_value
,
580 struct cgraph_node
*node
= cgraph_get_node (alias
);
584 gcc_assert (node
->local
.finalized
);
585 gcc_assert (!node
->same_body
);
586 cgraph_remove_node (node
);
589 node
= cgraph_same_body_alias_1 (alias
, decl
);
591 #ifdef ENABLE_CHECKING
592 gcc_assert (!virtual_offset
593 || tree_int_cst_equal (virtual_offset
, size_int (virtual_value
)));
595 node
->thunk
.fixed_offset
= fixed_offset
;
596 node
->thunk
.this_adjusting
= this_adjusting
;
597 node
->thunk
.virtual_value
= virtual_value
;
598 node
->thunk
.virtual_offset_p
= virtual_offset
!= NULL
;
599 node
->thunk
.alias
= real_alias
;
600 node
->thunk
.thunk_p
= true;
603 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
607 cgraph_get_node (tree decl
)
609 struct cgraph_node key
, *node
= NULL
, **slot
;
611 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
);
618 slot
= (struct cgraph_node
**) htab_find_slot (cgraph_hash
, &key
,
624 if (node
->same_body_alias
)
625 node
= node
->same_body
;
630 /* Insert already constructed node into hashtable. */
633 cgraph_insert_node_to_hashtable (struct cgraph_node
*node
)
635 struct cgraph_node
**slot
;
637 slot
= (struct cgraph_node
**) htab_find_slot (cgraph_hash
, node
, INSERT
);
643 /* Returns a hash code for P. */
646 hash_node_by_assembler_name (const void *p
)
648 const struct cgraph_node
*n
= (const struct cgraph_node
*) p
;
649 return (hashval_t
) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n
->decl
));
652 /* Returns nonzero if P1 and P2 are equal. */
655 eq_assembler_name (const void *p1
, const void *p2
)
657 const struct cgraph_node
*n1
= (const struct cgraph_node
*) p1
;
658 const_tree name
= (const_tree
)p2
;
659 return (decl_assembler_name_equal (n1
->decl
, name
));
662 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
663 Return NULL if there's no such node. */
666 cgraph_node_for_asm (tree asmname
)
668 struct cgraph_node
*node
;
671 if (!assembler_name_hash
)
673 assembler_name_hash
=
674 htab_create_ggc (10, hash_node_by_assembler_name
, eq_assembler_name
,
676 for (node
= cgraph_nodes
; node
; node
= node
->next
)
677 if (!node
->global
.inlined_to
)
679 tree name
= DECL_ASSEMBLER_NAME (node
->decl
);
680 slot
= htab_find_slot_with_hash (assembler_name_hash
, name
,
681 decl_assembler_name_hash (name
),
683 /* We can have multiple declarations with same assembler name. For C++
684 it is __builtin_strlen and strlen, for instance. Do we need to
685 record them all? Original implementation marked just first one
686 so lets hope for the best. */
691 struct cgraph_node
*alias
;
693 for (alias
= node
->same_body
; alias
; alias
= alias
->next
)
696 name
= DECL_ASSEMBLER_NAME (alias
->decl
);
697 hash
= decl_assembler_name_hash (name
);
698 slot
= htab_find_slot_with_hash (assembler_name_hash
, name
,
707 slot
= htab_find_slot_with_hash (assembler_name_hash
, asmname
,
708 decl_assembler_name_hash (asmname
),
713 node
= (struct cgraph_node
*) *slot
;
714 if (node
->same_body_alias
)
715 node
= node
->same_body
;
721 /* Returns a hash value for X (which really is a die_struct). */
724 edge_hash (const void *x
)
726 return htab_hash_pointer (((const struct cgraph_edge
*) x
)->call_stmt
);
729 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
732 edge_eq (const void *x
, const void *y
)
734 return ((const struct cgraph_edge
*) x
)->call_stmt
== y
;
737 /* Add call graph edge E to call site hash of its caller. */
740 cgraph_add_edge_to_call_site_hash (struct cgraph_edge
*e
)
743 slot
= htab_find_slot_with_hash (e
->caller
->call_site_hash
,
745 htab_hash_pointer (e
->call_stmt
),
751 /* Return the callgraph edge representing the GIMPLE_CALL statement
755 cgraph_edge (struct cgraph_node
*node
, gimple call_stmt
)
757 struct cgraph_edge
*e
, *e2
;
760 if (node
->call_site_hash
)
761 return (struct cgraph_edge
*)
762 htab_find_with_hash (node
->call_site_hash
, call_stmt
,
763 htab_hash_pointer (call_stmt
));
765 /* This loop may turn out to be performance problem. In such case adding
766 hashtables into call nodes with very many edges is probably best
767 solution. It is not good idea to add pointer into CALL_EXPR itself
768 because we want to make possible having multiple cgraph nodes representing
769 different clones of the same body before the body is actually cloned. */
770 for (e
= node
->callees
; e
; e
= e
->next_callee
)
772 if (e
->call_stmt
== call_stmt
)
778 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
780 if (e
->call_stmt
== call_stmt
)
787 node
->call_site_hash
= htab_create_ggc (120, edge_hash
, edge_eq
, NULL
);
788 for (e2
= node
->callees
; e2
; e2
= e2
->next_callee
)
789 cgraph_add_edge_to_call_site_hash (e2
);
790 for (e2
= node
->indirect_calls
; e2
; e2
= e2
->next_callee
)
791 cgraph_add_edge_to_call_site_hash (e2
);
798 /* Change field call_stmt of edge E to NEW_STMT. */
801 cgraph_set_call_stmt (struct cgraph_edge
*e
, gimple new_stmt
)
805 if (e
->caller
->call_site_hash
)
807 htab_remove_elt_with_hash (e
->caller
->call_site_hash
,
809 htab_hash_pointer (e
->call_stmt
));
812 e
->call_stmt
= new_stmt
;
813 if (e
->indirect_unknown_callee
814 && (decl
= gimple_call_fndecl (new_stmt
)))
816 /* Constant propagation (and possibly also inlining?) can turn an
817 indirect call into a direct one. */
818 struct cgraph_node
*new_callee
= cgraph_node (decl
);
820 cgraph_make_edge_direct (e
, new_callee
);
823 push_cfun (DECL_STRUCT_FUNCTION (e
->caller
->decl
));
824 e
->can_throw_external
= stmt_can_throw_external (new_stmt
);
826 if (e
->caller
->call_site_hash
)
827 cgraph_add_edge_to_call_site_hash (e
);
830 /* Like cgraph_set_call_stmt but walk the clone tree and update all
831 clones sharing the same function body. */
834 cgraph_set_call_stmt_including_clones (struct cgraph_node
*orig
,
835 gimple old_stmt
, gimple new_stmt
)
837 struct cgraph_node
*node
;
838 struct cgraph_edge
*edge
= cgraph_edge (orig
, old_stmt
);
841 cgraph_set_call_stmt (edge
, new_stmt
);
847 struct cgraph_edge
*edge
= cgraph_edge (node
, old_stmt
);
849 cgraph_set_call_stmt (edge
, new_stmt
);
852 else if (node
->next_sibling_clone
)
853 node
= node
->next_sibling_clone
;
856 while (node
!= orig
&& !node
->next_sibling_clone
)
857 node
= node
->clone_of
;
859 node
= node
->next_sibling_clone
;
864 /* Like cgraph_create_edge walk the clone tree and update all clones sharing
865 same function body. If clones already have edge for OLD_STMT; only
866 update the edge same way as cgraph_set_call_stmt_including_clones does.
868 TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
869 frequencies of the clones. */
872 cgraph_create_edge_including_clones (struct cgraph_node
*orig
,
873 struct cgraph_node
*callee
,
875 gimple stmt
, gcov_type count
,
876 int freq
, int loop_depth
,
877 cgraph_inline_failed_t reason
)
879 struct cgraph_node
*node
;
880 struct cgraph_edge
*edge
;
882 if (!cgraph_edge (orig
, stmt
))
884 edge
= cgraph_create_edge (orig
, callee
, stmt
, count
, freq
, loop_depth
);
885 edge
->inline_failed
= reason
;
892 struct cgraph_edge
*edge
= cgraph_edge (node
, old_stmt
);
894 /* It is possible that clones already contain the edge while
895 master didn't. Either we promoted indirect call into direct
896 call in the clone or we are processing clones of unreachable
897 master where edges has been rmeoved. */
899 cgraph_set_call_stmt (edge
, stmt
);
900 else if (!cgraph_edge (node
, stmt
))
902 edge
= cgraph_create_edge (node
, callee
, stmt
, count
,
904 edge
->inline_failed
= reason
;
909 else if (node
->next_sibling_clone
)
910 node
= node
->next_sibling_clone
;
913 while (node
!= orig
&& !node
->next_sibling_clone
)
914 node
= node
->clone_of
;
916 node
= node
->next_sibling_clone
;
921 /* Give initial reasons why inlining would fail on EDGE. This gets either
922 nullified or usually overwritten by more precise reasons later. */
925 initialize_inline_failed (struct cgraph_edge
*e
)
927 struct cgraph_node
*callee
= e
->callee
;
929 if (e
->indirect_unknown_callee
)
930 e
->inline_failed
= CIF_INDIRECT_UNKNOWN_CALL
;
931 else if (!callee
->analyzed
)
932 e
->inline_failed
= CIF_BODY_NOT_AVAILABLE
;
933 else if (callee
->local
.redefined_extern_inline
)
934 e
->inline_failed
= CIF_REDEFINED_EXTERN_INLINE
;
935 else if (!callee
->local
.inlinable
)
936 e
->inline_failed
= CIF_FUNCTION_NOT_INLINABLE
;
937 else if (e
->call_stmt
&& gimple_call_cannot_inline_p (e
->call_stmt
))
938 e
->inline_failed
= CIF_MISMATCHED_ARGUMENTS
;
940 e
->inline_failed
= CIF_FUNCTION_NOT_CONSIDERED
;
943 /* Allocate a cgraph_edge structure and fill it with data according to the
944 parameters of which only CALLEE can be NULL (when creating an indirect call
947 static struct cgraph_edge
*
948 cgraph_create_edge_1 (struct cgraph_node
*caller
, struct cgraph_node
*callee
,
949 gimple call_stmt
, gcov_type count
, int freq
, int nest
)
951 struct cgraph_edge
*edge
;
953 /* LTO does not actually have access to the call_stmt since these
954 have not been loaded yet. */
957 #ifdef ENABLE_CHECKING
958 /* This is rather pricely check possibly trigerring construction of
959 call stmt hashtable. */
960 gcc_assert (!cgraph_edge (caller
, call_stmt
));
963 gcc_assert (is_gimple_call (call_stmt
));
969 free_edges
= NEXT_FREE_EDGE (edge
);
973 edge
= ggc_alloc_cgraph_edge ();
974 edge
->uid
= cgraph_edge_max_uid
++;
978 edge
->caller
= caller
;
979 edge
->callee
= callee
;
980 edge
->prev_caller
= NULL
;
981 edge
->next_caller
= NULL
;
982 edge
->prev_callee
= NULL
;
983 edge
->next_callee
= NULL
;
986 gcc_assert (count
>= 0);
987 edge
->frequency
= freq
;
988 gcc_assert (freq
>= 0);
989 gcc_assert (freq
<= CGRAPH_FREQ_MAX
);
990 edge
->loop_nest
= nest
;
992 edge
->call_stmt
= call_stmt
;
993 push_cfun (DECL_STRUCT_FUNCTION (caller
->decl
));
994 edge
->can_throw_external
995 = call_stmt
? stmt_can_throw_external (call_stmt
) : false;
997 edge
->call_stmt_cannot_inline_p
=
998 (call_stmt
? gimple_call_cannot_inline_p (call_stmt
) : false);
999 if (call_stmt
&& caller
->call_site_hash
)
1000 cgraph_add_edge_to_call_site_hash (edge
);
1002 edge
->indirect_info
= NULL
;
1003 edge
->indirect_inlining_edge
= 0;
1008 /* Create edge from CALLER to CALLEE in the cgraph. */
1010 struct cgraph_edge
*
1011 cgraph_create_edge (struct cgraph_node
*caller
, struct cgraph_node
*callee
,
1012 gimple call_stmt
, gcov_type count
, int freq
, int nest
)
1014 struct cgraph_edge
*edge
= cgraph_create_edge_1 (caller
, callee
, call_stmt
,
1017 edge
->indirect_unknown_callee
= 0;
1018 initialize_inline_failed (edge
);
1020 edge
->next_caller
= callee
->callers
;
1021 if (callee
->callers
)
1022 callee
->callers
->prev_caller
= edge
;
1023 edge
->next_callee
= caller
->callees
;
1024 if (caller
->callees
)
1025 caller
->callees
->prev_callee
= edge
;
1026 caller
->callees
= edge
;
1027 callee
->callers
= edge
;
1033 /* Create an indirect edge with a yet-undetermined callee where the call
1034 statement destination is a formal parameter of the caller with index
1037 struct cgraph_edge
*
1038 cgraph_create_indirect_edge (struct cgraph_node
*caller
, gimple call_stmt
,
1040 gcov_type count
, int freq
, int nest
)
1042 struct cgraph_edge
*edge
= cgraph_create_edge_1 (caller
, NULL
, call_stmt
,
1045 edge
->indirect_unknown_callee
= 1;
1046 initialize_inline_failed (edge
);
1048 edge
->indirect_info
= ggc_alloc_cleared_cgraph_indirect_call_info ();
1049 edge
->indirect_info
->param_index
= -1;
1050 edge
->indirect_info
->ecf_flags
= ecf_flags
;
1052 edge
->next_callee
= caller
->indirect_calls
;
1053 if (caller
->indirect_calls
)
1054 caller
->indirect_calls
->prev_callee
= edge
;
1055 caller
->indirect_calls
= edge
;
1060 /* Remove the edge E from the list of the callers of the callee. */
1063 cgraph_edge_remove_callee (struct cgraph_edge
*e
)
1065 gcc_assert (!e
->indirect_unknown_callee
);
1067 e
->prev_caller
->next_caller
= e
->next_caller
;
1069 e
->next_caller
->prev_caller
= e
->prev_caller
;
1070 if (!e
->prev_caller
)
1071 e
->callee
->callers
= e
->next_caller
;
1074 /* Remove the edge E from the list of the callees of the caller. */
1077 cgraph_edge_remove_caller (struct cgraph_edge
*e
)
1080 e
->prev_callee
->next_callee
= e
->next_callee
;
1082 e
->next_callee
->prev_callee
= e
->prev_callee
;
1083 if (!e
->prev_callee
)
1085 if (e
->indirect_unknown_callee
)
1086 e
->caller
->indirect_calls
= e
->next_callee
;
1088 e
->caller
->callees
= e
->next_callee
;
1090 if (e
->caller
->call_site_hash
)
1091 htab_remove_elt_with_hash (e
->caller
->call_site_hash
,
1093 htab_hash_pointer (e
->call_stmt
));
1096 /* Put the edge onto the free list. */
1099 cgraph_free_edge (struct cgraph_edge
*e
)
1103 /* Clear out the edge so we do not dangle pointers. */
1104 memset (e
, 0, sizeof (*e
));
1106 NEXT_FREE_EDGE (e
) = free_edges
;
1110 /* Remove the edge E in the cgraph. */
1113 cgraph_remove_edge (struct cgraph_edge
*e
)
1115 /* Call all edge removal hooks. */
1116 cgraph_call_edge_removal_hooks (e
);
1118 if (!e
->indirect_unknown_callee
)
1119 /* Remove from callers list of the callee. */
1120 cgraph_edge_remove_callee (e
);
1122 /* Remove from callees list of the callers. */
1123 cgraph_edge_remove_caller (e
);
1125 /* Put the edge onto the free list. */
1126 cgraph_free_edge (e
);
1129 /* Set callee of call graph edge E and add it to the corresponding set of
1133 cgraph_set_edge_callee (struct cgraph_edge
*e
, struct cgraph_node
*n
)
1135 e
->prev_caller
= NULL
;
1137 n
->callers
->prev_caller
= e
;
1138 e
->next_caller
= n
->callers
;
/* Redirect callee of E to N.  The function does not update underlying
   call statement — only the callgraph linkage is changed.  */
void
cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
{
  /* Remove from callers list of the current callee.  */
  cgraph_edge_remove_callee (e);

  /* Insert to callers list of the new callee.  */
  cgraph_set_edge_callee (e, n);
}
1156 /* Make an indirect EDGE with an unknown callee an ordinary edge leading to
1160 cgraph_make_edge_direct (struct cgraph_edge
*edge
, struct cgraph_node
*callee
)
1162 edge
->indirect_unknown_callee
= 0;
1164 /* Get the edge out of the indirect edge list. */
1165 if (edge
->prev_callee
)
1166 edge
->prev_callee
->next_callee
= edge
->next_callee
;
1167 if (edge
->next_callee
)
1168 edge
->next_callee
->prev_callee
= edge
->prev_callee
;
1169 if (!edge
->prev_callee
)
1170 edge
->caller
->indirect_calls
= edge
->next_callee
;
1172 /* Put it into the normal callee list */
1173 edge
->prev_callee
= NULL
;
1174 edge
->next_callee
= edge
->caller
->callees
;
1175 if (edge
->caller
->callees
)
1176 edge
->caller
->callees
->prev_callee
= edge
;
1177 edge
->caller
->callees
= edge
;
1179 /* Insert to callers list of the new callee. */
1180 cgraph_set_edge_callee (edge
, callee
);
1182 /* We need to re-determine the inlining status of the edge. */
1183 initialize_inline_failed (edge
);
1187 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1188 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1189 of OLD_STMT if it was previously call statement. */
1192 cgraph_update_edges_for_call_stmt_node (struct cgraph_node
*node
,
1193 gimple old_stmt
, tree old_call
, gimple new_stmt
)
1195 tree new_call
= (is_gimple_call (new_stmt
)) ? gimple_call_fndecl (new_stmt
) : 0;
1197 /* We are seeing indirect calls, then there is nothing to update. */
1198 if (!new_call
&& !old_call
)
1200 /* See if we turned indirect call into direct call or folded call to one builtin
1201 into different bultin. */
1202 if (old_call
!= new_call
)
1204 struct cgraph_edge
*e
= cgraph_edge (node
, old_stmt
);
1205 struct cgraph_edge
*ne
= NULL
;
1212 /* See if the edge is already there and has the correct callee. It
1213 might be so because of indirect inlining has already updated
1215 if (new_call
&& e
->callee
&& e
->callee
->decl
== new_call
)
1218 /* Otherwise remove edge and create new one; we can't simply redirect
1219 since function has changed, so inline plan and other information
1220 attached to edge is invalid. */
1222 frequency
= e
->frequency
;
1223 loop_nest
= e
->loop_nest
;
1224 cgraph_remove_edge (e
);
1228 /* We are seeing new direct call; compute profile info based on BB. */
1229 basic_block bb
= gimple_bb (new_stmt
);
1231 frequency
= compute_call_stmt_bb_frequency (current_function_decl
,
1233 loop_nest
= bb
->loop_depth
;
1238 ne
= cgraph_create_edge (node
, cgraph_node (new_call
),
1239 new_stmt
, count
, frequency
,
1241 gcc_assert (ne
->inline_failed
);
1244 /* We only updated the call stmt; update pointer in cgraph edge.. */
1245 else if (old_stmt
!= new_stmt
)
1246 cgraph_set_call_stmt (cgraph_edge (node
, old_stmt
), new_stmt
);
1249 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1250 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1251 of OLD_STMT before it was updated (updating can happen inplace). */
1254 cgraph_update_edges_for_call_stmt (gimple old_stmt
, tree old_decl
, gimple new_stmt
)
1256 struct cgraph_node
*orig
= cgraph_node (cfun
->decl
);
1257 struct cgraph_node
*node
;
1259 cgraph_update_edges_for_call_stmt_node (orig
, old_stmt
, old_decl
, new_stmt
);
1261 for (node
= orig
->clones
; node
!= orig
;)
1263 cgraph_update_edges_for_call_stmt_node (node
, old_stmt
, old_decl
, new_stmt
);
1265 node
= node
->clones
;
1266 else if (node
->next_sibling_clone
)
1267 node
= node
->next_sibling_clone
;
1270 while (node
!= orig
&& !node
->next_sibling_clone
)
1271 node
= node
->clone_of
;
1273 node
= node
->next_sibling_clone
;
1279 /* Remove all callees from the node. */
1282 cgraph_node_remove_callees (struct cgraph_node
*node
)
1284 struct cgraph_edge
*e
, *f
;
1286 /* It is sufficient to remove the edges from the lists of callers of
1287 the callees. The callee list of the node can be zapped with one
1289 for (e
= node
->callees
; e
; e
= f
)
1292 cgraph_call_edge_removal_hooks (e
);
1293 if (!e
->indirect_unknown_callee
)
1294 cgraph_edge_remove_callee (e
);
1295 cgraph_free_edge (e
);
1297 for (e
= node
->indirect_calls
; e
; e
= f
)
1300 cgraph_call_edge_removal_hooks (e
);
1301 if (!e
->indirect_unknown_callee
)
1302 cgraph_edge_remove_callee (e
);
1303 cgraph_free_edge (e
);
1305 node
->indirect_calls
= NULL
;
1306 node
->callees
= NULL
;
1307 if (node
->call_site_hash
)
1309 htab_delete (node
->call_site_hash
);
1310 node
->call_site_hash
= NULL
;
1314 /* Remove all callers from the node. */
1317 cgraph_node_remove_callers (struct cgraph_node
*node
)
1319 struct cgraph_edge
*e
, *f
;
1321 /* It is sufficient to remove the edges from the lists of callees of
1322 the callers. The caller list of the node can be zapped with one
1324 for (e
= node
->callers
; e
; e
= f
)
1327 cgraph_call_edge_removal_hooks (e
);
1328 cgraph_edge_remove_caller (e
);
1329 cgraph_free_edge (e
);
1331 node
->callers
= NULL
;
1334 /* Release memory used to represent body of function NODE. */
1337 cgraph_release_function_body (struct cgraph_node
*node
)
1339 if (DECL_STRUCT_FUNCTION (node
->decl
))
1341 tree old_decl
= current_function_decl
;
1342 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
1343 if (cfun
->gimple_df
)
1345 current_function_decl
= node
->decl
;
1347 delete_tree_cfg_annotations ();
1349 current_function_decl
= old_decl
;
1353 gcc_assert (dom_computed
[0] == DOM_NONE
);
1354 gcc_assert (dom_computed
[1] == DOM_NONE
);
1357 if (cfun
->value_histograms
)
1359 gcc_assert (!current_loops
);
1361 gimple_set_body (node
->decl
, NULL
);
1362 VEC_free (ipa_opt_pass
, heap
,
1363 node
->ipa_transforms_to_apply
);
1364 /* Struct function hangs a lot of data that would leak if we didn't
1365 removed all pointers to it. */
1366 ggc_free (DECL_STRUCT_FUNCTION (node
->decl
));
1367 DECL_STRUCT_FUNCTION (node
->decl
) = NULL
;
1369 DECL_SAVED_TREE (node
->decl
) = NULL
;
1370 /* If the node is abstract and needed, then do not clear DECL_INITIAL
1371 of its associated function function declaration because it's
1372 needed to emit debug info later. */
1373 if (!node
->abstract_and_needed
)
1374 DECL_INITIAL (node
->decl
) = error_mark_node
;
1377 /* Remove same body alias node. */
1380 cgraph_remove_same_body_alias (struct cgraph_node
*node
)
1383 int uid
= node
->uid
;
1385 gcc_assert (node
->same_body_alias
);
1387 node
->previous
->next
= node
->next
;
1389 node
->same_body
->same_body
= node
->next
;
1391 node
->next
->previous
= node
->previous
;
1393 node
->previous
= NULL
;
1394 slot
= htab_find_slot (cgraph_hash
, node
, NO_INSERT
);
1396 htab_clear_slot (cgraph_hash
, slot
);
1397 if (assembler_name_hash
)
1399 tree name
= DECL_ASSEMBLER_NAME (node
->decl
);
1400 slot
= htab_find_slot_with_hash (assembler_name_hash
, name
,
1401 decl_assembler_name_hash (name
),
1403 if (slot
&& *slot
== node
)
1404 htab_clear_slot (assembler_name_hash
, slot
);
1407 /* Clear out the node to NULL all pointers and add the node to the free
1409 memset (node
, 0, sizeof(*node
));
1411 NEXT_FREE_NODE (node
) = free_nodes
;
1415 /* Remove the node from cgraph. */
1418 cgraph_remove_node (struct cgraph_node
*node
)
1421 bool kill_body
= false;
1422 struct cgraph_node
*n
;
1423 int uid
= node
->uid
;
1425 cgraph_call_node_removal_hooks (node
);
1426 cgraph_node_remove_callers (node
);
1427 cgraph_node_remove_callees (node
);
1428 ipa_remove_all_references (&node
->ref_list
);
1429 ipa_remove_all_refering (&node
->ref_list
);
1430 VEC_free (ipa_opt_pass
, heap
,
1431 node
->ipa_transforms_to_apply
);
1433 /* Incremental inlining access removed nodes stored in the postorder list.
1435 node
->needed
= node
->reachable
= false;
1436 for (n
= node
->nested
; n
; n
= n
->next_nested
)
1438 node
->nested
= NULL
;
1441 struct cgraph_node
**node2
= &node
->origin
->nested
;
1443 while (*node2
!= node
)
1444 node2
= &(*node2
)->next_nested
;
1445 *node2
= node
->next_nested
;
1448 node
->previous
->next
= node
->next
;
1450 cgraph_nodes
= node
->next
;
1452 node
->next
->previous
= node
->previous
;
1454 node
->previous
= NULL
;
1455 slot
= htab_find_slot (cgraph_hash
, node
, NO_INSERT
);
1458 struct cgraph_node
*next_inline_clone
;
1460 for (next_inline_clone
= node
->clones
;
1461 next_inline_clone
&& next_inline_clone
->decl
!= node
->decl
;
1462 next_inline_clone
= next_inline_clone
->next_sibling_clone
)
1465 /* If there is inline clone of the node being removed, we need
1466 to put it into the position of removed node and reorganize all
1467 other clones to be based on it. */
1468 if (next_inline_clone
)
1470 struct cgraph_node
*n
;
1471 struct cgraph_node
*new_clones
;
1473 *slot
= next_inline_clone
;
1475 /* Unlink inline clone from the list of clones of removed node. */
1476 if (next_inline_clone
->next_sibling_clone
)
1477 next_inline_clone
->next_sibling_clone
->prev_sibling_clone
1478 = next_inline_clone
->prev_sibling_clone
;
1479 if (next_inline_clone
->prev_sibling_clone
)
1481 gcc_assert (node
->clones
!= next_inline_clone
);
1482 next_inline_clone
->prev_sibling_clone
->next_sibling_clone
1483 = next_inline_clone
->next_sibling_clone
;
1487 gcc_assert (node
->clones
== next_inline_clone
);
1488 node
->clones
= next_inline_clone
->next_sibling_clone
;
1491 new_clones
= node
->clones
;
1492 node
->clones
= NULL
;
1494 /* Copy clone info. */
1495 next_inline_clone
->clone
= node
->clone
;
1497 /* Now place it into clone tree at same level at NODE. */
1498 next_inline_clone
->clone_of
= node
->clone_of
;
1499 next_inline_clone
->prev_sibling_clone
= NULL
;
1500 next_inline_clone
->next_sibling_clone
= NULL
;
1503 if (node
->clone_of
->clones
)
1504 node
->clone_of
->clones
->prev_sibling_clone
= next_inline_clone
;
1505 next_inline_clone
->next_sibling_clone
= node
->clone_of
->clones
;
1506 node
->clone_of
->clones
= next_inline_clone
;
1509 /* Merge the clone list. */
1512 if (!next_inline_clone
->clones
)
1513 next_inline_clone
->clones
= new_clones
;
1516 n
= next_inline_clone
->clones
;
1517 while (n
->next_sibling_clone
)
1518 n
= n
->next_sibling_clone
;
1519 n
->next_sibling_clone
= new_clones
;
1520 new_clones
->prev_sibling_clone
= n
;
1524 /* Update clone_of pointers. */
1528 n
->clone_of
= next_inline_clone
;
1529 n
= n
->next_sibling_clone
;
1534 htab_clear_slot (cgraph_hash
, slot
);
1539 if (node
->prev_sibling_clone
)
1540 node
->prev_sibling_clone
->next_sibling_clone
= node
->next_sibling_clone
;
1541 else if (node
->clone_of
)
1542 node
->clone_of
->clones
= node
->next_sibling_clone
;
1543 if (node
->next_sibling_clone
)
1544 node
->next_sibling_clone
->prev_sibling_clone
= node
->prev_sibling_clone
;
1547 struct cgraph_node
*n
, *next
;
1551 for (n
= node
->clones
; n
->next_sibling_clone
; n
= n
->next_sibling_clone
)
1552 n
->clone_of
= node
->clone_of
;
1553 n
->clone_of
= node
->clone_of
;
1554 n
->next_sibling_clone
= node
->clone_of
->clones
;
1555 if (node
->clone_of
->clones
)
1556 node
->clone_of
->clones
->prev_sibling_clone
= n
;
1557 node
->clone_of
->clones
= node
->clones
;
1561 /* We are removing node with clones. this makes clones inconsistent,
1562 but assume they will be removed subsequently and just keep clone
1563 tree intact. This can happen in unreachable function removal since
1564 we remove unreachable functions in random order, not by bottom-up
1565 walk of clone trees. */
1566 for (n
= node
->clones
; n
; n
= next
)
1568 next
= n
->next_sibling_clone
;
1569 n
->next_sibling_clone
= NULL
;
1570 n
->prev_sibling_clone
= NULL
;
1576 while (node
->same_body
)
1577 cgraph_remove_same_body_alias (node
->same_body
);
1579 if (node
->same_comdat_group
)
1581 struct cgraph_node
*prev
;
1582 for (prev
= node
->same_comdat_group
;
1583 prev
->same_comdat_group
!= node
;
1584 prev
= prev
->same_comdat_group
)
1586 if (node
->same_comdat_group
== prev
)
1587 prev
->same_comdat_group
= NULL
;
1589 prev
->same_comdat_group
= node
->same_comdat_group
;
1590 node
->same_comdat_group
= NULL
;
1593 /* While all the clones are removed after being proceeded, the function
1594 itself is kept in the cgraph even after it is compiled. Check whether
1595 we are done with this body and reclaim it proactively if this is the case.
1597 if (!kill_body
&& *slot
)
1599 struct cgraph_node
*n
= (struct cgraph_node
*) *slot
;
1600 if (!n
->clones
&& !n
->clone_of
&& !n
->global
.inlined_to
1601 && (cgraph_global_info_ready
1602 && (TREE_ASM_WRITTEN (n
->decl
) || DECL_EXTERNAL (n
->decl
)
1603 || n
->in_other_partition
)))
1606 if (assembler_name_hash
)
1608 tree name
= DECL_ASSEMBLER_NAME (node
->decl
);
1609 slot
= htab_find_slot_with_hash (assembler_name_hash
, name
,
1610 decl_assembler_name_hash (name
),
1612 /* Inline clones are not hashed. */
1613 if (slot
&& *slot
== node
)
1614 htab_clear_slot (assembler_name_hash
, slot
);
1618 cgraph_release_function_body (node
);
1620 if (node
->call_site_hash
)
1622 htab_delete (node
->call_site_hash
);
1623 node
->call_site_hash
= NULL
;
1627 /* Clear out the node to NULL all pointers and add the node to the free
1629 memset (node
, 0, sizeof(*node
));
1631 NEXT_FREE_NODE (node
) = free_nodes
;
1635 /* Remove the node from cgraph. */
1638 cgraph_remove_node_and_inline_clones (struct cgraph_node
*node
)
1640 struct cgraph_edge
*e
, *next
;
1641 for (e
= node
->callees
; e
; e
= next
)
1643 next
= e
->next_callee
;
1644 if (!e
->inline_failed
)
1645 cgraph_remove_node_and_inline_clones (e
->callee
);
1647 cgraph_remove_node (node
);
1650 /* Notify finalize_compilation_unit that given node is reachable. */
1653 cgraph_mark_reachable_node (struct cgraph_node
*node
)
1655 if (!node
->reachable
&& node
->local
.finalized
)
1657 if (cgraph_global_info_ready
)
1659 /* Verify that function does not appear to be needed out of blue
1660 during the optimization process. This can happen for extern
1661 inlines when bodies was removed after inlining. */
1662 gcc_assert ((node
->analyzed
|| node
->in_other_partition
1663 || DECL_EXTERNAL (node
->decl
)));
1666 notice_global_symbol (node
->decl
);
1667 node
->reachable
= 1;
1669 node
->next_needed
= cgraph_nodes_queue
;
1670 cgraph_nodes_queue
= node
;
1674 /* Likewise indicate that a node is needed, i.e. reachable via some
1678 cgraph_mark_needed_node (struct cgraph_node
*node
)
1681 gcc_assert (!node
->global
.inlined_to
);
1682 cgraph_mark_reachable_node (node
);
1685 /* Likewise indicate that a node is having address taken. */
1688 cgraph_mark_address_taken_node (struct cgraph_node
*node
)
1690 cgraph_mark_reachable_node (node
);
1691 node
->address_taken
= 1;
1694 /* Return local info for the compiled function. */
1696 struct cgraph_local_info
*
1697 cgraph_local_info (tree decl
)
1699 struct cgraph_node
*node
;
1701 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
);
1702 node
= cgraph_node (decl
);
1703 return &node
->local
;
1706 /* Return local info for the compiled function. */
1708 struct cgraph_global_info
*
1709 cgraph_global_info (tree decl
)
1711 struct cgraph_node
*node
;
1713 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
&& cgraph_global_info_ready
);
1714 node
= cgraph_node (decl
);
1715 return &node
->global
;
1718 /* Return local info for the compiled function. */
1720 struct cgraph_rtl_info
*
1721 cgraph_rtl_info (tree decl
)
1723 struct cgraph_node
*node
;
1725 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
);
1726 node
= cgraph_node (decl
);
1727 if (decl
!= current_function_decl
1728 && !TREE_ASM_WRITTEN (node
->decl
))
1733 /* Return a string describing the failure REASON. */
1736 cgraph_inline_failed_string (cgraph_inline_failed_t reason
)
1739 #define DEFCIFCODE(code, string) string,
1741 static const char *cif_string_table
[CIF_N_REASONS
] = {
1742 #include "cif-code.def"
1745 /* Signedness of an enum type is implementation defined, so cast it
1746 to unsigned before testing. */
1747 gcc_assert ((unsigned) reason
< CIF_N_REASONS
);
1748 return cif_string_table
[reason
];
1751 /* Return name of the node used in debug output. */
1753 cgraph_node_name (struct cgraph_node
*node
)
1755 return lang_hooks
.decl_printable_name (node
->decl
, 2);
1758 /* Names used to print out the availability enum. */
1759 const char * const cgraph_availability_names
[] =
1760 {"unset", "not_available", "overwritable", "available", "local"};
1763 /* Dump call graph node NODE to file F. */
1766 dump_cgraph_node (FILE *f
, struct cgraph_node
*node
)
1768 struct cgraph_edge
*edge
;
1769 int indirect_calls_count
= 0;
1771 fprintf (f
, "%s/%i(%i)", cgraph_node_name (node
), node
->uid
,
1773 dump_addr (f
, " @", (void *)node
);
1774 if (DECL_ASSEMBLER_NAME_SET_P (node
->decl
))
1775 fprintf (f
, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node
->decl
)));
1776 if (node
->global
.inlined_to
)
1777 fprintf (f
, " (inline copy in %s/%i)",
1778 cgraph_node_name (node
->global
.inlined_to
),
1779 node
->global
.inlined_to
->uid
);
1781 fprintf (f
, " (clone of %s/%i)",
1782 cgraph_node_name (node
->clone_of
),
1783 node
->clone_of
->uid
);
1784 if (cgraph_function_flags_ready
)
1785 fprintf (f
, " availability:%s",
1786 cgraph_availability_names
[cgraph_function_body_availability (node
)]);
1788 fprintf (f
, " analyzed");
1789 if (node
->in_other_partition
)
1790 fprintf (f
, " in_other_partition");
1792 fprintf (f
, " executed "HOST_WIDEST_INT_PRINT_DEC
"x",
1793 (HOST_WIDEST_INT
)node
->count
);
1794 if (node
->local
.inline_summary
.self_time
)
1795 fprintf (f
, " %i time, %i benefit", node
->local
.inline_summary
.self_time
,
1796 node
->local
.inline_summary
.time_inlining_benefit
);
1797 if (node
->global
.time
&& node
->global
.time
1798 != node
->local
.inline_summary
.self_time
)
1799 fprintf (f
, " (%i after inlining)", node
->global
.time
);
1800 if (node
->local
.inline_summary
.self_size
)
1801 fprintf (f
, " %i size, %i benefit", node
->local
.inline_summary
.self_size
,
1802 node
->local
.inline_summary
.size_inlining_benefit
);
1803 if (node
->global
.size
&& node
->global
.size
1804 != node
->local
.inline_summary
.self_size
)
1805 fprintf (f
, " (%i after inlining)", node
->global
.size
);
1806 if (node
->local
.inline_summary
.estimated_self_stack_size
)
1807 fprintf (f
, " %i bytes stack usage", (int)node
->local
.inline_summary
.estimated_self_stack_size
);
1808 if (node
->global
.estimated_stack_size
!= node
->local
.inline_summary
.estimated_self_stack_size
)
1809 fprintf (f
, " %i bytes after inlining", (int)node
->global
.estimated_stack_size
);
1811 fprintf (f
, " nested in: %s", cgraph_node_name (node
->origin
));
1813 fprintf (f
, " needed");
1814 if (node
->address_taken
)
1815 fprintf (f
, " address_taken");
1816 else if (node
->reachable
)
1817 fprintf (f
, " reachable");
1818 else if (node
->reachable_from_other_partition
)
1819 fprintf (f
, " reachable_from_other_partition");
1820 if (gimple_has_body_p (node
->decl
))
1821 fprintf (f
, " body");
1823 fprintf (f
, " process");
1824 if (node
->local
.local
)
1825 fprintf (f
, " local");
1826 if (node
->local
.externally_visible
)
1827 fprintf (f
, " externally_visible");
1828 if (node
->local
.used_from_object_file
)
1829 fprintf (f
, " used_from_object_file");
1830 if (node
->local
.finalized
)
1831 fprintf (f
, " finalized");
1832 if (node
->local
.disregard_inline_limits
)
1833 fprintf (f
, " always_inline");
1834 else if (node
->local
.inlinable
)
1835 fprintf (f
, " inlinable");
1836 else if (node
->local
.versionable
)
1837 fprintf (f
, " versionable");
1838 if (node
->local
.redefined_extern_inline
)
1839 fprintf (f
, " redefined_extern_inline");
1840 if (TREE_ASM_WRITTEN (node
->decl
))
1841 fprintf (f
, " asm_written");
1843 fprintf (f
, "\n called by: ");
1844 for (edge
= node
->callers
; edge
; edge
= edge
->next_caller
)
1846 fprintf (f
, "%s/%i ", cgraph_node_name (edge
->caller
),
1849 fprintf (f
, "("HOST_WIDEST_INT_PRINT_DEC
"x) ",
1850 (HOST_WIDEST_INT
)edge
->count
);
1851 if (edge
->frequency
)
1852 fprintf (f
, "(%.2f per call) ",
1853 edge
->frequency
/ (double)CGRAPH_FREQ_BASE
);
1854 if (!edge
->inline_failed
)
1855 fprintf(f
, "(inlined) ");
1856 if (edge
->indirect_inlining_edge
)
1857 fprintf(f
, "(indirect_inlining) ");
1858 if (edge
->can_throw_external
)
1859 fprintf(f
, "(can throw external) ");
1862 fprintf (f
, "\n calls: ");
1863 for (edge
= node
->callees
; edge
; edge
= edge
->next_callee
)
1865 fprintf (f
, "%s/%i ", cgraph_node_name (edge
->callee
),
1867 if (!edge
->inline_failed
)
1868 fprintf(f
, "(inlined) ");
1869 if (edge
->indirect_inlining_edge
)
1870 fprintf(f
, "(indirect_inlining) ");
1872 fprintf (f
, "("HOST_WIDEST_INT_PRINT_DEC
"x) ",
1873 (HOST_WIDEST_INT
)edge
->count
);
1874 if (edge
->frequency
)
1875 fprintf (f
, "(%.2f per call) ",
1876 edge
->frequency
/ (double)CGRAPH_FREQ_BASE
);
1877 if (edge
->loop_nest
)
1878 fprintf (f
, "(nested in %i loops) ", edge
->loop_nest
);
1879 if (edge
->can_throw_external
)
1880 fprintf(f
, "(can throw external) ");
1883 fprintf (f
, " References: ");
1884 ipa_dump_references (f
, &node
->ref_list
);
1885 fprintf (f
, " Refering this function: ");
1886 ipa_dump_refering (f
, &node
->ref_list
);
1888 for (edge
= node
->indirect_calls
; edge
; edge
= edge
->next_callee
)
1889 indirect_calls_count
++;
1890 if (indirect_calls_count
)
1891 fprintf (f
, " has %i outgoing edges for indirect calls.\n",
1892 indirect_calls_count
);
1894 if (node
->same_body
)
1896 struct cgraph_node
*n
;
1897 fprintf (f
, " aliases & thunks:");
1898 for (n
= node
->same_body
; n
; n
= n
->next
)
1900 fprintf (f
, " %s/%i", cgraph_node_name (n
), n
->uid
);
1901 if (n
->thunk
.thunk_p
)
1903 fprintf (f
, " (thunk of %s fixed ofset %i virtual value %i has "
1904 "virtual offset %i",
1905 lang_hooks
.decl_printable_name (n
->thunk
.alias
, 2),
1906 (int)n
->thunk
.fixed_offset
,
1907 (int)n
->thunk
.virtual_value
,
1908 (int)n
->thunk
.virtual_offset_p
);
1911 if (DECL_ASSEMBLER_NAME_SET_P (n
->decl
))
1912 fprintf (f
, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (n
->decl
)));
1919 /* Dump call graph node NODE to stderr. */
1922 debug_cgraph_node (struct cgraph_node
*node
)
1924 dump_cgraph_node (stderr
, node
);
1928 /* Dump the callgraph to file F. */
1931 dump_cgraph (FILE *f
)
1933 struct cgraph_node
*node
;
1935 fprintf (f
, "callgraph:\n\n");
1936 for (node
= cgraph_nodes
; node
; node
= node
->next
)
1937 dump_cgraph_node (f
, node
);
1941 /* Dump the call graph to stderr. */
1946 dump_cgraph (stderr
);
1950 /* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables. */
1953 change_decl_assembler_name (tree decl
, tree name
)
1955 gcc_assert (!assembler_name_hash
);
1956 if (!DECL_ASSEMBLER_NAME_SET_P (decl
))
1958 SET_DECL_ASSEMBLER_NAME (decl
, name
);
1961 if (name
== DECL_ASSEMBLER_NAME (decl
))
1964 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl
))
1965 && DECL_RTL_SET_P (decl
))
1966 warning (0, "%D renamed after being referenced in assembly", decl
);
1968 SET_DECL_ASSEMBLER_NAME (decl
, name
);
1971 /* Add a top-level asm statement to the list. */
1973 struct cgraph_asm_node
*
1974 cgraph_add_asm_node (tree asm_str
)
1976 struct cgraph_asm_node
*node
;
1978 node
= ggc_alloc_cleared_cgraph_asm_node ();
1979 node
->asm_str
= asm_str
;
1980 node
->order
= cgraph_order
++;
1982 if (cgraph_asm_nodes
== NULL
)
1983 cgraph_asm_nodes
= node
;
1985 cgraph_asm_last_node
->next
= node
;
1986 cgraph_asm_last_node
= node
;
1990 /* Return true when the DECL can possibly be inlined. */
1992 cgraph_function_possibly_inlined_p (tree decl
)
1994 if (!cgraph_global_info_ready
)
1995 return !DECL_UNINLINABLE (decl
);
1996 return DECL_POSSIBLY_INLINED (decl
);
1999 /* Create clone of E in the node N represented by CALL_EXPR the callgraph. */
2000 struct cgraph_edge
*
2001 cgraph_clone_edge (struct cgraph_edge
*e
, struct cgraph_node
*n
,
2002 gimple call_stmt
, unsigned stmt_uid
, gcov_type count_scale
,
2003 int freq_scale
, int loop_nest
, bool update_original
)
2005 struct cgraph_edge
*new_edge
;
2006 gcov_type count
= e
->count
* count_scale
/ REG_BR_PROB_BASE
;
2009 /* We do not want to ignore loop nest after frequency drops to 0. */
2012 freq
= e
->frequency
* (gcov_type
) freq_scale
/ CGRAPH_FREQ_BASE
;
2013 if (freq
> CGRAPH_FREQ_MAX
)
2014 freq
= CGRAPH_FREQ_MAX
;
2016 if (e
->indirect_unknown_callee
)
2020 if (call_stmt
&& (decl
= gimple_call_fndecl (call_stmt
)))
2022 struct cgraph_node
*callee
= cgraph_node (decl
);
2023 new_edge
= cgraph_create_edge (n
, callee
, call_stmt
, count
, freq
,
2024 e
->loop_nest
+ loop_nest
);
2028 new_edge
= cgraph_create_indirect_edge (n
, call_stmt
,
2029 e
->indirect_info
->ecf_flags
,
2031 e
->loop_nest
+ loop_nest
);
2032 *new_edge
->indirect_info
= *e
->indirect_info
;
2036 new_edge
= cgraph_create_edge (n
, e
->callee
, call_stmt
, count
, freq
,
2037 e
->loop_nest
+ loop_nest
);
2039 new_edge
->inline_failed
= e
->inline_failed
;
2040 new_edge
->indirect_inlining_edge
= e
->indirect_inlining_edge
;
2041 new_edge
->lto_stmt_uid
= stmt_uid
;
2042 if (update_original
)
2044 e
->count
-= new_edge
->count
;
2048 cgraph_call_edge_duplication_hooks (e
, new_edge
);
2052 /* Create node representing clone of N executed COUNT times. Decrease
2053 the execution counts from original node too.
2054 The new clone will have decl set to DECL that may or may not be the same
2057 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
2058 function's profile to reflect the fact that part of execution is handled
2060 struct cgraph_node
*
2061 cgraph_clone_node (struct cgraph_node
*n
, tree decl
, gcov_type count
, int freq
,
2062 int loop_nest
, bool update_original
,
2063 VEC(cgraph_edge_p
,heap
) *redirect_callers
)
2065 struct cgraph_node
*new_node
= cgraph_create_node ();
2066 struct cgraph_edge
*e
;
2067 gcov_type count_scale
;
2070 new_node
->decl
= decl
;
2071 new_node
->origin
= n
->origin
;
2072 if (new_node
->origin
)
2074 new_node
->next_nested
= new_node
->origin
->nested
;
2075 new_node
->origin
->nested
= new_node
;
2077 new_node
->analyzed
= n
->analyzed
;
2078 new_node
->local
= n
->local
;
2079 new_node
->local
.externally_visible
= false;
2080 new_node
->local
.used_from_object_file
= false;
2081 new_node
->local
.local
= true;
2082 new_node
->local
.vtable_method
= false;
2083 new_node
->global
= n
->global
;
2084 new_node
->rtl
= n
->rtl
;
2085 new_node
->count
= count
;
2086 new_node
->frequency
= n
->frequency
;
2087 new_node
->clone
= n
->clone
;
2088 new_node
->clone
.tree_map
= 0;
2091 if (new_node
->count
> n
->count
)
2092 count_scale
= REG_BR_PROB_BASE
;
2094 count_scale
= new_node
->count
* REG_BR_PROB_BASE
/ n
->count
;
2098 if (update_original
)
2105 for (i
= 0; VEC_iterate (cgraph_edge_p
, redirect_callers
, i
, e
); i
++)
2107 /* Redirect calls to the old version node to point to its new
2109 cgraph_redirect_edge_callee (e
, new_node
);
2113 for (e
= n
->callees
;e
; e
=e
->next_callee
)
2114 cgraph_clone_edge (e
, new_node
, e
->call_stmt
, e
->lto_stmt_uid
,
2115 count_scale
, freq
, loop_nest
, update_original
);
2117 for (e
= n
->indirect_calls
; e
; e
= e
->next_callee
)
2118 cgraph_clone_edge (e
, new_node
, e
->call_stmt
, e
->lto_stmt_uid
,
2119 count_scale
, freq
, loop_nest
, update_original
);
2120 ipa_clone_references (new_node
, NULL
, &n
->ref_list
);
2122 new_node
->next_sibling_clone
= n
->clones
;
2124 n
->clones
->prev_sibling_clone
= new_node
;
2125 n
->clones
= new_node
;
2126 new_node
->clone_of
= n
;
2128 cgraph_call_node_duplication_hooks (n
, new_node
);
2129 if (n
->decl
!= decl
)
2131 struct cgraph_node
**slot
;
2132 slot
= (struct cgraph_node
**) htab_find_slot (cgraph_hash
, new_node
, INSERT
);
2133 gcc_assert (!*slot
);
2135 if (assembler_name_hash
)
2138 tree name
= DECL_ASSEMBLER_NAME (decl
);
2140 aslot
= htab_find_slot_with_hash (assembler_name_hash
, name
,
2141 decl_assembler_name_hash (name
),
2143 gcc_assert (!*aslot
);
2150 /* Create a new name for clone of DECL, add SUFFIX. Returns an identifier. */
2152 static GTY(()) unsigned int clone_fn_id_num
;
2155 clone_function_name (tree decl
, const char *suffix
)
2157 tree name
= DECL_ASSEMBLER_NAME (decl
);
2158 size_t len
= IDENTIFIER_LENGTH (name
);
2159 char *tmp_name
, *prefix
;
2161 prefix
= XALLOCAVEC (char, len
+ strlen (suffix
) + 2);
2162 memcpy (prefix
, IDENTIFIER_POINTER (name
), len
);
2163 strcpy (prefix
+ len
+ 1, suffix
);
2164 #ifndef NO_DOT_IN_LABEL
2166 #elif !defined NO_DOLLAR_IN_LABEL
2171 ASM_FORMAT_PRIVATE_NAME (tmp_name
, prefix
, clone_fn_id_num
++);
2172 return get_identifier (tmp_name
);
2175 /* Create callgraph node clone with new declaration. The actual body will
2176 be copied later at compilation stage.
2178 TODO: after merging in ipa-sra use function call notes instead of args_to_skip
2181 struct cgraph_node
*
2182 cgraph_create_virtual_clone (struct cgraph_node
*old_node
,
2183 VEC(cgraph_edge_p
,heap
) *redirect_callers
,
2184 VEC(ipa_replace_map_p
,gc
) *tree_map
,
2185 bitmap args_to_skip
,
2186 const char * suffix
)
2188 tree old_decl
= old_node
->decl
;
2189 struct cgraph_node
*new_node
= NULL
;
2192 struct ipa_replace_map
*map
;
2194 #ifdef ENABLE_CHECKING
2196 gcc_assert (tree_versionable_function_p (old_decl
));
2199 /* Make a new FUNCTION_DECL tree node */
2201 new_decl
= copy_node (old_decl
);
2203 new_decl
= build_function_decl_skip_args (old_decl
, args_to_skip
);
2204 DECL_STRUCT_FUNCTION (new_decl
) = NULL
;
2206 /* Generate a new name for the new version. */
2207 DECL_NAME (new_decl
) = clone_function_name (old_decl
, suffix
);
2208 SET_DECL_ASSEMBLER_NAME (new_decl
, DECL_NAME (new_decl
));
2209 SET_DECL_RTL (new_decl
, NULL
);
2211 new_node
= cgraph_clone_node (old_node
, new_decl
, old_node
->count
,
2212 CGRAPH_FREQ_BASE
, 0, false,
2214 /* Update the properties.
2215 Make clone visible only within this translation unit. Make sure
2216 that is not weak also.
2217 ??? We cannot use COMDAT linkage because there is no
2218 ABI support for this. */
2219 DECL_EXTERNAL (new_node
->decl
) = 0;
2220 if (DECL_ONE_ONLY (old_decl
))
2221 DECL_SECTION_NAME (new_node
->decl
) = NULL
;
2222 DECL_COMDAT_GROUP (new_node
->decl
) = 0;
2223 TREE_PUBLIC (new_node
->decl
) = 0;
2224 DECL_COMDAT (new_node
->decl
) = 0;
2225 DECL_WEAK (new_node
->decl
) = 0;
2226 new_node
->clone
.tree_map
= tree_map
;
2227 new_node
->clone
.args_to_skip
= args_to_skip
;
2228 for (i
= 0; VEC_iterate (ipa_replace_map_p
, tree_map
, i
, map
); i
++)
2230 tree var
= map
->new_tree
;
2233 if (TREE_CODE (var
) != ADDR_EXPR
)
2235 var
= get_base_var (var
);
2239 /* Record references of the future statement initializing the constant
2241 if (TREE_CODE (var
) == FUNCTION_DECL
)
2242 ipa_record_reference (new_node
, NULL
, cgraph_node (var
),
2243 NULL
, IPA_REF_ADDR
, NULL
);
2244 else if (TREE_CODE (var
) == VAR_DECL
)
2245 ipa_record_reference (new_node
, NULL
, NULL
, varpool_node (var
),
2246 IPA_REF_ADDR
, NULL
);
2249 new_node
->clone
.combined_args_to_skip
= old_node
->clone
.combined_args_to_skip
;
2250 else if (old_node
->clone
.combined_args_to_skip
)
2252 int newi
= 0, oldi
= 0;
2254 bitmap new_args_to_skip
= BITMAP_GGC_ALLOC ();
2255 struct cgraph_node
*orig_node
;
2256 for (orig_node
= old_node
; orig_node
->clone_of
; orig_node
= orig_node
->clone_of
)
2258 for (arg
= DECL_ARGUMENTS (orig_node
->decl
); arg
; arg
= TREE_CHAIN (arg
), oldi
++)
2260 if (bitmap_bit_p (old_node
->clone
.combined_args_to_skip
, oldi
))
2262 bitmap_set_bit (new_args_to_skip
, oldi
);
2265 if (bitmap_bit_p (args_to_skip
, newi
))
2266 bitmap_set_bit (new_args_to_skip
, oldi
);
2269 new_node
->clone
.combined_args_to_skip
= new_args_to_skip
;
2272 new_node
->clone
.combined_args_to_skip
= args_to_skip
;
2273 new_node
->local
.externally_visible
= 0;
2274 new_node
->local
.used_from_object_file
= 0;
2275 new_node
->local
.local
= 1;
2276 new_node
->lowered
= true;
2277 new_node
->reachable
= true;
2283 /* NODE is no longer nested function; update cgraph accordingly. */
2285 cgraph_unnest_node (struct cgraph_node
*node
)
2287 struct cgraph_node
**node2
= &node
->origin
->nested
;
2288 gcc_assert (node
->origin
);
2290 while (*node2
!= node
)
2291 node2
= &(*node2
)->next_nested
;
2292 *node2
= node
->next_nested
;
2293 node
->origin
= NULL
;
2296 /* Return function availability. See cgraph.h for description of individual
2299 cgraph_function_body_availability (struct cgraph_node
*node
)
2301 enum availability avail
;
2302 gcc_assert (cgraph_function_flags_ready
);
2303 if (!node
->analyzed
)
2304 avail
= AVAIL_NOT_AVAILABLE
;
2305 else if (node
->local
.local
)
2306 avail
= AVAIL_LOCAL
;
2307 else if (!node
->local
.externally_visible
)
2308 avail
= AVAIL_AVAILABLE
;
2309 /* Inline functions are safe to be analyzed even if their sybol can
2310 be overwritten at runtime. It is not meaningful to enfore any sane
2311 behaviour on replacing inline function by different body. */
2312 else if (DECL_DECLARED_INLINE_P (node
->decl
))
2313 avail
= AVAIL_AVAILABLE
;
2315 /* If the function can be overwritten, return OVERWRITABLE. Take
2316 care at least of two notable extensions - the COMDAT functions
2317 used to share template instantiations in C++ (this is symmetric
2318 to code cp_cannot_inline_tree_fn and probably shall be shared and
2319 the inlinability hooks completely eliminated).
2321 ??? Does the C++ one definition rule allow us to always return
2322 AVAIL_AVAILABLE here? That would be good reason to preserve this
2325 else if (DECL_REPLACEABLE_P (node
->decl
) && !DECL_EXTERNAL (node
->decl
))
2326 avail
= AVAIL_OVERWRITABLE
;
2327 else avail
= AVAIL_AVAILABLE
;
/* NOTE(review): formatting of this excerpt is mangled and several structural
   lines (braces, "break;" statements, an "if (lowered)" guard, "pop_cfun ()"
   calls) appear to be missing; the code below is left byte-identical and only
   comments are added.  Confirm structure against the full file.  */
2332 /* Add the function FNDECL to the call graph.
2333 Unlike cgraph_finalize_function, this function is intended to be used
2334 by middle end and allows insertion of new function at arbitrary point
2335 of compilation. The function can be either in high, low or SSA form
2338 The function is assumed to be reachable and have address taken (so no
2339 API breaking optimizations are performed on it).
2341 Main work done by this function is to enqueue the function for later
2342 processing to avoid need the passes to be re-entrant. */
2345 cgraph_add_new_function (tree fndecl
, bool lowered
)
2347 struct cgraph_node
*node
;
/* How much work is done immediately depends on how far overall compilation
   has progressed when the new function is inserted.  */
2348 switch (cgraph_state
)
2350 case CGRAPH_STATE_CONSTRUCTION
:
2351 /* Just enqueue function to be processed at nearest occurrence. */
2352 node
= cgraph_node (fndecl
);
/* Chain the node onto the cgraph_new_nodes work list.  */
2353 node
->next_needed
= cgraph_new_nodes
;
2355 node
->lowered
= true;
2356 cgraph_new_nodes
= node
;
2359 case CGRAPH_STATE_IPA
:
2360 case CGRAPH_STATE_IPA_SSA
:
2361 case CGRAPH_STATE_EXPANSION
:
2362 /* Bring the function into finalized state and enqueue for later
2363 analyzing and compilation. */
2364 node
= cgraph_node (fndecl
);
2365 node
->local
.local
= false;
2366 node
->local
.finalized
= true;
/* Mark both reachable and needed: no API-breaking optimization may be
   performed on this node (see the header comment above).  */
2367 node
->reachable
= node
->needed
= true;
/* If inserted during expansion and not yet lowered, lower the body and
   run the early local passes right now.  */
2368 if (!lowered
&& cgraph_state
== CGRAPH_STATE_EXPANSION
)
2370 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
2371 current_function_decl
= fndecl
;
2372 gimple_register_cfg_hooks ();
2373 tree_lowering_passes (fndecl
);
2374 bitmap_obstack_initialize (NULL
);
/* Early local passes are only run on bodies not yet in SSA form.  */
2375 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl
)))
2376 execute_pass_list (pass_early_local_passes
.pass
.sub
);
2377 bitmap_obstack_release (NULL
);
2379 current_function_decl
= NULL
;
/* Record the node as lowered and enqueue it for later processing.  */
2384 node
->lowered
= true;
2385 node
->next_needed
= cgraph_new_nodes
;
2386 cgraph_new_nodes
= node
;
2389 case CGRAPH_STATE_FINISHED
:
2390 /* At the very end of compilation we have to do all the work up
/* ...i.e. lower the body, run the early local passes, and compile it
   immediately via tree_rest_of_compilation.  */
2392 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
2393 current_function_decl
= fndecl
;
2394 gimple_register_cfg_hooks ();
2396 tree_lowering_passes (fndecl
);
2397 bitmap_obstack_initialize (NULL
);
2398 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl
)))
2399 execute_pass_list (pass_early_local_passes
.pass
.sub
);
2400 bitmap_obstack_release (NULL
);
2401 tree_rest_of_compilation (fndecl
);
2403 current_function_decl
= NULL
;
2407 /* Set a personality if required and we already passed EH lowering. */
/* NOTE(review): the first conjunct of this condition (presumably a check of
   "lowered") is missing from this excerpt -- confirm against the full file.  */
2409 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl
))
2410 == eh_personality_lang
))
2411 DECL_FUNCTION_PERSONALITY (fndecl
) = lang_hooks
.eh_personality ();
2414 /* Return true if NODE can be made local for API change.
2415 Extern inline functions and C++ COMDAT functions can be made local
2416 at the expense of possible code size growth if function is used in multiple
2417 compilation units. */
2419 cgraph_node_can_be_local_p (struct cgraph_node
*node
)
2421 return (!node
->needed
&& !node
->address_taken
2422 && ((DECL_COMDAT (node
->decl
) && !node
->same_comdat_group
)
2423 || !node
->local
.externally_visible
));
/* NOTE(review): this excerpt is missing several structural lines (braces
   around the FUNCTION_DECL branch, and presumably early "return"s after the
   !DECL_RTL_SET_P and GET_CODE checks); code is left byte-identical and only
   comments are added.  Confirm structure against the full file.  */
2426 /* Make DECL local. FIXME: We shouldn't need to mess with rtl this early,
2427 but other code such as notice_global_symbol generates rtl. */
2429 cgraph_make_decl_local (tree decl
)
/* For variables, clear DECL_COMMON.  */
2433 if (TREE_CODE (decl
) == VAR_DECL
)
2434 DECL_COMMON (decl
) = 0;
/* For functions, clear the COMDAT/weak/external linkage flags.  */
2435 else if (TREE_CODE (decl
) == FUNCTION_DECL
)
2437 DECL_COMDAT (decl
) = 0;
2438 DECL_COMDAT_GROUP (decl
) = 0;
2439 DECL_WEAK (decl
) = 0;
2440 DECL_EXTERNAL (decl
) = 0;
/* In every case the decl stops being public.  */
2444 TREE_PUBLIC (decl
) = 0;
/* Only update rtl flags when rtl already exists for the decl.  */
2445 if (!DECL_RTL_SET_P (decl
))
2448 /* Update rtl flags. */
2449 make_decl_rtl (decl
);
2451 rtl
= DECL_RTL (decl
);
2455 symbol
= XEXP (rtl
, 0);
2456 if (GET_CODE (symbol
) != SYMBOL_REF
)
/* Keep the symbol's weak bit in sync with the (now cleared) DECL_WEAK.  */
2459 SYMBOL_REF_WEAK (symbol
) = DECL_WEAK (decl
);
2462 /* Bring NODE local. */
2464 cgraph_make_node_local (struct cgraph_node
*node
)
2466 gcc_assert (cgraph_node_can_be_local_p (node
));
2467 if (DECL_COMDAT (node
->decl
) || DECL_EXTERNAL (node
->decl
))
2469 struct cgraph_node
*alias
;
2470 cgraph_make_decl_local (node
->decl
);
2472 for (alias
= node
->same_body
; alias
; alias
= alias
->next
)
2473 cgraph_make_decl_local (alias
->decl
);
2475 node
->local
.externally_visible
= false;
2476 node
->local
.local
= true;
2477 gcc_assert (cgraph_function_body_availability (node
) == AVAIL_LOCAL
);
2481 /* Set TREE_NOTHROW on NODE's decl and on same_body aliases of NODE
2482 if any to NOTHROW. */
2485 cgraph_set_nothrow_flag (struct cgraph_node
*node
, bool nothrow
)
2487 struct cgraph_node
*alias
;
2488 TREE_NOTHROW (node
->decl
) = nothrow
;
2489 for (alias
= node
->same_body
; alias
; alias
= alias
->next
)
2490 TREE_NOTHROW (alias
->decl
) = nothrow
;
2493 /* Set TREE_READONLY on NODE's decl and on same_body aliases of NODE
2494 if any to READONLY. */
2497 cgraph_set_readonly_flag (struct cgraph_node
*node
, bool readonly
)
2499 struct cgraph_node
*alias
;
2500 TREE_READONLY (node
->decl
) = readonly
;
2501 for (alias
= node
->same_body
; alias
; alias
= alias
->next
)
2502 TREE_READONLY (alias
->decl
) = readonly
;
2505 /* Set DECL_PURE_P on NODE's decl and on same_body aliases of NODE
2509 cgraph_set_pure_flag (struct cgraph_node
*node
, bool pure
)
2511 struct cgraph_node
*alias
;
2512 DECL_PURE_P (node
->decl
) = pure
;
2513 for (alias
= node
->same_body
; alias
; alias
= alias
->next
)
2514 DECL_PURE_P (alias
->decl
) = pure
;
2517 /* Set DECL_LOOPING_CONST_OR_PURE_P on NODE's decl and on
2518 same_body aliases of NODE if any to LOOPING_CONST_OR_PURE. */
2521 cgraph_set_looping_const_or_pure_flag (struct cgraph_node
*node
,
2522 bool looping_const_or_pure
)
2524 struct cgraph_node
*alias
;
2525 DECL_LOOPING_CONST_OR_PURE_P (node
->decl
) = looping_const_or_pure
;
2526 for (alias
= node
->same_body
; alias
; alias
= alias
->next
)
2527 DECL_LOOPING_CONST_OR_PURE_P (alias
->decl
) = looping_const_or_pure
;
/* NOTE(review): this excerpt is missing structural lines (braces, "break;"
   statements, early "return"s for the !local / HOT / UNLIKELY cases, and the
   "continue;" for zero-frequency edges); code is left byte-identical and
   only comments are added.  Confirm structure against the full file.  */
2530 /* See if the frequency of NODE can be updated based on frequencies of its
2533 cgraph_propagate_frequency (struct cgraph_node
*node
)
/* Start optimistic: assume NODE may be promoted to UNLIKELY_EXECUTED or
   EXECUTED_ONCE, and let its callers disprove it.  */
2535 bool maybe_unlikely_executed
= true, maybe_executed_once
= true;
2536 struct cgraph_edge
*edge
;
/* Only local functions (all callers visible) can be reasoned about.  */
2537 if (!node
->local
.local
)
2539 gcc_assert (node
->analyzed
);
2540 if (node
->frequency
== NODE_FREQUENCY_HOT
)
2542 if (node
->frequency
== NODE_FREQUENCY_UNLIKELY_EXECUTED
)
2544 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2545 fprintf (dump_file
, "Processing frequency %s\n", cgraph_node_name (node
));
/* Walk the callers; stop early once both promotions are ruled out.  */
2546 for (edge
= node
->callers
;
2547 edge
&& (maybe_unlikely_executed
|| maybe_executed_once
);
2548 edge
= edge
->next_caller
)
/* Zero-frequency call sites do not constrain the callee.  */
2550 if (!edge
->frequency
)
2552 switch (edge
->caller
->frequency
)
2554 case NODE_FREQUENCY_UNLIKELY_EXECUTED
:
/* A caller executed once rules out UNLIKELY; if the call sits inside a
   loop it also rules out EXECUTED_ONCE.  */
2556 case NODE_FREQUENCY_EXECUTED_ONCE
:
2557 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2558 fprintf (dump_file
, " Called by %s that is executed once\n", cgraph_node_name (node
));
2559 maybe_unlikely_executed
= false;
2560 if (edge
->loop_nest
)
2562 maybe_executed_once
= false;
2563 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2564 fprintf (dump_file
, " Called in loop\n");
/* Any normal or hot caller rules out both promotions.  */
2567 case NODE_FREQUENCY_HOT
:
2568 case NODE_FREQUENCY_NORMAL
:
2569 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2570 fprintf (dump_file
, " Called by %s that is normal or hot\n", cgraph_node_name (node
));
2571 maybe_unlikely_executed
= false;
2572 maybe_executed_once
= false;
/* Apply whichever promotion survived the caller walk.  */
2576 if (maybe_unlikely_executed
)
2578 node
->frequency
= NODE_FREQUENCY_UNLIKELY_EXECUTED
;
2580 fprintf (dump_file
, "Node %s promoted to unlikely executed.\n", cgraph_node_name (node
));
2583 if (maybe_executed_once
&& node
->frequency
!= NODE_FREQUENCY_EXECUTED_ONCE
)
2585 node
->frequency
= NODE_FREQUENCY_EXECUTED_ONCE
;
2587 fprintf (dump_file
, "Node %s promoted to executed once.\n", cgraph_node_name (node
));
2593 /* Return true when NODE can not return or throw and thus
2594 it is safe to ignore its side effects for IPA analysis. */
2597 cgraph_node_cannot_return (struct cgraph_node
*node
)
2599 int flags
= flags_from_decl_or_type (node
->decl
);
2600 if (!flag_exceptions
)
2601 return (flags
& ECF_NORETURN
) != 0;
2603 return ((flags
& (ECF_NORETURN
| ECF_NOTHROW
))
2604 == (ECF_NORETURN
| ECF_NOTHROW
));
2607 /* Return true when call of E can not lead to return from caller
2608 and thus it is safe to ignore its side effects for IPA analysis
2609 when computing side effects of the caller.
2610 FIXME: We could actually mark all edges that have no reaching
2611 patch to EXIT_BLOCK_PTR or throw to get better results. */
2613 cgraph_edge_cannot_lead_to_return (struct cgraph_edge
*e
)
2615 if (cgraph_node_cannot_return (e
->caller
))
2617 if (e
->indirect_unknown_callee
)
2619 int flags
= e
->indirect_info
->ecf_flags
;
2620 if (!flag_exceptions
)
2621 return (flags
& ECF_NORETURN
) != 0;
2623 return ((flags
& (ECF_NORETURN
| ECF_NOTHROW
))
2624 == (ECF_NORETURN
| ECF_NOTHROW
));
2627 return cgraph_node_cannot_return (e
->callee
);
2630 #include "gt-cgraph.h"