1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains basic routines for manipulating the call graph.
24 The callgraph:
26 The call-graph is a data structure designed for inter-procedural optimization,
27 but it is also used in non-unit-at-a-time compilation to allow easier code
28 sharing.
30 The call-graph consists of nodes and edges represented via linked lists.
31 Each function (external or not) corresponds to a unique node.
33 The mapping from declarations to call-graph nodes is done using a hash table
34 based on DECL_UID. The call-graph nodes are created lazily by the
35 cgraph_node function when it is called for an unknown declaration.
37 The callgraph at the moment does not represent all indirect calls or calls
38 from other compilation units. The flag NEEDED is set for each node that may be
39 accessed in such an invisible way, and such a node shall be considered an entry
40 point to the callgraph.
42 On the other hand, the callgraph currently does contain some edges for
43 indirect calls with unknown callees, which can be accessed through the
44 indirect_calls field of a node. It should be noted, however, that at the
45 moment only calls which are potential candidates for indirect inlining are
46 added there.
48 Interprocedural information:
50 The callgraph is the place to store data needed for interprocedural optimization.
51 All data structures are divided into three components: local_info, which
52 is produced while analyzing the function; global_info, which is the result
53 of walking the whole callgraph at the end of compilation; and
54 rtl_info, used by the RTL backend to propagate data from already compiled
55 functions to their callers.
57 Moreover, each node has a uid which can be used to keep information in
58 on-the-side arrays. UIDs are reused and therefore reasonably dense.
60 Inlining plans:
62 The function inlining information is decided in advance and maintained
63 in the callgraph as a so-called inline plan.
64 For each inlined call, the callee's node is cloned to represent the
65 new function copy produced by the inliner.
66 Each inlined call gets a unique corresponding clone node of the callee
67 and the data structure is updated while inlining is performed, so
68 the clones are eliminated and their callee edges redirected to the
69 caller.
71 Each edge has an "inline_failed" field. When the field is set to NULL,
72 the call will be inlined. When it is non-NULL, it contains the reason
73 why inlining wasn't performed. */
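/* A minimal usage sketch (not part of the original file): given a
   FUNCTION_DECL, look up its node lazily and walk the direct call edges,
   printing why each call was not inlined.  It relies only on accessors
   defined below (cgraph_node, cgraph_node_name, cgraph_inline_failed_string);
   `some_fndecl' is a placeholder for any FUNCTION_DECL known to the front end.

     struct cgraph_node *node = cgraph_node (some_fndecl);
     struct cgraph_edge *e;

     for (e = node->callees; e; e = e->next_callee)
       if (e->inline_failed)
	 fprintf (stderr, "%s -> %s: %s\n",
		  cgraph_node_name (e->caller),
		  cgraph_node_name (e->callee),
		  cgraph_inline_failed_string (e->inline_failed));
       else
	 fprintf (stderr, "%s inlined into %s\n",
		  cgraph_node_name (e->callee),
		  cgraph_node_name (e->caller));  */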
75 #include "config.h"
76 #include "system.h"
77 #include "coretypes.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "tree-inline.h"
81 #include "langhooks.h"
82 #include "hashtab.h"
83 #include "toplev.h"
84 #include "flags.h"
85 #include "ggc.h"
86 #include "debug.h"
87 #include "target.h"
88 #include "basic-block.h"
89 #include "cgraph.h"
90 #include "output.h"
91 #include "intl.h"
92 #include "gimple.h"
93 #include "tree-dump.h"
94 #include "tree-flow.h"
95 #include "value-prof.h"
96 #include "except.h"
97 #include "diagnostic-core.h"
98 #include "rtl.h"
99 #include "ipa-utils.h"
100 #include "lto-streamer.h"
102 static void cgraph_node_remove_callers (struct cgraph_node *node);
103 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
104 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
106 /* Hash table used to convert declarations into nodes. */
107 static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
108 /* Hash table used to convert assembler names into nodes. */
109 static GTY((param_is (struct cgraph_node))) htab_t assembler_name_hash;
111 /* The linked list of cgraph nodes. */
112 struct cgraph_node *cgraph_nodes;
114 /* Queue of cgraph nodes scheduled to be lowered. */
115 struct cgraph_node *cgraph_nodes_queue;
117 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
118 secondary queue used during optimization to accommodate passes that
119 may generate new functions that need to be optimized and expanded. */
120 struct cgraph_node *cgraph_new_nodes;
122 /* Number of nodes in existence. */
123 int cgraph_n_nodes;
125 /* Maximal uid used in cgraph nodes. */
126 int cgraph_max_uid;
128 /* Maximal uid used in cgraph edges. */
129 int cgraph_edge_max_uid;
131 /* Maximal pid used for profiling */
132 int cgraph_max_pid;
134 /* Set when whole unit has been analyzed so we can access global info. */
135 bool cgraph_global_info_ready = false;
137 /* What state callgraph is in right now. */
138 enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;
140 /* Set when the cgraph is fully built and the basic flags are computed. */
141 bool cgraph_function_flags_ready = false;
143 /* Linked list of cgraph asm nodes. */
144 struct cgraph_asm_node *cgraph_asm_nodes;
146 /* Last node in cgraph_asm_nodes. */
147 static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;
149 /* The order index of the next cgraph node to be created. This is
150 used so that we can sort the cgraph nodes in order by when we saw
151 them, to support -fno-toplevel-reorder. */
152 int cgraph_order;
154 /* List of hooks triggered on cgraph_edge events. */
155 struct cgraph_edge_hook_list {
156 cgraph_edge_hook hook;
157 void *data;
158 struct cgraph_edge_hook_list *next;
161 /* List of hooks triggered on cgraph_node events. */
162 struct cgraph_node_hook_list {
163 cgraph_node_hook hook;
164 void *data;
165 struct cgraph_node_hook_list *next;
168 /* List of hooks triggered on events involving two cgraph_edges. */
169 struct cgraph_2edge_hook_list {
170 cgraph_2edge_hook hook;
171 void *data;
172 struct cgraph_2edge_hook_list *next;
175 /* List of hooks triggered on events involving two cgraph_nodes. */
176 struct cgraph_2node_hook_list {
177 cgraph_2node_hook hook;
178 void *data;
179 struct cgraph_2node_hook_list *next;
182 /* List of hooks triggered when an edge is removed. */
183 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
184 /* List of hooks triggered when a node is removed. */
185 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
186 /* List of hooks triggered when an edge is duplicated. */
187 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
188 /* List of hooks triggered when a node is duplicated. */
189 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
190 /* List of hooks triggered when a function is inserted. */
191 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
193 /* Head of a linked list of unused (freed) call graph nodes.
194 Do not GTY((delete)) this list so UIDs get reliably recycled. */
195 static GTY(()) struct cgraph_node *free_nodes;
196 /* Head of a linked list of unused (freed) call graph edges.
197 Do not GTY((delete)) this list so UIDs get reliably recycled. */
198 static GTY(()) struct cgraph_edge *free_edges;
200 /* Macros to access the next item in the list of free cgraph nodes and
201 edges. */
202 #define NEXT_FREE_NODE(NODE) (NODE)->next
203 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
205 /* Register HOOK to be called with DATA on each removed edge. */
206 struct cgraph_edge_hook_list *
207 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
209 struct cgraph_edge_hook_list *entry;
210 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
212 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
213 entry->hook = hook;
214 entry->data = data;
215 entry->next = NULL;
216 while (*ptr)
217 ptr = &(*ptr)->next;
218 *ptr = entry;
219 return entry;
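/* A hedged usage sketch (not in the original source): an IPA pass that keeps
   per-edge data on the side can register a removal hook so its records are
   dropped together with the edge.  The callback signature follows the
   cgraph_edge_hook typedef from cgraph.h; `my_edge_data_holder' and the
   callback are hypothetical names.

     static void
     my_edge_removal_callback (struct cgraph_edge *e, void *data)
     {
       struct my_edge_data_holder *holder
	 = (struct my_edge_data_holder *) data;
       // Forget anything recorded about E in HOLDER.
     }

     static struct cgraph_edge_hook_list *my_edge_removal_hook;
     ...
     my_edge_removal_hook
       = cgraph_add_edge_removal_hook (my_edge_removal_callback, holder);  */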
222 /* Remove ENTRY from the list of hooks called on removing edges. */
223 void
224 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
226 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
228 while (*ptr != entry)
229 ptr = &(*ptr)->next;
230 *ptr = entry->next;
231 free (entry);
234 /* Call all edge removal hooks. */
235 static void
236 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
238 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
239 while (entry)
241 entry->hook (e, entry->data);
242 entry = entry->next;
246 /* Register HOOK to be called with DATA on each removed node. */
247 struct cgraph_node_hook_list *
248 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
250 struct cgraph_node_hook_list *entry;
251 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
253 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
254 entry->hook = hook;
255 entry->data = data;
256 entry->next = NULL;
257 while (*ptr)
258 ptr = &(*ptr)->next;
259 *ptr = entry;
260 return entry;
263 /* Remove ENTRY from the list of hooks called on removing nodes. */
264 void
265 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
267 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
269 while (*ptr != entry)
270 ptr = &(*ptr)->next;
271 *ptr = entry->next;
272 free (entry);
275 /* Call all node removal hooks. */
276 static void
277 cgraph_call_node_removal_hooks (struct cgraph_node *node)
279 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
280 while (entry)
282 entry->hook (node, entry->data);
283 entry = entry->next;
287 /* Register HOOK to be called with DATA on each inserted node. */
288 struct cgraph_node_hook_list *
289 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
291 struct cgraph_node_hook_list *entry;
292 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
294 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
295 entry->hook = hook;
296 entry->data = data;
297 entry->next = NULL;
298 while (*ptr)
299 ptr = &(*ptr)->next;
300 *ptr = entry;
301 return entry;
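/* Usage sketch (illustrative only): passes that maintain per-function
   summaries typically register an insertion hook at startup and remove it
   when the summaries are freed, e.g.

     static struct cgraph_node_hook_list *function_insertion_hook_holder;

     function_insertion_hook_holder
       = cgraph_add_function_insertion_hook (&my_add_new_function, NULL);
     ...
     cgraph_remove_function_insertion_hook (function_insertion_hook_holder);

   where `my_add_new_function' is a hypothetical callback with the
   cgraph_node_hook signature (struct cgraph_node *, void *).  */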
304 /* Remove ENTRY from the list of hooks called on inserted nodes. */
305 void
306 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
308 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
310 while (*ptr != entry)
311 ptr = &(*ptr)->next;
312 *ptr = entry->next;
313 free (entry);
316 /* Call all node insertion hooks. */
317 void
318 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
320 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
321 while (entry)
323 entry->hook (node, entry->data);
324 entry = entry->next;
328 /* Register HOOK to be called with DATA on each duplicated edge. */
329 struct cgraph_2edge_hook_list *
330 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
332 struct cgraph_2edge_hook_list *entry;
333 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
335 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
336 entry->hook = hook;
337 entry->data = data;
338 entry->next = NULL;
339 while (*ptr)
340 ptr = &(*ptr)->next;
341 *ptr = entry;
342 return entry;
345 /* Remove ENTRY from the list of hooks called on duplicating edges. */
346 void
347 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
349 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
351 while (*ptr != entry)
352 ptr = &(*ptr)->next;
353 *ptr = entry->next;
354 free (entry);
357 /* Call all edge duplication hooks. */
358 static void
359 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
360 struct cgraph_edge *cs2)
362 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
363 while (entry)
365 entry->hook (cs1, cs2, entry->data);
366 entry = entry->next;
370 /* Register HOOK to be called with DATA on each duplicated node. */
371 struct cgraph_2node_hook_list *
372 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
374 struct cgraph_2node_hook_list *entry;
375 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
377 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
378 entry->hook = hook;
379 entry->data = data;
380 entry->next = NULL;
381 while (*ptr)
382 ptr = &(*ptr)->next;
383 *ptr = entry;
384 return entry;
387 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
388 void
389 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
391 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
393 while (*ptr != entry)
394 ptr = &(*ptr)->next;
395 *ptr = entry->next;
396 free (entry);
399 /* Call all node duplication hooks. */
400 static void
401 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
402 struct cgraph_node *node2)
404 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
405 while (entry)
407 entry->hook (node1, node2, entry->data);
408 entry = entry->next;
412 /* Returns a hash code for P. */
414 static hashval_t
415 hash_node (const void *p)
417 const struct cgraph_node *n = (const struct cgraph_node *) p;
418 return (hashval_t) DECL_UID (n->decl);
422 /* Returns nonzero if P1 and P2 are equal. */
424 static int
425 eq_node (const void *p1, const void *p2)
427 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
428 const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
429 return DECL_UID (n1->decl) == DECL_UID (n2->decl);
432 /* Allocate new callgraph node. */
434 static inline struct cgraph_node *
435 cgraph_allocate_node (void)
437 struct cgraph_node *node;
439 if (free_nodes)
441 node = free_nodes;
442 free_nodes = NEXT_FREE_NODE (node);
444 else
446 node = ggc_alloc_cleared_cgraph_node ();
447 node->uid = cgraph_max_uid++;
450 return node;
453 /* Allocate new callgraph node and insert it into basic data structures. */
455 static struct cgraph_node *
456 cgraph_create_node (void)
458 struct cgraph_node *node = cgraph_allocate_node ();
460 node->next = cgraph_nodes;
461 node->pid = -1;
462 node->order = cgraph_order++;
463 if (cgraph_nodes)
464 cgraph_nodes->previous = node;
465 node->previous = NULL;
466 node->global.estimated_growth = INT_MIN;
467 node->frequency = NODE_FREQUENCY_NORMAL;
468 ipa_empty_ref_list (&node->ref_list);
469 cgraph_nodes = node;
470 cgraph_n_nodes++;
471 return node;
474 /* Return cgraph node assigned to DECL. Create new one when needed. */
476 struct cgraph_node *
477 cgraph_node (tree decl)
479 struct cgraph_node key, *node, **slot;
481 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
483 if (!cgraph_hash)
484 cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
486 key.decl = decl;
488 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
490 if (*slot)
492 node = *slot;
493 if (node->same_body_alias)
494 node = node->same_body;
495 return node;
498 node = cgraph_create_node ();
499 node->decl = decl;
500 *slot = node;
501 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
503 node->origin = cgraph_node (DECL_CONTEXT (decl));
504 node->next_nested = node->origin->nested;
505 node->origin->nested = node;
507 if (assembler_name_hash)
509 void **aslot;
510 tree name = DECL_ASSEMBLER_NAME (decl);
512 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
513 decl_assembler_name_hash (name),
514 INSERT);
515 /* We can have multiple declarations with the same assembler name. For C++
516 it is __builtin_strlen and strlen, for instance. Do we need to
517 record them all? The original implementation marked just the first one,
518 so let's hope for the best. */
519 if (*aslot == NULL)
520 *aslot = node;
522 return node;
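/* A small sketch of the intended use (not part of the original file):
   callers simply ask for the node of a FUNCTION_DECL and the node is
   created on first use; asking again returns the same node.

     struct cgraph_node *n1 = cgraph_node (current_function_decl);
     struct cgraph_node *n2 = cgraph_node (current_function_decl);
     gcc_assert (n1 == n2);

   Use cgraph_get_node (below) instead when the caller must not create a
   node as a side effect.  */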
525 /* Mark ALIAS as an alias to DECL. */
527 static struct cgraph_node *
528 cgraph_same_body_alias_1 (tree alias, tree decl)
530 struct cgraph_node key, *alias_node, *decl_node, **slot;
532 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
533 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
534 decl_node = cgraph_node (decl);
536 key.decl = alias;
538 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
540 /* If the cgraph_node has already been created, fail. */
541 if (*slot)
542 return NULL;
544 alias_node = cgraph_allocate_node ();
545 alias_node->decl = alias;
546 alias_node->same_body_alias = 1;
547 alias_node->same_body = decl_node;
548 alias_node->previous = NULL;
549 if (decl_node->same_body)
550 decl_node->same_body->previous = alias_node;
551 alias_node->next = decl_node->same_body;
552 alias_node->thunk.alias = decl;
553 decl_node->same_body = alias_node;
554 *slot = alias_node;
555 return alias_node;
558 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
559 Same body aliases are output whenever the body of DECL is output,
560 and cgraph_node (ALIAS) transparently returns cgraph_node (DECL). */
562 bool
563 cgraph_same_body_alias (tree alias, tree decl)
565 #ifndef ASM_OUTPUT_DEF
566 /* If aliases aren't supported by the assembler, fail. */
567 return false;
568 #endif
570 /*gcc_assert (!assembler_name_hash);*/
572 return cgraph_same_body_alias_1 (alias, decl) != NULL;
575 void
576 cgraph_add_thunk (tree alias, tree decl, bool this_adjusting,
577 HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
578 tree virtual_offset,
579 tree real_alias)
581 struct cgraph_node *node = cgraph_get_node (alias);
583 if (node)
585 gcc_assert (node->local.finalized);
586 gcc_assert (!node->same_body);
587 cgraph_remove_node (node);
590 node = cgraph_same_body_alias_1 (alias, decl);
591 gcc_assert (node);
592 #ifdef ENABLE_CHECKING
593 gcc_assert (!virtual_offset
594 || tree_int_cst_equal (virtual_offset, size_int (virtual_value)));
595 #endif
596 node->thunk.fixed_offset = fixed_offset;
597 node->thunk.this_adjusting = this_adjusting;
598 node->thunk.virtual_value = virtual_value;
599 node->thunk.virtual_offset_p = virtual_offset != NULL;
600 node->thunk.alias = real_alias;
601 node->thunk.thunk_p = true;
604 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
605 is assigned. */
607 struct cgraph_node *
608 cgraph_get_node_or_alias (tree decl)
610 struct cgraph_node key, *node = NULL, **slot;
612 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
614 if (!cgraph_hash)
615 return NULL;
617 key.decl = decl;
619 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
620 NO_INSERT);
622 if (slot && *slot)
623 node = *slot;
624 return node;
627 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
628 is assigned. */
630 struct cgraph_node *
631 cgraph_get_node (tree decl)
633 struct cgraph_node key, *node = NULL, **slot;
635 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
637 if (!cgraph_hash)
638 return NULL;
640 key.decl = decl;
642 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
643 NO_INSERT);
645 if (slot && *slot)
647 node = *slot;
648 if (node->same_body_alias)
649 node = node->same_body;
651 return node;
654 /* Insert already constructed node into hashtable. */
656 void
657 cgraph_insert_node_to_hashtable (struct cgraph_node *node)
659 struct cgraph_node **slot;
661 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
663 gcc_assert (!*slot);
664 *slot = node;
667 /* Returns a hash code for P. */
669 static hashval_t
670 hash_node_by_assembler_name (const void *p)
672 const struct cgraph_node *n = (const struct cgraph_node *) p;
673 return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
676 /* Returns nonzero if P1 and P2 are equal. */
678 static int
679 eq_assembler_name (const void *p1, const void *p2)
681 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
682 const_tree name = (const_tree)p2;
683 return (decl_assembler_name_equal (n1->decl, name));
686 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
687 Return NULL if there's no such node. */
689 struct cgraph_node *
690 cgraph_node_for_asm (tree asmname)
692 struct cgraph_node *node;
693 void **slot;
695 if (!assembler_name_hash)
697 assembler_name_hash =
698 htab_create_ggc (10, hash_node_by_assembler_name, eq_assembler_name,
699 NULL);
700 for (node = cgraph_nodes; node; node = node->next)
701 if (!node->global.inlined_to)
703 tree name = DECL_ASSEMBLER_NAME (node->decl);
704 slot = htab_find_slot_with_hash (assembler_name_hash, name,
705 decl_assembler_name_hash (name),
706 INSERT);
707 /* We can have multiple declarations with the same assembler name. For C++
708 it is __builtin_strlen and strlen, for instance. Do we need to
709 record them all? The original implementation marked just the first one,
710 so let's hope for the best. */
711 if (!*slot)
712 *slot = node;
713 if (node->same_body)
715 struct cgraph_node *alias;
717 for (alias = node->same_body; alias; alias = alias->next)
719 hashval_t hash;
720 name = DECL_ASSEMBLER_NAME (alias->decl);
721 hash = decl_assembler_name_hash (name);
722 slot = htab_find_slot_with_hash (assembler_name_hash, name,
723 hash, INSERT);
724 if (!*slot)
725 *slot = alias;
731 slot = htab_find_slot_with_hash (assembler_name_hash, asmname,
732 decl_assembler_name_hash (asmname),
733 NO_INSERT);
735 if (slot)
737 node = (struct cgraph_node *) *slot;
738 if (node->same_body_alias)
739 node = node->same_body;
740 return node;
742 return NULL;
745 /* Returns a hash value for the call statement of cgraph_edge X. */
747 static hashval_t
748 edge_hash (const void *x)
750 return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
753 /* Return nonzero if the call statement of cgraph_edge X is the statement Y. */
755 static int
756 edge_eq (const void *x, const void *y)
758 return ((const struct cgraph_edge *) x)->call_stmt == y;
761 /* Add call graph edge E to call site hash of its caller. */
763 static inline void
764 cgraph_add_edge_to_call_site_hash (struct cgraph_edge *e)
766 void **slot;
767 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
768 e->call_stmt,
769 htab_hash_pointer (e->call_stmt),
770 INSERT);
771 gcc_assert (!*slot);
772 *slot = e;
775 /* Return the callgraph edge representing the GIMPLE_CALL statement
776 CALL_STMT. */
778 struct cgraph_edge *
779 cgraph_edge (struct cgraph_node *node, gimple call_stmt)
781 struct cgraph_edge *e, *e2;
782 int n = 0;
784 if (node->call_site_hash)
785 return (struct cgraph_edge *)
786 htab_find_with_hash (node->call_site_hash, call_stmt,
787 htab_hash_pointer (call_stmt));
789 /* This loop may turn out to be a performance problem. In that case, adding
790 hash tables to call nodes with very many edges is probably the best
791 solution. It is not a good idea to add a pointer into the CALL_EXPR itself,
792 because we want to make it possible to have multiple cgraph nodes representing
793 different clones of the same body before the body is actually cloned. */
794 for (e = node->callees; e; e = e->next_callee)
796 if (e->call_stmt == call_stmt)
797 break;
798 n++;
801 if (!e)
802 for (e = node->indirect_calls; e; e = e->next_callee)
804 if (e->call_stmt == call_stmt)
805 break;
806 n++;
809 if (n > 100)
811 node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
812 for (e2 = node->callees; e2; e2 = e2->next_callee)
813 cgraph_add_edge_to_call_site_hash (e2);
814 for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
815 cgraph_add_edge_to_call_site_hash (e2);
818 return e;
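/* Usage sketch (illustrative, with assumed surrounding context): while
   scanning a function body with a gimple statement iterator GSI, each call
   statement can be mapped back to its callgraph edge.

     struct cgraph_node *node = cgraph_node (current_function_decl);
     gimple stmt = gsi_stmt (gsi);

     if (is_gimple_call (stmt))
       {
	 struct cgraph_edge *e = cgraph_edge (node, stmt);
	 if (e && !e->indirect_unknown_callee)
	   {
	     // e->callee is the known callee of this call site.
	   }
       }  */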
822 /* Change field call_stmt of edge E to NEW_STMT. */
824 void
825 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
827 tree decl;
829 if (e->caller->call_site_hash)
831 htab_remove_elt_with_hash (e->caller->call_site_hash,
832 e->call_stmt,
833 htab_hash_pointer (e->call_stmt));
836 e->call_stmt = new_stmt;
837 if (e->indirect_unknown_callee
838 && (decl = gimple_call_fndecl (new_stmt)))
840 /* Constant propagation (and possibly also inlining?) can turn an
841 indirect call into a direct one. */
842 struct cgraph_node *new_callee = cgraph_node (decl);
844 cgraph_make_edge_direct (e, new_callee);
847 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
848 e->can_throw_external = stmt_can_throw_external (new_stmt);
849 pop_cfun ();
850 if (e->caller->call_site_hash)
851 cgraph_add_edge_to_call_site_hash (e);
854 /* Like cgraph_set_call_stmt but walk the clone tree and update all
855 clones sharing the same function body. */
857 void
858 cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
859 gimple old_stmt, gimple new_stmt)
861 struct cgraph_node *node;
862 struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
864 if (edge)
865 cgraph_set_call_stmt (edge, new_stmt);
867 node = orig->clones;
868 if (node)
869 while (node != orig)
871 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
872 if (edge)
873 cgraph_set_call_stmt (edge, new_stmt);
874 if (node->clones)
875 node = node->clones;
876 else if (node->next_sibling_clone)
877 node = node->next_sibling_clone;
878 else
880 while (node != orig && !node->next_sibling_clone)
881 node = node->clone_of;
882 if (node != orig)
883 node = node->next_sibling_clone;
888 /* Like cgraph_create_edge, but walk the clone tree and update all clones sharing
889 the same function body. If clones already have an edge for OLD_STMT, only
890 update the edge the same way cgraph_set_call_stmt_including_clones does.
892 TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
893 frequencies of the clones. */
895 void
896 cgraph_create_edge_including_clones (struct cgraph_node *orig,
897 struct cgraph_node *callee,
898 gimple old_stmt,
899 gimple stmt, gcov_type count,
900 int freq, int loop_depth,
901 cgraph_inline_failed_t reason)
903 struct cgraph_node *node;
904 struct cgraph_edge *edge;
906 if (!cgraph_edge (orig, stmt))
908 edge = cgraph_create_edge (orig, callee, stmt, count, freq, loop_depth);
909 edge->inline_failed = reason;
912 node = orig->clones;
913 if (node)
914 while (node != orig)
916 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
918 /* It is possible that a clone already contains the edge while the
919 master didn't. Either we promoted an indirect call into a direct
920 call in the clone, or we are processing clones of an unreachable
921 master where the edges have been removed. */
922 if (edge)
923 cgraph_set_call_stmt (edge, stmt);
924 else if (!cgraph_edge (node, stmt))
926 edge = cgraph_create_edge (node, callee, stmt, count,
927 freq, loop_depth);
928 edge->inline_failed = reason;
931 if (node->clones)
932 node = node->clones;
933 else if (node->next_sibling_clone)
934 node = node->next_sibling_clone;
935 else
937 while (node != orig && !node->next_sibling_clone)
938 node = node->clone_of;
939 if (node != orig)
940 node = node->next_sibling_clone;
945 /* Give initial reasons why inlining would fail on EDGE. This gets either
946 nullified or usually overwritten by more precise reasons later. */
948 static void
949 initialize_inline_failed (struct cgraph_edge *e)
951 struct cgraph_node *callee = e->callee;
953 if (e->indirect_unknown_callee)
954 e->inline_failed = CIF_INDIRECT_UNKNOWN_CALL;
955 else if (!callee->analyzed)
956 e->inline_failed = CIF_BODY_NOT_AVAILABLE;
957 else if (callee->local.redefined_extern_inline)
958 e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
959 else if (!callee->local.inlinable)
960 e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
961 else if (e->call_stmt && gimple_call_cannot_inline_p (e->call_stmt))
962 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
963 else
964 e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
967 /* Allocate a cgraph_edge structure and fill it with data according to the
968 parameters of which only CALLEE can be NULL (when creating an indirect call
969 edge). */
971 static struct cgraph_edge *
972 cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
973 gimple call_stmt, gcov_type count, int freq, int nest)
975 struct cgraph_edge *edge;
977 /* LTO does not actually have access to the call_stmt since these
978 have not been loaded yet. */
979 if (call_stmt)
981 #ifdef ENABLE_CHECKING
982 /* This is a rather pricey check, possibly triggering construction of
983 the call stmt hashtable. */
984 gcc_assert (!cgraph_edge (caller, call_stmt));
985 #endif
987 gcc_assert (is_gimple_call (call_stmt));
990 if (free_edges)
992 edge = free_edges;
993 free_edges = NEXT_FREE_EDGE (edge);
995 else
997 edge = ggc_alloc_cgraph_edge ();
998 edge->uid = cgraph_edge_max_uid++;
1001 edge->aux = NULL;
1002 edge->caller = caller;
1003 edge->callee = callee;
1004 edge->prev_caller = NULL;
1005 edge->next_caller = NULL;
1006 edge->prev_callee = NULL;
1007 edge->next_callee = NULL;
1009 edge->count = count;
1010 gcc_assert (count >= 0);
1011 edge->frequency = freq;
1012 gcc_assert (freq >= 0);
1013 gcc_assert (freq <= CGRAPH_FREQ_MAX);
1014 edge->loop_nest = nest;
1016 edge->call_stmt = call_stmt;
1017 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
1018 edge->can_throw_external
1019 = call_stmt ? stmt_can_throw_external (call_stmt) : false;
1020 pop_cfun ();
1021 edge->call_stmt_cannot_inline_p =
1022 (call_stmt ? gimple_call_cannot_inline_p (call_stmt) : false);
1023 if (call_stmt && caller->call_site_hash)
1024 cgraph_add_edge_to_call_site_hash (edge);
1026 edge->indirect_info = NULL;
1027 edge->indirect_inlining_edge = 0;
1029 return edge;
1032 /* Create edge from CALLER to CALLEE in the cgraph. */
1034 struct cgraph_edge *
1035 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
1036 gimple call_stmt, gcov_type count, int freq, int nest)
1038 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
1039 count, freq, nest);
1041 edge->indirect_unknown_callee = 0;
1042 initialize_inline_failed (edge);
1044 edge->next_caller = callee->callers;
1045 if (callee->callers)
1046 callee->callers->prev_caller = edge;
1047 edge->next_callee = caller->callees;
1048 if (caller->callees)
1049 caller->callees->prev_callee = edge;
1050 caller->callees = edge;
1051 callee->callers = edge;
1053 return edge;
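/* Illustrative sketch (not from the original file): a pass that builds a
   brand new call statement is responsible for creating the matching edge.
   Count and frequency are usually derived from the basic block holding the
   statement, as cgraph_update_edges_for_call_stmt_node does below;
   `new_call_stmt' and `caller_node' are placeholders supplied by the pass.

     basic_block bb = gimple_bb (new_call_stmt);
     struct cgraph_edge *e
       = cgraph_create_edge (caller_node,
			     cgraph_node (gimple_call_fndecl (new_call_stmt)),
			     new_call_stmt,
			     bb->count,
			     compute_call_stmt_bb_frequency
			       (current_function_decl, bb),
			     bb->loop_depth);  */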
1057 /* Create an indirect edge with a yet-undetermined callee for the call
1058 statement CALL_STMT. ECF_FLAGS describe the properties of the call;
1059 the formal-parameter index used by indirect inlining starts out unknown (-1). */
1061 struct cgraph_edge *
1062 cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
1063 int ecf_flags,
1064 gcov_type count, int freq, int nest)
1066 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
1067 count, freq, nest);
1069 edge->indirect_unknown_callee = 1;
1070 initialize_inline_failed (edge);
1072 edge->indirect_info = ggc_alloc_cleared_cgraph_indirect_call_info ();
1073 edge->indirect_info->param_index = -1;
1074 edge->indirect_info->ecf_flags = ecf_flags;
1076 edge->next_callee = caller->indirect_calls;
1077 if (caller->indirect_calls)
1078 caller->indirect_calls->prev_callee = edge;
1079 caller->indirect_calls = edge;
1081 return edge;
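/* Sketch of a typical call (illustrative): for a call through a function
   pointer there is no callee node yet, so the builder records an indirect
   edge instead, passing the ECF flags taken from the statement.
   `caller_node', `call_stmt' and `bb' are assumed to be provided by the
   surrounding code.

     struct cgraph_edge *ie
       = cgraph_create_indirect_edge (caller_node, call_stmt,
				      gimple_call_flags (call_stmt),
				      bb->count,
				      compute_call_stmt_bb_frequency
					(current_function_decl, bb),
				      bb->loop_depth);  */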
1084 /* Remove the edge E from the list of the callers of the callee. */
1086 static inline void
1087 cgraph_edge_remove_callee (struct cgraph_edge *e)
1089 gcc_assert (!e->indirect_unknown_callee);
1090 if (e->prev_caller)
1091 e->prev_caller->next_caller = e->next_caller;
1092 if (e->next_caller)
1093 e->next_caller->prev_caller = e->prev_caller;
1094 if (!e->prev_caller)
1095 e->callee->callers = e->next_caller;
1098 /* Remove the edge E from the list of the callees of the caller. */
1100 static inline void
1101 cgraph_edge_remove_caller (struct cgraph_edge *e)
1103 if (e->prev_callee)
1104 e->prev_callee->next_callee = e->next_callee;
1105 if (e->next_callee)
1106 e->next_callee->prev_callee = e->prev_callee;
1107 if (!e->prev_callee)
1109 if (e->indirect_unknown_callee)
1110 e->caller->indirect_calls = e->next_callee;
1111 else
1112 e->caller->callees = e->next_callee;
1114 if (e->caller->call_site_hash)
1115 htab_remove_elt_with_hash (e->caller->call_site_hash,
1116 e->call_stmt,
1117 htab_hash_pointer (e->call_stmt));
1120 /* Put the edge onto the free list. */
1122 static void
1123 cgraph_free_edge (struct cgraph_edge *e)
1125 int uid = e->uid;
1127 /* Clear out the edge so we do not dangle pointers. */
1128 memset (e, 0, sizeof (*e));
1129 e->uid = uid;
1130 NEXT_FREE_EDGE (e) = free_edges;
1131 free_edges = e;
1134 /* Remove the edge E in the cgraph. */
1136 void
1137 cgraph_remove_edge (struct cgraph_edge *e)
1139 /* Call all edge removal hooks. */
1140 cgraph_call_edge_removal_hooks (e);
1142 if (!e->indirect_unknown_callee)
1143 /* Remove from callers list of the callee. */
1144 cgraph_edge_remove_callee (e);
1146 /* Remove from callees list of the callers. */
1147 cgraph_edge_remove_caller (e);
1149 /* Put the edge onto the free list. */
1150 cgraph_free_edge (e);
1153 /* Set callee of call graph edge E and add it to the corresponding set of
1154 callers. */
1156 static void
1157 cgraph_set_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1159 e->prev_caller = NULL;
1160 if (n->callers)
1161 n->callers->prev_caller = e;
1162 e->next_caller = n->callers;
1163 n->callers = e;
1164 e->callee = n;
1167 /* Redirect callee of E to N. The function does not update underlying
1168 call expression. */
1170 void
1171 cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1173 /* Remove from callers list of the current callee. */
1174 cgraph_edge_remove_callee (e);
1176 /* Insert to callers list of the new callee. */
1177 cgraph_set_edge_callee (e, n);
1180 /* Make an indirect EDGE with an unknown callee an ordinary edge leading to
1181 CALLEE. */
1183 void
1184 cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee)
1186 edge->indirect_unknown_callee = 0;
1188 /* Get the edge out of the indirect edge list. */
1189 if (edge->prev_callee)
1190 edge->prev_callee->next_callee = edge->next_callee;
1191 if (edge->next_callee)
1192 edge->next_callee->prev_callee = edge->prev_callee;
1193 if (!edge->prev_callee)
1194 edge->caller->indirect_calls = edge->next_callee;
1196 /* Put it into the normal callee list */
1197 edge->prev_callee = NULL;
1198 edge->next_callee = edge->caller->callees;
1199 if (edge->caller->callees)
1200 edge->caller->callees->prev_callee = edge;
1201 edge->caller->callees = edge;
1203 /* Insert to callers list of the new callee. */
1204 cgraph_set_edge_callee (edge, callee);
1206 /* We need to re-determine the inlining status of the edge. */
1207 initialize_inline_failed (edge);
1211 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1212 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1213 of OLD_STMT if it was previously a call statement. */
1215 static void
1216 cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
1217 gimple old_stmt, tree old_call, gimple new_stmt)
1219 tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fndecl (new_stmt) : 0;
1221 /* If we see only indirect calls, there is nothing to update. */
1222 if (!new_call && !old_call)
1223 return;
1224 /* See if we turned an indirect call into a direct call, or folded a call to
1225 one builtin into a different builtin. */
1226 if (old_call != new_call)
1228 struct cgraph_edge *e = cgraph_edge (node, old_stmt);
1229 struct cgraph_edge *ne = NULL;
1230 gcov_type count;
1231 int frequency;
1232 int loop_nest;
1234 if (e)
1236 /* See if the edge is already there and has the correct callee. It
1237 might be so because of indirect inlining has already updated
1238 it. */
1239 if (new_call && e->callee && e->callee->decl == new_call)
1240 return;
1242 /* Otherwise remove edge and create new one; we can't simply redirect
1243 since function has changed, so inline plan and other information
1244 attached to edge is invalid. */
1245 count = e->count;
1246 frequency = e->frequency;
1247 loop_nest = e->loop_nest;
1248 cgraph_remove_edge (e);
1250 else
1252 /* We are seeing new direct call; compute profile info based on BB. */
1253 basic_block bb = gimple_bb (new_stmt);
1254 count = bb->count;
1255 frequency = compute_call_stmt_bb_frequency (current_function_decl,
1256 bb);
1257 loop_nest = bb->loop_depth;
1260 if (new_call)
1262 ne = cgraph_create_edge (node, cgraph_node (new_call),
1263 new_stmt, count, frequency,
1264 loop_nest);
1265 gcc_assert (ne->inline_failed);
1268 /* We only updated the call stmt; update pointer in cgraph edge. */
1269 else if (old_stmt != new_stmt)
1270 cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
1273 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1274 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1275 of OLD_STMT before it was updated (updating can happen in place). */
1277 void
1278 cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
1280 struct cgraph_node *orig = cgraph_node (cfun->decl);
1281 struct cgraph_node *node;
1283 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1284 if (orig->clones)
1285 for (node = orig->clones; node != orig;)
1287 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1288 if (node->clones)
1289 node = node->clones;
1290 else if (node->next_sibling_clone)
1291 node = node->next_sibling_clone;
1292 else
1294 while (node != orig && !node->next_sibling_clone)
1295 node = node->clone_of;
1296 if (node != orig)
1297 node = node->next_sibling_clone;
1303 /* Remove all callees from the node. */
1305 void
1306 cgraph_node_remove_callees (struct cgraph_node *node)
1308 struct cgraph_edge *e, *f;
1310 /* It is sufficient to remove the edges from the lists of callers of
1311 the callees. The callee list of the node can be zapped with one
1312 assignment. */
1313 for (e = node->callees; e; e = f)
1315 f = e->next_callee;
1316 cgraph_call_edge_removal_hooks (e);
1317 if (!e->indirect_unknown_callee)
1318 cgraph_edge_remove_callee (e);
1319 cgraph_free_edge (e);
1321 for (e = node->indirect_calls; e; e = f)
1323 f = e->next_callee;
1324 cgraph_call_edge_removal_hooks (e);
1325 if (!e->indirect_unknown_callee)
1326 cgraph_edge_remove_callee (e);
1327 cgraph_free_edge (e);
1329 node->indirect_calls = NULL;
1330 node->callees = NULL;
1331 if (node->call_site_hash)
1333 htab_delete (node->call_site_hash);
1334 node->call_site_hash = NULL;
1338 /* Remove all callers from the node. */
1340 static void
1341 cgraph_node_remove_callers (struct cgraph_node *node)
1343 struct cgraph_edge *e, *f;
1345 /* It is sufficient to remove the edges from the lists of callees of
1346 the callers. The caller list of the node can be zapped with one
1347 assignment. */
1348 for (e = node->callers; e; e = f)
1350 f = e->next_caller;
1351 cgraph_call_edge_removal_hooks (e);
1352 cgraph_edge_remove_caller (e);
1353 cgraph_free_edge (e);
1355 node->callers = NULL;
1358 /* Release memory used to represent body of function NODE. */
1360 void
1361 cgraph_release_function_body (struct cgraph_node *node)
1363 if (DECL_STRUCT_FUNCTION (node->decl))
1365 tree old_decl = current_function_decl;
1366 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1367 if (cfun->gimple_df)
1369 current_function_decl = node->decl;
1370 delete_tree_ssa ();
1371 delete_tree_cfg_annotations ();
1372 cfun->eh = NULL;
1373 current_function_decl = old_decl;
1375 if (cfun->cfg)
1377 gcc_assert (dom_computed[0] == DOM_NONE);
1378 gcc_assert (dom_computed[1] == DOM_NONE);
1379 clear_edges ();
1381 if (cfun->value_histograms)
1382 free_histograms ();
1383 gcc_assert (!current_loops);
1384 pop_cfun();
1385 gimple_set_body (node->decl, NULL);
1386 VEC_free (ipa_opt_pass, heap,
1387 node->ipa_transforms_to_apply);
1388 /* The struct function hangs onto a lot of data that would leak if we didn't
1389 remove all pointers to it. */
1390 ggc_free (DECL_STRUCT_FUNCTION (node->decl));
1391 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1393 DECL_SAVED_TREE (node->decl) = NULL;
1394 /* If the node is abstract and needed, then do not clear DECL_INITIAL
1395 of its associated function declaration because it's
1396 needed to emit debug info later. */
1397 if (!node->abstract_and_needed)
1398 DECL_INITIAL (node->decl) = error_mark_node;
1401 /* Remove same body alias node. */
1403 void
1404 cgraph_remove_same_body_alias (struct cgraph_node *node)
1406 void **slot;
1407 int uid = node->uid;
1409 gcc_assert (node->same_body_alias);
1410 if (node->previous)
1411 node->previous->next = node->next;
1412 else
1413 node->same_body->same_body = node->next;
1414 if (node->next)
1415 node->next->previous = node->previous;
1416 node->next = NULL;
1417 node->previous = NULL;
1418 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1419 if (*slot == node)
1420 htab_clear_slot (cgraph_hash, slot);
1421 if (assembler_name_hash)
1423 tree name = DECL_ASSEMBLER_NAME (node->decl);
1424 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1425 decl_assembler_name_hash (name),
1426 NO_INSERT);
1427 if (slot && *slot == node)
1428 htab_clear_slot (assembler_name_hash, slot);
1431 /* Clear out the node to NULL all pointers and add the node to the free
1432 list. */
1433 memset (node, 0, sizeof(*node));
1434 node->uid = uid;
1435 NEXT_FREE_NODE (node) = free_nodes;
1436 free_nodes = node;
1439 /* Remove the node from cgraph. */
1441 void
1442 cgraph_remove_node (struct cgraph_node *node)
1444 void **slot;
1445 bool kill_body = false;
1446 struct cgraph_node *n;
1447 int uid = node->uid;
1449 cgraph_call_node_removal_hooks (node);
1450 cgraph_node_remove_callers (node);
1451 cgraph_node_remove_callees (node);
1452 ipa_remove_all_references (&node->ref_list);
1453 ipa_remove_all_refering (&node->ref_list);
1454 VEC_free (ipa_opt_pass, heap,
1455 node->ipa_transforms_to_apply);
1457 /* Incremental inlining accesses removed nodes stored in the postorder list. */
1459 node->needed = node->reachable = false;
1460 for (n = node->nested; n; n = n->next_nested)
1461 n->origin = NULL;
1462 node->nested = NULL;
1463 if (node->origin)
1465 struct cgraph_node **node2 = &node->origin->nested;
1467 while (*node2 != node)
1468 node2 = &(*node2)->next_nested;
1469 *node2 = node->next_nested;
1471 if (node->previous)
1472 node->previous->next = node->next;
1473 else
1474 cgraph_nodes = node->next;
1475 if (node->next)
1476 node->next->previous = node->previous;
1477 node->next = NULL;
1478 node->previous = NULL;
1479 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1480 if (*slot == node)
1482 struct cgraph_node *next_inline_clone;
1484 for (next_inline_clone = node->clones;
1485 next_inline_clone && next_inline_clone->decl != node->decl;
1486 next_inline_clone = next_inline_clone->next_sibling_clone)
1489 /* If there is an inline clone of the node being removed, we need
1490 to put it into the position of the removed node and reorganize all
1491 other clones to be based on it. */
1492 if (next_inline_clone)
1494 struct cgraph_node *n;
1495 struct cgraph_node *new_clones;
1497 *slot = next_inline_clone;
1499 /* Unlink inline clone from the list of clones of removed node. */
1500 if (next_inline_clone->next_sibling_clone)
1501 next_inline_clone->next_sibling_clone->prev_sibling_clone
1502 = next_inline_clone->prev_sibling_clone;
1503 if (next_inline_clone->prev_sibling_clone)
1505 gcc_assert (node->clones != next_inline_clone);
1506 next_inline_clone->prev_sibling_clone->next_sibling_clone
1507 = next_inline_clone->next_sibling_clone;
1509 else
1511 gcc_assert (node->clones == next_inline_clone);
1512 node->clones = next_inline_clone->next_sibling_clone;
1515 new_clones = node->clones;
1516 node->clones = NULL;
1518 /* Copy clone info. */
1519 next_inline_clone->clone = node->clone;
1521 /* Now place it into the clone tree at the same level as NODE. */
1522 next_inline_clone->clone_of = node->clone_of;
1523 next_inline_clone->prev_sibling_clone = NULL;
1524 next_inline_clone->next_sibling_clone = NULL;
1525 if (node->clone_of)
1527 if (node->clone_of->clones)
1528 node->clone_of->clones->prev_sibling_clone = next_inline_clone;
1529 next_inline_clone->next_sibling_clone = node->clone_of->clones;
1530 node->clone_of->clones = next_inline_clone;
1533 /* Merge the clone list. */
1534 if (new_clones)
1536 if (!next_inline_clone->clones)
1537 next_inline_clone->clones = new_clones;
1538 else
1540 n = next_inline_clone->clones;
1541 while (n->next_sibling_clone)
1542 n = n->next_sibling_clone;
1543 n->next_sibling_clone = new_clones;
1544 new_clones->prev_sibling_clone = n;
1548 /* Update clone_of pointers. */
1549 n = new_clones;
1550 while (n)
1552 n->clone_of = next_inline_clone;
1553 n = n->next_sibling_clone;
1556 else
1558 htab_clear_slot (cgraph_hash, slot);
1559 kill_body = true;
1563 if (node->prev_sibling_clone)
1564 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1565 else if (node->clone_of)
1566 node->clone_of->clones = node->next_sibling_clone;
1567 if (node->next_sibling_clone)
1568 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1569 if (node->clones)
1571 struct cgraph_node *n, *next;
1573 if (node->clone_of)
1575 for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
1576 n->clone_of = node->clone_of;
1577 n->clone_of = node->clone_of;
1578 n->next_sibling_clone = node->clone_of->clones;
1579 if (node->clone_of->clones)
1580 node->clone_of->clones->prev_sibling_clone = n;
1581 node->clone_of->clones = node->clones;
1583 else
1585 /* We are removing a node with clones. This makes the clones inconsistent,
1586 but assume they will be removed subsequently and just keep the clone
1587 tree intact. This can happen in unreachable function removal, since
1588 we remove unreachable functions in random order, not by a bottom-up
1589 walk of clone trees. */
1590 for (n = node->clones; n; n = next)
1592 next = n->next_sibling_clone;
1593 n->next_sibling_clone = NULL;
1594 n->prev_sibling_clone = NULL;
1595 n->clone_of = NULL;
1600 while (node->same_body)
1601 cgraph_remove_same_body_alias (node->same_body);
1603 if (node->same_comdat_group)
1605 struct cgraph_node *prev;
1606 for (prev = node->same_comdat_group;
1607 prev->same_comdat_group != node;
1608 prev = prev->same_comdat_group)
1610 if (node->same_comdat_group == prev)
1611 prev->same_comdat_group = NULL;
1612 else
1613 prev->same_comdat_group = node->same_comdat_group;
1614 node->same_comdat_group = NULL;
1617 /* While all the clones are removed after being processed, the function
1618 itself is kept in the cgraph even after it is compiled. Check whether
1619 we are done with this body and reclaim it proactively if this is the case. */
1621 if (!kill_body && *slot)
1623 struct cgraph_node *n = (struct cgraph_node *) *slot;
1624 if (!n->clones && !n->clone_of && !n->global.inlined_to
1625 && (cgraph_global_info_ready
1626 && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl)
1627 || n->in_other_partition)))
1628 kill_body = true;
1630 if (assembler_name_hash)
1632 tree name = DECL_ASSEMBLER_NAME (node->decl);
1633 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1634 decl_assembler_name_hash (name),
1635 NO_INSERT);
1636 /* Inline clones are not hashed. */
1637 if (slot && *slot == node)
1638 htab_clear_slot (assembler_name_hash, slot);
1641 if (kill_body)
1642 cgraph_release_function_body (node);
1643 node->decl = NULL;
1644 if (node->call_site_hash)
1646 htab_delete (node->call_site_hash);
1647 node->call_site_hash = NULL;
1649 cgraph_n_nodes--;
1651 /* Clear out the node to NULL all pointers and add the node to the free
1652 list. */
1653 memset (node, 0, sizeof(*node));
1654 node->uid = uid;
1655 NEXT_FREE_NODE (node) = free_nodes;
1656 free_nodes = node;
1659 /* Remove the node from the cgraph together with all inline clones inlined into it. */
1661 void
1662 cgraph_remove_node_and_inline_clones (struct cgraph_node *node)
1664 struct cgraph_edge *e, *next;
1665 for (e = node->callees; e; e = next)
1667 next = e->next_callee;
1668 if (!e->inline_failed)
1669 cgraph_remove_node_and_inline_clones (e->callee);
1671 cgraph_remove_node (node);
1674 /* Notify finalize_compilation_unit that given node is reachable. */
1676 void
1677 cgraph_mark_reachable_node (struct cgraph_node *node)
1679 if (!node->reachable && node->local.finalized)
1681 if (cgraph_global_info_ready)
1683 /* Verify that the function does not appear to be needed out of the blue
1684 during the optimization process. This can happen for extern
1685 inlines whose bodies were removed after inlining. */
1686 gcc_assert ((node->analyzed || node->in_other_partition
1687 || DECL_EXTERNAL (node->decl)));
1689 else
1690 notice_global_symbol (node->decl);
1691 node->reachable = 1;
1693 node->next_needed = cgraph_nodes_queue;
1694 cgraph_nodes_queue = node;
1698 /* Likewise indicate that a node is needed, i.e. reachable via some
1699 external means. */
1701 void
1702 cgraph_mark_needed_node (struct cgraph_node *node)
1704 node->needed = 1;
1705 gcc_assert (!node->global.inlined_to);
1706 cgraph_mark_reachable_node (node);
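/* Usage sketch (illustrative): front ends and the visibility code call this
   for functions that are reachable by means the callgraph cannot see, for
   example anything that may be referenced from outside the compilation unit:

     if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
       cgraph_mark_needed_node (cgraph_node (decl));

   The exact predicate above is an assumption for illustration only; the real
   policy lives in the callers (e.g. cgraphunit.c).  */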
1709 /* Likewise indicate that a node has its address taken. */
1711 void
1712 cgraph_mark_address_taken_node (struct cgraph_node *node)
1714 cgraph_mark_reachable_node (node);
1715 node->address_taken = 1;
1718 /* Return local info for the compiled function. */
1720 struct cgraph_local_info *
1721 cgraph_local_info (tree decl)
1723 struct cgraph_node *node;
1725 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1726 node = cgraph_node (decl);
1727 return &node->local;
1730 /* Return global info for the compiled function. */
1732 struct cgraph_global_info *
1733 cgraph_global_info (tree decl)
1735 struct cgraph_node *node;
1737 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1738 node = cgraph_node (decl);
1739 return &node->global;
1742 /* Return RTL info for the compiled function. */
1744 struct cgraph_rtl_info *
1745 cgraph_rtl_info (tree decl)
1747 struct cgraph_node *node;
1749 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1750 node = cgraph_node (decl);
1751 if (decl != current_function_decl
1752 && !TREE_ASM_WRITTEN (node->decl))
1753 return NULL;
1754 return &node->rtl;
1757 /* Return a string describing the failure REASON. */
1759 const char*
1760 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1762 #undef DEFCIFCODE
1763 #define DEFCIFCODE(code, string) string,
1765 static const char *cif_string_table[CIF_N_REASONS] = {
1766 #include "cif-code.def"
1769 /* Signedness of an enum type is implementation defined, so cast it
1770 to unsigned before testing. */
1771 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1772 return cif_string_table[reason];
1775 /* Return name of the node used in debug output. */
1776 const char *
1777 cgraph_node_name (struct cgraph_node *node)
1779 return lang_hooks.decl_printable_name (node->decl, 2);
1782 /* Names used to print out the availability enum. */
1783 const char * const cgraph_availability_names[] =
1784 {"unset", "not_available", "overwritable", "available", "local"};
1787 /* Dump call graph node NODE to file F. */
1789 void
1790 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1792 struct cgraph_edge *edge;
1793 int indirect_calls_count = 0;
1795 fprintf (f, "%s/%i(%i)", cgraph_node_name (node), node->uid,
1796 node->pid);
1797 dump_addr (f, " @", (void *)node);
1798 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
1799 fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
1800 if (node->global.inlined_to)
1801 fprintf (f, " (inline copy in %s/%i)",
1802 cgraph_node_name (node->global.inlined_to),
1803 node->global.inlined_to->uid);
1804 if (node->clone_of)
1805 fprintf (f, " (clone of %s/%i)",
1806 cgraph_node_name (node->clone_of),
1807 node->clone_of->uid);
1808 if (cgraph_function_flags_ready)
1809 fprintf (f, " availability:%s",
1810 cgraph_availability_names [cgraph_function_body_availability (node)]);
1811 if (node->analyzed)
1812 fprintf (f, " analyzed");
1813 if (node->in_other_partition)
1814 fprintf (f, " in_other_partition");
1815 if (node->count)
1816 fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
1817 (HOST_WIDEST_INT)node->count);
1818 if (node->local.inline_summary.self_time)
1819 fprintf (f, " %i time, %i benefit", node->local.inline_summary.self_time,
1820 node->local.inline_summary.time_inlining_benefit);
1821 if (node->global.time && node->global.time
1822 != node->local.inline_summary.self_time)
1823 fprintf (f, " (%i after inlining)", node->global.time);
1824 if (node->local.inline_summary.self_size)
1825 fprintf (f, " %i size, %i benefit", node->local.inline_summary.self_size,
1826 node->local.inline_summary.size_inlining_benefit);
1827 if (node->global.size && node->global.size
1828 != node->local.inline_summary.self_size)
1829 fprintf (f, " (%i after inlining)", node->global.size);
1830 if (node->local.inline_summary.estimated_self_stack_size)
1831 fprintf (f, " %i bytes stack usage", (int)node->local.inline_summary.estimated_self_stack_size);
1832 if (node->global.estimated_stack_size != node->local.inline_summary.estimated_self_stack_size)
1833 fprintf (f, " %i bytes after inlining", (int)node->global.estimated_stack_size);
1834 if (node->origin)
1835 fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
1836 if (node->needed)
1837 fprintf (f, " needed");
1838 if (node->address_taken)
1839 fprintf (f, " address_taken");
1840 else if (node->reachable)
1841 fprintf (f, " reachable");
1842 else if (node->reachable_from_other_partition)
1843 fprintf (f, " reachable_from_other_partition");
1844 if (gimple_has_body_p (node->decl))
1845 fprintf (f, " body");
1846 if (node->process)
1847 fprintf (f, " process");
1848 if (node->local.local)
1849 fprintf (f, " local");
1850 if (node->local.externally_visible)
1851 fprintf (f, " externally_visible");
1852 if (node->local.used_from_object_file)
1853 fprintf (f, " used_from_object_file");
1854 if (node->local.finalized)
1855 fprintf (f, " finalized");
1856 if (node->local.disregard_inline_limits)
1857 fprintf (f, " always_inline");
1858 else if (node->local.inlinable)
1859 fprintf (f, " inlinable");
1860 else if (node->local.versionable)
1861 fprintf (f, " versionable");
1862 if (node->local.redefined_extern_inline)
1863 fprintf (f, " redefined_extern_inline");
1864 if (TREE_ASM_WRITTEN (node->decl))
1865 fprintf (f, " asm_written");
1867 fprintf (f, "\n called by: ");
1868 for (edge = node->callers; edge; edge = edge->next_caller)
1870 fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
1871 edge->caller->uid);
1872 if (edge->count)
1873 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1874 (HOST_WIDEST_INT)edge->count);
1875 if (edge->frequency)
1876 fprintf (f, "(%.2f per call) ",
1877 edge->frequency / (double)CGRAPH_FREQ_BASE);
1878 if (!edge->inline_failed)
1879 fprintf(f, "(inlined) ");
1880 if (edge->indirect_inlining_edge)
1881 fprintf(f, "(indirect_inlining) ");
1882 if (edge->can_throw_external)
1883 fprintf(f, "(can throw external) ");
1886 fprintf (f, "\n calls: ");
1887 for (edge = node->callees; edge; edge = edge->next_callee)
1889 fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
1890 edge->callee->uid);
1891 if (!edge->inline_failed)
1892 fprintf(f, "(inlined) ");
1893 if (edge->indirect_inlining_edge)
1894 fprintf(f, "(indirect_inlining) ");
1895 if (edge->count)
1896 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1897 (HOST_WIDEST_INT)edge->count);
1898 if (edge->frequency)
1899 fprintf (f, "(%.2f per call) ",
1900 edge->frequency / (double)CGRAPH_FREQ_BASE);
1901 if (edge->loop_nest)
1902 fprintf (f, "(nested in %i loops) ", edge->loop_nest);
1903 if (edge->can_throw_external)
1904 fprintf(f, "(can throw external) ");
1906 fprintf (f, "\n");
1907 fprintf (f, " References: ");
1908 ipa_dump_references (f, &node->ref_list);
1909 fprintf (f, " Refering this function: ");
1910 ipa_dump_refering (f, &node->ref_list);
1912 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1913 indirect_calls_count++;
1914 if (indirect_calls_count)
1915 fprintf (f, " has %i outgoing edges for indirect calls.\n",
1916 indirect_calls_count);
1918 if (node->same_body)
1920 struct cgraph_node *n;
1921 fprintf (f, " aliases & thunks:");
1922 for (n = node->same_body; n; n = n->next)
1924 fprintf (f, " %s/%i", cgraph_node_name (n), n->uid);
1925 if (n->thunk.thunk_p)
1927 fprintf (f, " (thunk of %s fixed offset %i virtual value %i has "
1928 "virtual offset %i",
1929 lang_hooks.decl_printable_name (n->thunk.alias, 2),
1930 (int)n->thunk.fixed_offset,
1931 (int)n->thunk.virtual_value,
1932 (int)n->thunk.virtual_offset_p);
1933 fprintf (f, ")");
1935 if (DECL_ASSEMBLER_NAME_SET_P (n->decl))
1936 fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (n->decl)));
1938 fprintf (f, "\n");
1943 /* Dump call graph node NODE to stderr. */
1945 DEBUG_FUNCTION void
1946 debug_cgraph_node (struct cgraph_node *node)
1948 dump_cgraph_node (stderr, node);
1952 /* Dump the callgraph to file F. */
1954 void
1955 dump_cgraph (FILE *f)
1957 struct cgraph_node *node;
1959 fprintf (f, "callgraph:\n\n");
1960 for (node = cgraph_nodes; node; node = node->next)
1961 dump_cgraph_node (f, node);
1965 /* Dump the call graph to stderr. */
1967 DEBUG_FUNCTION void
1968 debug_cgraph (void)
1970 dump_cgraph (stderr);
1974 /* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables. */
1976 void
1977 change_decl_assembler_name (tree decl, tree name)
1979 struct cgraph_node *node;
1980 void **slot;
1981 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
1982 SET_DECL_ASSEMBLER_NAME (decl, name);
1983 else
1985 if (name == DECL_ASSEMBLER_NAME (decl))
1986 return;
1988 if (assembler_name_hash
1989 && TREE_CODE (decl) == FUNCTION_DECL
1990 && (node = cgraph_get_node_or_alias (decl)) != NULL)
1992 tree old_name = DECL_ASSEMBLER_NAME (decl);
1993 slot = htab_find_slot_with_hash (assembler_name_hash, old_name,
1994 decl_assembler_name_hash (old_name),
1995 NO_INSERT);
1996 /* Inline clones are not hashed. */
1997 if (slot && *slot == node)
1998 htab_clear_slot (assembler_name_hash, slot);
2000 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
2001 && DECL_RTL_SET_P (decl))
2002 warning (0, "%D renamed after being referenced in assembly", decl);
2004 SET_DECL_ASSEMBLER_NAME (decl, name);
2006 if (assembler_name_hash
2007 && TREE_CODE (decl) == FUNCTION_DECL
2008 && (node = cgraph_get_node_or_alias (decl)) != NULL)
2010 slot = htab_find_slot_with_hash (assembler_name_hash, name,
2011 decl_assembler_name_hash (name),
2012 INSERT);
2013 gcc_assert (!*slot);
2014 *slot = node;
2018 /* Add a top-level asm statement to the list. */
2020 struct cgraph_asm_node *
2021 cgraph_add_asm_node (tree asm_str)
2023 struct cgraph_asm_node *node;
2025 node = ggc_alloc_cleared_cgraph_asm_node ();
2026 node->asm_str = asm_str;
2027 node->order = cgraph_order++;
2028 node->next = NULL;
2029 if (cgraph_asm_nodes == NULL)
2030 cgraph_asm_nodes = node;
2031 else
2032 cgraph_asm_last_node->next = node;
2033 cgraph_asm_last_node = node;
2034 return node;
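/* Illustrative (hypothetical) use, not taken from any particular front end:
   code handling a file-scope asm ("...") statement could register it with

     tree str = build_string (strlen (text), text);
     cgraph_add_asm_node (str);

   so that the statement is later emitted in source order, as recorded by
   cgraph_order above.  */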
2037 /* Return true when the DECL can possibly be inlined. */
2038 bool
2039 cgraph_function_possibly_inlined_p (tree decl)
2041 if (!cgraph_global_info_ready)
2042 return !DECL_UNINLINABLE (decl);
2043 return DECL_POSSIBLY_INLINED (decl);
2046 /* Create a clone of edge E in the callgraph node N; the clone's call site is represented by CALL_STMT. */
2047 struct cgraph_edge *
2048 cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
2049 gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
2050 int freq_scale, int loop_nest, bool update_original)
2052 struct cgraph_edge *new_edge;
2053 gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
2054 gcov_type freq;
2056 /* We do not want to ignore loop nest after frequency drops to 0. */
2057 if (!freq_scale)
2058 freq_scale = 1;
2059 freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
2060 if (freq > CGRAPH_FREQ_MAX)
2061 freq = CGRAPH_FREQ_MAX;
2063 if (e->indirect_unknown_callee)
2065 tree decl;
2067 if (call_stmt && (decl = gimple_call_fndecl (call_stmt)))
2069 struct cgraph_node *callee = cgraph_node (decl);
2070 new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq,
2071 e->loop_nest + loop_nest);
2073 else
2075 new_edge = cgraph_create_indirect_edge (n, call_stmt,
2076 e->indirect_info->ecf_flags,
2077 count, freq,
2078 e->loop_nest + loop_nest);
2079 *new_edge->indirect_info = *e->indirect_info;
2082 else
2083 new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
2084 e->loop_nest + loop_nest);
2086 new_edge->inline_failed = e->inline_failed;
2087 new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
2088 new_edge->lto_stmt_uid = stmt_uid;
2089 if (update_original)
2091 e->count -= new_edge->count;
2092 if (e->count < 0)
2093 e->count = 0;
2095 cgraph_call_edge_duplication_hooks (e, new_edge);
2096 return new_edge;
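/* Worked example of the scaling above (numbers invented): with
   COUNT_SCALE == REG_BR_PROB_BASE / 4, an edge whose count is 400 yields a
   clone with count 100, and when UPDATE_ORIGINAL is set the original edge
   keeps the remaining 300.  The frequency is scaled by
   FREQ_SCALE / CGRAPH_FREQ_BASE and clamped to CGRAPH_FREQ_MAX.  Note also
   that an indirect edge whose new CALL_STMT already has a known fndecl is
   cloned as a direct edge to that callee.  */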
2099 /* Create a node representing a clone of N executed COUNT times. Decrease
2100 the execution counts of the original node too.
2101 The new clone will have its decl set to DECL, which may or may not be the
2102 same as the decl of N.
2104 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
2105 function's profile to reflect the fact that part of the execution is now
2106 handled by the clone. */
2107 struct cgraph_node *
2108 cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
2109 int loop_nest, bool update_original,
2110 VEC(cgraph_edge_p,heap) *redirect_callers)
2112 struct cgraph_node *new_node = cgraph_create_node ();
2113 struct cgraph_edge *e;
2114 gcov_type count_scale;
2115 unsigned i;
2117 new_node->decl = decl;
2118 new_node->origin = n->origin;
2119 if (new_node->origin)
2121 new_node->next_nested = new_node->origin->nested;
2122 new_node->origin->nested = new_node;
2124 new_node->analyzed = n->analyzed;
2125 new_node->local = n->local;
2126 new_node->local.externally_visible = false;
2127 new_node->local.used_from_object_file = false;
2128 new_node->local.local = true;
2129 new_node->local.vtable_method = false;
2130 new_node->global = n->global;
2131 new_node->rtl = n->rtl;
2132 new_node->count = count;
2133 new_node->frequency = n->frequency;
2134 new_node->clone = n->clone;
2135 new_node->clone.tree_map = 0;
2136 if (n->count)
2138 if (new_node->count > n->count)
2139 count_scale = REG_BR_PROB_BASE;
2140 else
2141 count_scale = new_node->count * REG_BR_PROB_BASE / n->count;
2143 else
2144 count_scale = 0;
2145 if (update_original)
2147 n->count -= count;
2148 if (n->count < 0)
2149 n->count = 0;
2152 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
2154 /* Redirect calls to the old version node to point to its new
2155 version. */
2156 cgraph_redirect_edge_callee (e, new_node);
2160 for (e = n->callees;e; e=e->next_callee)
2161 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
2162 count_scale, freq, loop_nest, update_original);
2164 for (e = n->indirect_calls; e; e = e->next_callee)
2165 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
2166 count_scale, freq, loop_nest, update_original);
2167 ipa_clone_references (new_node, NULL, &n->ref_list);
2169 new_node->next_sibling_clone = n->clones;
2170 if (n->clones)
2171 n->clones->prev_sibling_clone = new_node;
2172 n->clones = new_node;
2173 new_node->clone_of = n;
2175 cgraph_call_node_duplication_hooks (n, new_node);
2176 if (n->decl != decl)
2178 struct cgraph_node **slot;
2179 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, new_node, INSERT);
2180 gcc_assert (!*slot);
2181 *slot = new_node;
2182 if (assembler_name_hash)
2184 void **aslot;
2185 tree name = DECL_ASSEMBLER_NAME (decl);
2187 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
2188 decl_assembler_name_hash (name),
2189 INSERT);
2190 gcc_assert (!*aslot);
2191 *aslot = new_node;
2194 return new_node;
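/* Note added for clarity: only when DECL differs from N's decl is the clone
   entered into cgraph_hash (and assembler_name_hash); inline clones reuse the
   original declaration and therefore stay out of the hash tables.  */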
2197 /* Create a new name for a clone of DECL, appending SUFFIX. Returns an identifier. */
2199 static GTY(()) unsigned int clone_fn_id_num;
2201 tree
2202 clone_function_name (tree decl, const char *suffix)
2204 tree name = DECL_ASSEMBLER_NAME (decl);
2205 size_t len = IDENTIFIER_LENGTH (name);
2206 char *tmp_name, *prefix;
2208 prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
2209 memcpy (prefix, IDENTIFIER_POINTER (name), len);
2210 strcpy (prefix + len + 1, suffix);
2211 #ifndef NO_DOT_IN_LABEL
2212 prefix[len] = '.';
2213 #elif !defined NO_DOLLAR_IN_LABEL
2214 prefix[len] = '$';
2215 #else
2216 prefix[len] = '_';
2217 #endif
2218 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
2219 return get_identifier (tmp_name);
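/* For example (illustrative; the separator and numbering are target-dependent
   via NO_DOT_IN_LABEL and ASM_FORMAT_PRIVATE_NAME), cloning a function whose
   assembler name is "foo" with SUFFIX "constprop" typically yields an
   identifier such as "foo.constprop.0".  */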
2222 /* Create a callgraph node clone with a new declaration. The actual body will
2223 be copied later, at compilation time.
2225 TODO: after merging in ipa-sra, use function call notes instead of the
2226 args_to_skip bitmap interface.
2228 struct cgraph_node *
2229 cgraph_create_virtual_clone (struct cgraph_node *old_node,
2230 VEC(cgraph_edge_p,heap) *redirect_callers,
2231 VEC(ipa_replace_map_p,gc) *tree_map,
2232 bitmap args_to_skip,
2233 const char * suffix)
2235 tree old_decl = old_node->decl;
2236 struct cgraph_node *new_node = NULL;
2237 tree new_decl;
2238 size_t i;
2239 struct ipa_replace_map *map;
2241 #ifdef ENABLE_CHECKING
2242 if (!flag_wpa)
2243 gcc_assert (tree_versionable_function_p (old_decl));
2244 #endif
2246 /* Make a new FUNCTION_DECL tree node */
2247 if (!args_to_skip)
2248 new_decl = copy_node (old_decl);
2249 else
2250 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
2251 DECL_STRUCT_FUNCTION (new_decl) = NULL;
2253 /* Generate a new name for the new version. */
2254 DECL_NAME (new_decl) = clone_function_name (old_decl, suffix);
2255 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2256 SET_DECL_RTL (new_decl, NULL);
2258 new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
2259 CGRAPH_FREQ_BASE, 0, false,
2260 redirect_callers);
2261 /* Update the properties.
2262 Make the clone visible only within this translation unit. Make sure
2263 that it is not weak either.
2264 ??? We cannot use COMDAT linkage because there is no
2265 ABI support for this. */
2266 DECL_EXTERNAL (new_node->decl) = 0;
2267 if (DECL_ONE_ONLY (old_decl))
2268 DECL_SECTION_NAME (new_node->decl) = NULL;
2269 DECL_COMDAT_GROUP (new_node->decl) = 0;
2270 TREE_PUBLIC (new_node->decl) = 0;
2271 DECL_COMDAT (new_node->decl) = 0;
2272 DECL_WEAK (new_node->decl) = 0;
2273 new_node->clone.tree_map = tree_map;
2274 new_node->clone.args_to_skip = args_to_skip;
2275 for (i = 0; VEC_iterate (ipa_replace_map_p, tree_map, i, map); i++)
2277 tree var = map->new_tree;
2279 STRIP_NOPS (var);
2280 if (TREE_CODE (var) != ADDR_EXPR)
2281 continue;
2282 var = get_base_var (var);
2283 if (!var)
2284 continue;
2286 /* Record references for the future statement that will initialize the
2287 constant argument. */
2288 if (TREE_CODE (var) == FUNCTION_DECL)
2289 ipa_record_reference (new_node, NULL, cgraph_node (var),
2290 NULL, IPA_REF_ADDR, NULL);
2291 else if (TREE_CODE (var) == VAR_DECL)
2292 ipa_record_reference (new_node, NULL, NULL, varpool_node (var),
2293 IPA_REF_ADDR, NULL);
2295 if (!args_to_skip)
2296 new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
2297 else if (old_node->clone.combined_args_to_skip)
2299 int newi = 0, oldi = 0;
2300 tree arg;
2301 bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
2302 struct cgraph_node *orig_node;
2303 for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
2305 for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = DECL_CHAIN (arg), oldi++)
2307 if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
2309 bitmap_set_bit (new_args_to_skip, oldi);
2310 continue;
2312 if (bitmap_bit_p (args_to_skip, newi))
2313 bitmap_set_bit (new_args_to_skip, oldi);
2314 newi++;
2316 new_node->clone.combined_args_to_skip = new_args_to_skip;
2318 else
2319 new_node->clone.combined_args_to_skip = args_to_skip;
2320 new_node->local.externally_visible = 0;
2321 new_node->local.used_from_object_file = 0;
2322 new_node->local.local = 1;
2323 new_node->lowered = true;
2324 new_node->reachable = true;
2327 return new_node;
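/* Worked example of the args_to_skip merging above (argument names are
   invented): if the ultimate original function takes (a, b, c, d) and
   OLD_NODE's combined_args_to_skip is {1} (b already dropped, so OLD_NODE
   takes (a, c, d)), then a new ARGS_TO_SKIP of {2} refers to d and the merged
   combined_args_to_skip becomes {1, 3} in terms of the original parameter
   positions.  */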
2330 /* NODE is no longer a nested function; update the cgraph accordingly. */
2331 void
2332 cgraph_unnest_node (struct cgraph_node *node)
2334 struct cgraph_node **node2 = &node->origin->nested;
2335 gcc_assert (node->origin);
2337 while (*node2 != node)
2338 node2 = &(*node2)->next_nested;
2339 *node2 = node->next_nested;
2340 node->origin = NULL;
2343 /* Return function availability. See cgraph.h for description of individual
2344 return values. */
2345 enum availability
2346 cgraph_function_body_availability (struct cgraph_node *node)
2348 enum availability avail;
2349 gcc_assert (cgraph_function_flags_ready);
2350 if (!node->analyzed)
2351 avail = AVAIL_NOT_AVAILABLE;
2352 else if (node->local.local)
2353 avail = AVAIL_LOCAL;
2354 else if (!node->local.externally_visible)
2355 avail = AVAIL_AVAILABLE;
2356 /* Inline functions are safe to be analyzed even if their symbol can
2357 be overwritten at runtime. It is not meaningful to enforce any sane
2358 behaviour when an inline function is replaced by a different body. */
2359 else if (DECL_DECLARED_INLINE_P (node->decl))
2360 avail = AVAIL_AVAILABLE;
2362 /* If the function can be overwritten, return OVERWRITABLE. Take
2363 care of at least two notable extensions - the COMDAT functions
2364 used to share template instantiations in C++ (this is symmetric
2365 to the code in cp_cannot_inline_tree_fn and probably should be shared,
2366 with the inlinability hooks completely eliminated).
2368 ??? Does the C++ one definition rule allow us to always return
2369 AVAIL_AVAILABLE here? That would be a good reason to preserve this
2370 bit. */
2372 else if (DECL_REPLACEABLE_P (node->decl) && !DECL_EXTERNAL (node->decl))
2373 avail = AVAIL_OVERWRITABLE;
2374 else avail = AVAIL_AVAILABLE;
2376 return avail;
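/* In summary, the chain above yields: not analyzed -> AVAIL_NOT_AVAILABLE;
   local -> AVAIL_LOCAL; not externally visible, or declared inline ->
   AVAIL_AVAILABLE; replaceable and not DECL_EXTERNAL -> AVAIL_OVERWRITABLE;
   everything else -> AVAIL_AVAILABLE.  */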
2379 /* Add the function FNDECL to the call graph.
2380 Unlike cgraph_finalize_function, this function is intended to be used
2381 by the middle end and allows insertion of a new function at an arbitrary
2382 point of compilation. The function can be in high, low or SSA form
2383 GIMPLE.
2385 The function is assumed to be reachable and to have its address taken (so
2386 no API-breaking optimizations are performed on it).
2388 The main work done by this function is to enqueue the function for later
2389 processing, to avoid the need for the passes to be re-entrant. */
2391 void
2392 cgraph_add_new_function (tree fndecl, bool lowered)
2394 struct cgraph_node *node;
2395 switch (cgraph_state)
2397 case CGRAPH_STATE_CONSTRUCTION:
2398 /* Just enqueue the function to be processed at the nearest occurrence. */
2399 node = cgraph_node (fndecl);
2400 node->next_needed = cgraph_new_nodes;
2401 if (lowered)
2402 node->lowered = true;
2403 cgraph_new_nodes = node;
2404 break;
2406 case CGRAPH_STATE_IPA:
2407 case CGRAPH_STATE_IPA_SSA:
2408 case CGRAPH_STATE_EXPANSION:
2409 /* Bring the function into the finalized state and enqueue it for later
2410 analysis and compilation. */
2411 node = cgraph_node (fndecl);
2412 node->local.local = false;
2413 node->local.finalized = true;
2414 node->reachable = node->needed = true;
2415 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
2417 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2418 current_function_decl = fndecl;
2419 gimple_register_cfg_hooks ();
2420 tree_lowering_passes (fndecl);
2421 bitmap_obstack_initialize (NULL);
2422 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2423 execute_pass_list (pass_early_local_passes.pass.sub);
2424 bitmap_obstack_release (NULL);
2425 pop_cfun ();
2426 current_function_decl = NULL;
2428 lowered = true;
2430 if (lowered)
2431 node->lowered = true;
2432 node->next_needed = cgraph_new_nodes;
2433 cgraph_new_nodes = node;
2434 break;
2436 case CGRAPH_STATE_FINISHED:
2437 /* At the very end of compilation we have to do all the work up
2438 to expansion. */
2439 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2440 current_function_decl = fndecl;
2441 gimple_register_cfg_hooks ();
2442 if (!lowered)
2443 tree_lowering_passes (fndecl);
2444 bitmap_obstack_initialize (NULL);
2445 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2446 execute_pass_list (pass_early_local_passes.pass.sub);
2447 bitmap_obstack_release (NULL);
2448 tree_rest_of_compilation (fndecl);
2449 pop_cfun ();
2450 current_function_decl = NULL;
2451 break;
2454 /* Set a personality if required and we already passed EH lowering. */
2455 if (lowered
2456 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
2457 == eh_personality_lang))
2458 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
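/* Illustrative (hypothetical) use of the function above: a middle-end pass
   that has just built the GIMPLE body of a synthesized helper could register
   it with

     cgraph_add_new_function (helper_fndecl, false);

   letting the switch above lower it and, if the cgraph is already finished,
   fully compile it.  */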
2461 /* Return true if NODE can be made local for an API change.
2462 Extern inline functions and C++ COMDAT functions can be made local
2463 at the expense of possible code size growth if the function is used in
2464 multiple compilation units. */
2465 bool
2466 cgraph_node_can_be_local_p (struct cgraph_node *node)
2468 return (!node->needed && !node->address_taken
2469 && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
2470 || !node->local.externally_visible));
2473 /* Make DECL local. FIXME: We shouldn't need to mess with rtl this early,
2474 but other code such as notice_global_symbol generates rtl. */
2475 void
2476 cgraph_make_decl_local (tree decl)
2478 rtx rtl, symbol;
2480 if (TREE_CODE (decl) == VAR_DECL)
2481 DECL_COMMON (decl) = 0;
2482 else gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
2484 if (DECL_COMDAT (decl))
2486 /* It is possible that we are linking against a library defining the same
2487 COMDAT function. To avoid a conflict we need to rename our local copy of
2488 the function in case WHOPR partitioning decides to make it hidden in order
2489 to avoid cross-partition references. */
2490 if (flag_wpa)
2492 const char *old_name;
2494 old_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2495 if (TREE_CODE (decl) == FUNCTION_DECL)
2497 struct cgraph_node *node = cgraph_get_node_or_alias (decl);
2498 change_decl_assembler_name (decl,
2499 clone_function_name (decl, "local"));
2500 if (node->local.lto_file_data)
2501 lto_record_renamed_decl (node->local.lto_file_data,
2502 old_name,
2503 IDENTIFIER_POINTER
2504 (DECL_ASSEMBLER_NAME (decl)));
2506 else if (TREE_CODE (decl) == VAR_DECL)
2508 struct varpool_node *vnode = varpool_get_node (decl);
2509 /* change_decl_assembler_name will warn here on vtables because the
2510 C++ frontend still sets TREE_SYMBOL_REFERENCED on them. */
2511 SET_DECL_ASSEMBLER_NAME (decl,
2512 clone_function_name (decl, "local"));
2513 if (vnode->lto_file_data)
2514 lto_record_renamed_decl (vnode->lto_file_data,
2515 old_name,
2516 IDENTIFIER_POINTER
2517 (DECL_ASSEMBLER_NAME (decl)));
2520 DECL_SECTION_NAME (decl) = 0;
2521 DECL_COMDAT (decl) = 0;
2523 DECL_COMDAT_GROUP (decl) = 0;
2524 DECL_WEAK (decl) = 0;
2525 DECL_EXTERNAL (decl) = 0;
2526 TREE_PUBLIC (decl) = 0;
2527 if (!DECL_RTL_SET_P (decl))
2528 return;
2530 /* Update rtl flags. */
2531 make_decl_rtl (decl);
2533 rtl = DECL_RTL (decl);
2534 if (!MEM_P (rtl))
2535 return;
2537 symbol = XEXP (rtl, 0);
2538 if (GET_CODE (symbol) != SYMBOL_REF)
2539 return;
2541 SYMBOL_REF_WEAK (symbol) = DECL_WEAK (decl);
2544 /* Make NODE local. */
2545 void
2546 cgraph_make_node_local (struct cgraph_node *node)
2548 gcc_assert (cgraph_node_can_be_local_p (node));
2549 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2551 struct cgraph_node *alias;
2552 cgraph_make_decl_local (node->decl);
2554 for (alias = node->same_body; alias; alias = alias->next)
2555 cgraph_make_decl_local (alias->decl);
2557 node->local.externally_visible = false;
2558 node->local.local = true;
2559 gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
2563 /* Set TREE_NOTHROW on NODE's decl and on the same_body aliases of NODE,
2564 if any, to NOTHROW. */
2566 void
2567 cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
2569 struct cgraph_node *alias;
2570 TREE_NOTHROW (node->decl) = nothrow;
2571 for (alias = node->same_body; alias; alias = alias->next)
2572 TREE_NOTHROW (alias->decl) = nothrow;
2575 /* Set TREE_READONLY on NODE's decl and on the same_body aliases of NODE,
2576 if any, to READONLY. */
2578 void
2579 cgraph_set_readonly_flag (struct cgraph_node *node, bool readonly)
2581 struct cgraph_node *alias;
2582 TREE_READONLY (node->decl) = readonly;
2583 for (alias = node->same_body; alias; alias = alias->next)
2584 TREE_READONLY (alias->decl) = readonly;
2587 /* Set DECL_PURE_P on NODE's decl and on the same_body aliases of NODE,
2588 if any, to PURE. */
2590 void
2591 cgraph_set_pure_flag (struct cgraph_node *node, bool pure)
2593 struct cgraph_node *alias;
2594 DECL_PURE_P (node->decl) = pure;
2595 for (alias = node->same_body; alias; alias = alias->next)
2596 DECL_PURE_P (alias->decl) = pure;
2599 /* Set DECL_LOOPING_CONST_OR_PURE_P on NODE's decl and on the
2600 same_body aliases of NODE, if any, to LOOPING_CONST_OR_PURE. */
2602 void
2603 cgraph_set_looping_const_or_pure_flag (struct cgraph_node *node,
2604 bool looping_const_or_pure)
2606 struct cgraph_node *alias;
2607 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping_const_or_pure;
2608 for (alias = node->same_body; alias; alias = alias->next)
2609 DECL_LOOPING_CONST_OR_PURE_P (alias->decl) = looping_const_or_pure;
2612 /* See if the frequency of NODE can be updated based on frequencies of its
2613 callers. */
2614 bool
2615 cgraph_propagate_frequency (struct cgraph_node *node)
2617 bool maybe_unlikely_executed = true, maybe_executed_once = true;
2618 struct cgraph_edge *edge;
2619 if (!node->local.local)
2620 return false;
2621 gcc_assert (node->analyzed);
2622 if (node->frequency == NODE_FREQUENCY_HOT)
2623 return false;
2624 if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2625 return false;
2626 if (dump_file && (dump_flags & TDF_DETAILS))
2627 fprintf (dump_file, "Processing frequency %s\n", cgraph_node_name (node));
2628 for (edge = node->callers;
2629 edge && (maybe_unlikely_executed || maybe_executed_once);
2630 edge = edge->next_caller)
2632 if (!edge->frequency)
2633 continue;
2634 switch (edge->caller->frequency)
2636 case NODE_FREQUENCY_UNLIKELY_EXECUTED:
2637 break;
2638 case NODE_FREQUENCY_EXECUTED_ONCE:
2639 if (dump_file && (dump_flags & TDF_DETAILS))
2640 fprintf (dump_file, " Called by %s that is executed once\n", cgraph_node_name (edge->caller));
2641 maybe_unlikely_executed = false;
2642 if (edge->loop_nest)
2644 maybe_executed_once = false;
2645 if (dump_file && (dump_flags & TDF_DETAILS))
2646 fprintf (dump_file, " Called in loop\n");
2648 break;
2649 case NODE_FREQUENCY_HOT:
2650 case NODE_FREQUENCY_NORMAL:
2651 if (dump_file && (dump_flags & TDF_DETAILS))
2652 fprintf (dump_file, " Called by %s that is normal or hot\n", cgraph_node_name (edge->caller));
2653 maybe_unlikely_executed = false;
2654 maybe_executed_once = false;
2655 break;
2658 if (maybe_unlikely_executed)
2660 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
2661 if (dump_file)
2662 fprintf (dump_file, "Node %s promoted to unlikely executed.\n", cgraph_node_name (node));
2663 return true;
2665 if (maybe_executed_once && node->frequency != NODE_FREQUENCY_EXECUTED_ONCE)
2667 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2668 if (dump_file)
2669 fprintf (dump_file, "Node %s promoted to executed once.\n", cgraph_node_name (node));
2670 return true;
2672 return false;
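/* Example of the propagation above (illustrative): a local function whose
   callers are all NODE_FREQUENCY_EXECUTED_ONCE and whose call sites are not
   inside loops is promoted to executed-once (unless it already was); if
   instead every incoming edge has zero frequency or comes from an
   unlikely-executed caller, the function is promoted to unlikely-executed.  */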
2675 /* Return true when NODE cannot return or throw, and thus
2676 it is safe to ignore its side effects for IPA analysis. */
2678 bool
2679 cgraph_node_cannot_return (struct cgraph_node *node)
2681 int flags = flags_from_decl_or_type (node->decl);
2682 if (!flag_exceptions)
2683 return (flags & ECF_NORETURN) != 0;
2684 else
2685 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2686 == (ECF_NORETURN | ECF_NOTHROW));
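/* For instance, a node for a noreturn, nothrow function such as abort
   satisfies this predicate even with exceptions enabled, whereas a noreturn
   function that may throw qualifies only when -fexceptions is off.  */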
2689 /* Return true when the call of E cannot lead to a return from the caller,
2690 and thus it is safe to ignore its side effects for IPA analysis
2691 when computing the side effects of the caller.
2692 FIXME: We could actually mark all edges that have no reaching
2693 path to EXIT_BLOCK_PTR or throw to get better results. */
2694 bool
2695 cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
2697 if (cgraph_node_cannot_return (e->caller))
2698 return true;
2699 if (e->indirect_unknown_callee)
2701 int flags = e->indirect_info->ecf_flags;
2702 if (!flag_exceptions)
2703 return (flags & ECF_NORETURN) != 0;
2704 else
2705 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2706 == (ECF_NORETURN | ECF_NOTHROW));
2708 else
2709 return cgraph_node_cannot_return (e->callee);
2712 /* Return true when function NODE can be expected to be removed
2713 from the program when direct calls in this compilation unit are removed.
2715 As a special case, COMDAT functions are
2716 cgraph_can_remove_if_no_direct_calls_p while they are not
2717 cgraph_only_called_directly_p (it is possible they are called from another
2718 unit).
2720 This function behaves as cgraph_only_called_directly_p because eliminating
2721 all uses of a COMDAT function does not necessarily make it disappear from
2722 the program unless we are compiling the whole program or we do LTO. In that
2723 case we know we win, since dynamic linking will not really discard the
2724 linkonce section. */
2726 bool
2727 cgraph_will_be_removed_from_program_if_no_direct_calls (struct cgraph_node *node)
2729 if (node->local.used_from_object_file)
2730 return false;
2731 if (!in_lto_p && !flag_whole_program)
2732 return cgraph_only_called_directly_p (node);
2733 else
2734 return cgraph_can_remove_if_no_direct_calls_p (node);
2737 #include "gt-cgraph.h"