gcc/cgraph.c
1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains the basic routines for manipulating the call graph.
24 The callgraph:
26 The call-graph is a data structure designed for inter-procedural
27 optimization, but it is also used in non-unit-at-a-time compilation to
28 allow easier code sharing.
30 The call-graph consists of nodes and edges represented via linked lists.
31 Each function (external or not) corresponds to a unique node.
33 The mapping from declarations to call-graph nodes is done using a hash
34 table based on DECL_UID. The call-graph nodes are created lazily by the
35 cgraph_node function when it is called for an unknown declaration.
37 The callgraph at the moment does not represent all indirect calls or calls
38 from other compilation units. Flag NEEDED is set for each node that may be
39 accessed in such an invisible way and it shall be considered an entry point
40 to the callgraph.
42 On the other hand, the callgraph currently does contain some edges for
43 indirect calls with unknown callees, which can be accessed through the
44 indirect_calls field of a node. Note, however, that at the moment only
45 calls which are potential candidates for indirect inlining are
46 added there.
48 Interprocedural information:
50 The callgraph is the place to store data needed for interprocedural
51 optimization. All data structures are divided into three components:
52 local_info, which is produced while analyzing the function; global_info,
53 which is the result of a global walk of the callgraph at the end of
54 compilation; and rtl_info, which is used by the RTL backend to propagate
55 data from already compiled functions to their callers.
57 Moreover, each node has a uid which can be used to keep information in
58 on-the-side arrays. UIDs are reused and therefore reasonably dense.
60 Inlining plans:
62 The function inlining information is decided in advance and maintained
63 in the callgraph as a so-called inline plan.
64 For each inlined call, the callee's node is cloned to represent the
65 new function copy produced by the inliner.
66 Each inlined call gets a unique corresponding clone node of the callee,
67 and the data structure is updated while inlining is performed, so
68 the clones are eliminated and their callee edges redirected to the
69 caller.
71 Each edge has an "inline_failed" field. When the field is NULL,
72 the call will be inlined. When it is non-NULL, it contains a reason
73 why inlining wasn't performed. */
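/* Illustrative sketch (not part of GCC itself): once the callgraph has
   been built, clients can walk every function and its direct call edges
   through the linked lists maintained in this file, e.g.

       struct cgraph_node *node;
       struct cgraph_edge *e;

       for (node = cgraph_nodes; node; node = node->next)
         for (e = node->callees; e; e = e->next_callee)
           fprintf (stderr, "%s calls %s\n",
                    cgraph_node_name (e->caller),
                    cgraph_node_name (e->callee));

   Indirect calls with unknown callees are kept on the separate
   node->indirect_calls list described above.  */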
75 #include "config.h"
76 #include "system.h"
77 #include "coretypes.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "tree-inline.h"
81 #include "langhooks.h"
82 #include "hashtab.h"
83 #include "toplev.h"
84 #include "flags.h"
85 #include "ggc.h"
86 #include "debug.h"
87 #include "target.h"
88 #include "basic-block.h"
89 #include "cgraph.h"
90 #include "output.h"
91 #include "intl.h"
92 #include "gimple.h"
93 #include "tree-dump.h"
94 #include "tree-flow.h"
95 #include "value-prof.h"
96 #include "except.h"
97 #include "diagnostic-core.h"
98 #include "rtl.h"
99 #include "ipa-utils.h"
100 #include "lto-streamer.h"
102 static void cgraph_node_remove_callers (struct cgraph_node *node);
103 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
104 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
106 /* Hash table used to convert declarations into nodes. */
107 static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
108 /* Hash table used to convert assembler names into nodes. */
109 static GTY((param_is (struct cgraph_node))) htab_t assembler_name_hash;
111 /* The linked list of cgraph nodes. */
112 struct cgraph_node *cgraph_nodes;
114 /* Queue of cgraph nodes scheduled to be lowered. */
115 struct cgraph_node *cgraph_nodes_queue;
117 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
118 secondary queue used during optimization to accommodate passes that
119 may generate new functions that need to be optimized and expanded. */
120 struct cgraph_node *cgraph_new_nodes;
122 /* Number of nodes in existence. */
123 int cgraph_n_nodes;
125 /* Maximal uid used in cgraph nodes. */
126 int cgraph_max_uid;
128 /* Maximal uid used in cgraph edges. */
129 int cgraph_edge_max_uid;
131 /* Maximal pid used for profiling. */
132 int cgraph_max_pid;
134 /* Set when whole unit has been analyzed so we can access global info. */
135 bool cgraph_global_info_ready = false;
137 /* What state callgraph is in right now. */
138 enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;
140 /* Set when the cgraph is fully built and the basic flags are computed. */
141 bool cgraph_function_flags_ready = false;
143 /* Linked list of cgraph asm nodes. */
144 struct cgraph_asm_node *cgraph_asm_nodes;
146 /* Last node in cgraph_asm_nodes. */
147 static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;
149 /* The order index of the next cgraph node to be created. This is
150 used so that we can sort the cgraph nodes in order by when we saw
151 them, to support -fno-toplevel-reorder. */
152 int cgraph_order;
154 /* List of hooks triggered on cgraph_edge events. */
155 struct cgraph_edge_hook_list {
156 cgraph_edge_hook hook;
157 void *data;
158 struct cgraph_edge_hook_list *next;
161 /* List of hooks triggered on cgraph_node events. */
162 struct cgraph_node_hook_list {
163 cgraph_node_hook hook;
164 void *data;
165 struct cgraph_node_hook_list *next;
168 /* List of hooks triggered on events involving two cgraph_edges. */
169 struct cgraph_2edge_hook_list {
170 cgraph_2edge_hook hook;
171 void *data;
172 struct cgraph_2edge_hook_list *next;
175 /* List of hooks triggered on events involving two cgraph_nodes. */
176 struct cgraph_2node_hook_list {
177 cgraph_2node_hook hook;
178 void *data;
179 struct cgraph_2node_hook_list *next;
182 /* List of hooks triggered when an edge is removed. */
183 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
184 /* List of hooks triggered when a node is removed. */
185 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
186 /* List of hooks triggered when an edge is duplicated. */
187 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
188 /* List of hooks triggered when a node is duplicated. */
189 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
190 /* List of hooks triggered when a function is inserted. */
191 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
193 /* Head of a linked list of unused (freed) call graph nodes.
194 Do not GTY((delete)) this list so UIDs get reliably recycled. */
195 static GTY(()) struct cgraph_node *free_nodes;
196 /* Head of a linked list of unused (freed) call graph edges.
197 Do not GTY((delete)) this list so UIDs get reliably recycled. */
198 static GTY(()) struct cgraph_edge *free_edges;
200 /* Macros to access the next item in the list of free cgraph nodes and
201 edges. */
202 #define NEXT_FREE_NODE(NODE) (NODE)->next
203 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
205 /* Register HOOK to be called with DATA on each removed edge. */
206 struct cgraph_edge_hook_list *
207 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
209 struct cgraph_edge_hook_list *entry;
210 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
212 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
213 entry->hook = hook;
214 entry->data = data;
215 entry->next = NULL;
216 while (*ptr)
217 ptr = &(*ptr)->next;
218 *ptr = entry;
219 return entry;
222 /* Remove ENTRY from the list of hooks called on removing edges. */
223 void
224 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
226 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
228 while (*ptr != entry)
229 ptr = &(*ptr)->next;
230 *ptr = entry->next;
231 free (entry);
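/* Illustrative sketch (hypothetical pass code, not part of this file):
   a pass that keeps per-edge data typically registers a removal hook so
   its data is dropped together with the edge, and unregisters the hook
   when the pass finishes:

       static struct cgraph_edge_hook_list *edge_removal_hook_holder;

       static void
       my_edge_removal_hook (struct cgraph_edge *e, void *data ATTRIBUTE_UNUSED)
       {
         if (dump_file)
           fprintf (dump_file, "forgetting edge with uid %i\n", e->uid);
       }

       edge_removal_hook_holder
         = cgraph_add_edge_removal_hook (&my_edge_removal_hook, NULL);

   and later:

       cgraph_remove_edge_removal_hook (edge_removal_hook_holder);

   The node, duplication and insertion hooks below follow the same
   pattern.  */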
234 /* Call all edge removal hooks. */
235 static void
236 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
238 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
239 while (entry)
241 entry->hook (e, entry->data);
242 entry = entry->next;
246 /* Register HOOK to be called with DATA on each removed node. */
247 struct cgraph_node_hook_list *
248 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
250 struct cgraph_node_hook_list *entry;
251 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
253 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
254 entry->hook = hook;
255 entry->data = data;
256 entry->next = NULL;
257 while (*ptr)
258 ptr = &(*ptr)->next;
259 *ptr = entry;
260 return entry;
263 /* Remove ENTRY from the list of hooks called on removing nodes. */
264 void
265 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
267 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
269 while (*ptr != entry)
270 ptr = &(*ptr)->next;
271 *ptr = entry->next;
272 free (entry);
275 /* Call all node removal hooks. */
276 static void
277 cgraph_call_node_removal_hooks (struct cgraph_node *node)
279 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
280 while (entry)
282 entry->hook (node, entry->data);
283 entry = entry->next;
287 /* Register HOOK to be called with DATA on each inserted node. */
288 struct cgraph_node_hook_list *
289 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
291 struct cgraph_node_hook_list *entry;
292 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
294 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
295 entry->hook = hook;
296 entry->data = data;
297 entry->next = NULL;
298 while (*ptr)
299 ptr = &(*ptr)->next;
300 *ptr = entry;
301 return entry;
304 /* Remove ENTRY from the list of hooks called on inserted nodes. */
305 void
306 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
308 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
310 while (*ptr != entry)
311 ptr = &(*ptr)->next;
312 *ptr = entry->next;
313 free (entry);
316 /* Call all node insertion hooks. */
317 void
318 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
320 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
321 while (entry)
323 entry->hook (node, entry->data);
324 entry = entry->next;
328 /* Register HOOK to be called with DATA on each duplicated edge. */
329 struct cgraph_2edge_hook_list *
330 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
332 struct cgraph_2edge_hook_list *entry;
333 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
335 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
336 entry->hook = hook;
337 entry->data = data;
338 entry->next = NULL;
339 while (*ptr)
340 ptr = &(*ptr)->next;
341 *ptr = entry;
342 return entry;
345 /* Remove ENTRY from the list of hooks called on duplicating edges. */
346 void
347 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
349 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
351 while (*ptr != entry)
352 ptr = &(*ptr)->next;
353 *ptr = entry->next;
354 free (entry);
357 /* Call all edge duplication hooks. */
358 static void
359 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
360 struct cgraph_edge *cs2)
362 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
363 while (entry)
365 entry->hook (cs1, cs2, entry->data);
366 entry = entry->next;
370 /* Register HOOK to be called with DATA on each duplicated node. */
371 struct cgraph_2node_hook_list *
372 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
374 struct cgraph_2node_hook_list *entry;
375 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
377 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
378 entry->hook = hook;
379 entry->data = data;
380 entry->next = NULL;
381 while (*ptr)
382 ptr = &(*ptr)->next;
383 *ptr = entry;
384 return entry;
387 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
388 void
389 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
391 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
393 while (*ptr != entry)
394 ptr = &(*ptr)->next;
395 *ptr = entry->next;
396 free (entry);
399 /* Call all node duplication hooks. */
400 static void
401 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
402 struct cgraph_node *node2)
404 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
405 while (entry)
407 entry->hook (node1, node2, entry->data);
408 entry = entry->next;
412 /* Returns a hash code for P. */
414 static hashval_t
415 hash_node (const void *p)
417 const struct cgraph_node *n = (const struct cgraph_node *) p;
418 return (hashval_t) DECL_UID (n->decl);
422 /* Returns nonzero if P1 and P2 are equal. */
424 static int
425 eq_node (const void *p1, const void *p2)
427 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
428 const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
429 return DECL_UID (n1->decl) == DECL_UID (n2->decl);
432 /* Allocate new callgraph node. */
434 static inline struct cgraph_node *
435 cgraph_allocate_node (void)
437 struct cgraph_node *node;
439 if (free_nodes)
441 node = free_nodes;
442 free_nodes = NEXT_FREE_NODE (node);
444 else
446 node = ggc_alloc_cleared_cgraph_node ();
447 node->uid = cgraph_max_uid++;
450 return node;
453 /* Allocate new callgraph node and insert it into basic data structures. */
455 static struct cgraph_node *
456 cgraph_create_node (void)
458 struct cgraph_node *node = cgraph_allocate_node ();
460 node->next = cgraph_nodes;
461 node->pid = -1;
462 node->order = cgraph_order++;
463 if (cgraph_nodes)
464 cgraph_nodes->previous = node;
465 node->previous = NULL;
466 node->global.estimated_growth = INT_MIN;
467 node->frequency = NODE_FREQUENCY_NORMAL;
468 ipa_empty_ref_list (&node->ref_list);
469 cgraph_nodes = node;
470 cgraph_n_nodes++;
471 return node;
474 /* Return cgraph node assigned to DECL. Create new one when needed. */
476 struct cgraph_node *
477 cgraph_node (tree decl)
479 struct cgraph_node key, *node, **slot;
481 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
483 if (!cgraph_hash)
484 cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
486 key.decl = decl;
488 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
490 if (*slot)
492 node = *slot;
493 if (node->same_body_alias)
494 node = node->same_body;
495 return node;
498 node = cgraph_create_node ();
499 node->decl = decl;
500 *slot = node;
501 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
503 node->origin = cgraph_node (DECL_CONTEXT (decl));
504 node->next_nested = node->origin->nested;
505 node->origin->nested = node;
507 if (assembler_name_hash)
509 void **aslot;
510 tree name = DECL_ASSEMBLER_NAME (decl);
512 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
513 decl_assembler_name_hash (name),
514 INSERT);
515 /* We can have multiple declarations with the same assembler name. For C++
516 it is __builtin_strlen and strlen, for instance. Do we need to
517 record them all? The original implementation marked just the first one,
518 so let's hope for the best. */
519 if (*aslot == NULL)
520 *aslot = node;
522 return node;
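/* Illustrative sketch: a front end or pass holding a FUNCTION_DECL,
   here called FNDECL, obtains the corresponding node with

       struct cgraph_node *n = cgraph_node (fndecl);

   which creates the node on first use.  Use cgraph_get_node below when
   a NULL result is preferred over creating a new node, and
   cgraph_get_node_or_alias when the same-body alias node itself is
   wanted rather than its target.  */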
525 /* Mark ALIAS as an alias to DECL. */
527 static struct cgraph_node *
528 cgraph_same_body_alias_1 (tree alias, tree decl)
530 struct cgraph_node key, *alias_node, *decl_node, **slot;
532 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
533 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
534 decl_node = cgraph_node (decl);
536 key.decl = alias;
538 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
540 /* If the cgraph_node has already been created, fail. */
541 if (*slot)
542 return NULL;
544 alias_node = cgraph_allocate_node ();
545 alias_node->decl = alias;
546 alias_node->same_body_alias = 1;
547 alias_node->same_body = decl_node;
548 alias_node->previous = NULL;
549 if (decl_node->same_body)
550 decl_node->same_body->previous = alias_node;
551 alias_node->next = decl_node->same_body;
552 alias_node->thunk.alias = decl;
553 decl_node->same_body = alias_node;
554 *slot = alias_node;
555 return alias_node;
558 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
559 and NULL otherwise.
560 Same body aliases are output whenever the body of DECL is output,
561 and cgraph_node (ALIAS) transparently returns cgraph_node (DECL). */
563 struct cgraph_node *
564 cgraph_same_body_alias (tree alias, tree decl)
566 #ifndef ASM_OUTPUT_DEF
567 /* If aliases aren't supported by the assembler, fail. */
568 return NULL;
569 #endif
571 /*gcc_assert (!assembler_name_hash);*/
573 return cgraph_same_body_alias_1 (alias, decl);
576 /* Add a thunk alias into the callgraph. The alias declaration is ALIAS;
577 it aliases DECL with an adjustment made to the first parameter.
578 See the comments in thunk_adjust for details on the parameters. */
580 struct cgraph_node *
581 cgraph_add_thunk (tree alias, tree decl, bool this_adjusting,
582 HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
583 tree virtual_offset,
584 tree real_alias)
586 struct cgraph_node *node = cgraph_get_node (alias);
588 if (node)
590 gcc_assert (node->local.finalized);
591 gcc_assert (!node->same_body);
592 cgraph_remove_node (node);
595 node = cgraph_same_body_alias_1 (alias, decl);
596 gcc_assert (node);
597 #ifdef ENABLE_CHECKING
598 gcc_assert (!virtual_offset
599 || tree_int_cst_equal (virtual_offset, size_int (virtual_value)));
600 #endif
601 node->thunk.fixed_offset = fixed_offset;
602 node->thunk.this_adjusting = this_adjusting;
603 node->thunk.virtual_value = virtual_value;
604 node->thunk.virtual_offset_p = virtual_offset != NULL;
605 node->thunk.alias = real_alias;
606 node->thunk.thunk_p = true;
607 return node;
610 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
611 is assigned. */
613 struct cgraph_node *
614 cgraph_get_node_or_alias (const_tree decl)
616 struct cgraph_node key, *node = NULL, **slot;
618 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
620 if (!cgraph_hash)
621 return NULL;
623 key.decl = CONST_CAST2 (tree, const_tree, decl);
625 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
626 NO_INSERT);
628 if (slot && *slot)
629 node = *slot;
630 return node;
633 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
634 is assigned. */
636 struct cgraph_node *
637 cgraph_get_node (const_tree decl)
639 struct cgraph_node key, *node = NULL, **slot;
641 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
643 if (!cgraph_hash)
644 return NULL;
646 key.decl = CONST_CAST2 (tree, const_tree, decl);
648 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
649 NO_INSERT);
651 if (slot && *slot)
653 node = *slot;
654 if (node->same_body_alias)
655 node = node->same_body;
657 return node;
660 /* Insert already constructed node into hashtable. */
662 void
663 cgraph_insert_node_to_hashtable (struct cgraph_node *node)
665 struct cgraph_node **slot;
667 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
669 gcc_assert (!*slot);
670 *slot = node;
673 /* Returns a hash code for P. */
675 static hashval_t
676 hash_node_by_assembler_name (const void *p)
678 const struct cgraph_node *n = (const struct cgraph_node *) p;
679 return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
682 /* Returns nonzero if P1 and P2 are equal. */
684 static int
685 eq_assembler_name (const void *p1, const void *p2)
687 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
688 const_tree name = (const_tree)p2;
689 return (decl_assembler_name_equal (n1->decl, name));
692 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
693 Return NULL if there's no such node. */
695 struct cgraph_node *
696 cgraph_node_for_asm (tree asmname)
698 struct cgraph_node *node;
699 void **slot;
701 if (!assembler_name_hash)
703 assembler_name_hash =
704 htab_create_ggc (10, hash_node_by_assembler_name, eq_assembler_name,
705 NULL);
706 for (node = cgraph_nodes; node; node = node->next)
707 if (!node->global.inlined_to)
709 tree name = DECL_ASSEMBLER_NAME (node->decl);
710 slot = htab_find_slot_with_hash (assembler_name_hash, name,
711 decl_assembler_name_hash (name),
712 INSERT);
713 /* We can have multiple declarations with the same assembler name. For C++
714 it is __builtin_strlen and strlen, for instance. Do we need to
715 record them all? The original implementation marked just the first one,
716 so let's hope for the best. */
717 if (!*slot)
718 *slot = node;
719 if (node->same_body)
721 struct cgraph_node *alias;
723 for (alias = node->same_body; alias; alias = alias->next)
725 hashval_t hash;
726 name = DECL_ASSEMBLER_NAME (alias->decl);
727 hash = decl_assembler_name_hash (name);
728 slot = htab_find_slot_with_hash (assembler_name_hash, name,
729 hash, INSERT);
730 if (!*slot)
731 *slot = alias;
737 slot = htab_find_slot_with_hash (assembler_name_hash, asmname,
738 decl_assembler_name_hash (asmname),
739 NO_INSERT);
741 if (slot)
743 node = (struct cgraph_node *) *slot;
744 if (node->same_body_alias)
745 node = node->same_body;
746 return node;
748 return NULL;
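/* Illustrative sketch: looking up a function by assembler name, for
   instance while resolving a name referenced from a toplevel asm:

       struct cgraph_node *n
         = cgraph_node_for_asm (get_identifier ("memcpy"));

   get_identifier comes from tree.h; NULL is returned when no function
   with that assembler name is known to the callgraph.  */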
751 /* Returns a hash value for X (which really is a cgraph_edge). */
753 static hashval_t
754 edge_hash (const void *x)
756 return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
759 /* Return nonzero if the call_stmt of cgraph_edge X is the statement Y. */
761 static int
762 edge_eq (const void *x, const void *y)
764 return ((const struct cgraph_edge *) x)->call_stmt == y;
767 /* Add call graph edge E to call site hash of its caller. */
769 static inline void
770 cgraph_add_edge_to_call_site_hash (struct cgraph_edge *e)
772 void **slot;
773 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
774 e->call_stmt,
775 htab_hash_pointer (e->call_stmt),
776 INSERT);
777 gcc_assert (!*slot);
778 *slot = e;
781 /* Return the callgraph edge representing the GIMPLE_CALL statement
782 CALL_STMT. */
784 struct cgraph_edge *
785 cgraph_edge (struct cgraph_node *node, gimple call_stmt)
787 struct cgraph_edge *e, *e2;
788 int n = 0;
790 if (node->call_site_hash)
791 return (struct cgraph_edge *)
792 htab_find_with_hash (node->call_site_hash, call_stmt,
793 htab_hash_pointer (call_stmt));
795 /* This loop may turn out to be a performance problem. In such a case,
796 adding hash tables to call nodes with very many edges is probably the best
797 solution. It is not a good idea to add a pointer into the CALL_EXPR itself,
798 because we want to make it possible to have multiple cgraph nodes
799 representing different clones of the same body before the body is actually cloned. */
800 for (e = node->callees; e; e = e->next_callee)
802 if (e->call_stmt == call_stmt)
803 break;
804 n++;
807 if (!e)
808 for (e = node->indirect_calls; e; e = e->next_callee)
810 if (e->call_stmt == call_stmt)
811 break;
812 n++;
815 if (n > 100)
817 node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
818 for (e2 = node->callees; e2; e2 = e2->next_callee)
819 cgraph_add_edge_to_call_site_hash (e2);
820 for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
821 cgraph_add_edge_to_call_site_hash (e2);
824 return e;
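/* Illustrative sketch: a GIMPLE pass that replaced a call statement can
   find and update the corresponding edge, assuming OLD_STMT and
   NEW_STMT are the GIMPLE_CALLs before and after the change:

       struct cgraph_node *n = cgraph_node (current_function_decl);
       struct cgraph_edge *e = cgraph_edge (n, old_stmt);

       if (e)
         cgraph_set_call_stmt (e, new_stmt);

   cgraph_update_edges_for_call_stmt below does this, and more, for the
   whole clone tree.  */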
828 /* Change field call_stmt of edge E to NEW_STMT. */
830 void
831 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
833 tree decl;
835 if (e->caller->call_site_hash)
837 htab_remove_elt_with_hash (e->caller->call_site_hash,
838 e->call_stmt,
839 htab_hash_pointer (e->call_stmt));
842 e->call_stmt = new_stmt;
843 if (e->indirect_unknown_callee
844 && (decl = gimple_call_fndecl (new_stmt)))
846 /* Constant propagation (and possibly also inlining?) can turn an
847 indirect call into a direct one. */
848 struct cgraph_node *new_callee = cgraph_node (decl);
850 cgraph_make_edge_direct (e, new_callee);
853 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
854 e->can_throw_external = stmt_can_throw_external (new_stmt);
855 pop_cfun ();
856 if (e->caller->call_site_hash)
857 cgraph_add_edge_to_call_site_hash (e);
860 /* Like cgraph_set_call_stmt but walk the clone tree and update all
861 clones sharing the same function body. */
863 void
864 cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
865 gimple old_stmt, gimple new_stmt)
867 struct cgraph_node *node;
868 struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
870 if (edge)
871 cgraph_set_call_stmt (edge, new_stmt);
873 node = orig->clones;
874 if (node)
875 while (node != orig)
877 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
878 if (edge)
879 cgraph_set_call_stmt (edge, new_stmt);
880 if (node->clones)
881 node = node->clones;
882 else if (node->next_sibling_clone)
883 node = node->next_sibling_clone;
884 else
886 while (node != orig && !node->next_sibling_clone)
887 node = node->clone_of;
888 if (node != orig)
889 node = node->next_sibling_clone;
894 /* Like cgraph_create_edge, walk the clone tree and update all clones sharing
895 the same function body. If clones already have an edge for OLD_STMT, only
896 update the edge the same way cgraph_set_call_stmt_including_clones does.
898 TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
899 frequencies of the clones. */
901 void
902 cgraph_create_edge_including_clones (struct cgraph_node *orig,
903 struct cgraph_node *callee,
904 gimple old_stmt,
905 gimple stmt, gcov_type count,
906 int freq, int loop_depth,
907 cgraph_inline_failed_t reason)
909 struct cgraph_node *node;
910 struct cgraph_edge *edge;
912 if (!cgraph_edge (orig, stmt))
914 edge = cgraph_create_edge (orig, callee, stmt, count, freq, loop_depth);
915 edge->inline_failed = reason;
918 node = orig->clones;
919 if (node)
920 while (node != orig)
922 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
924 /* It is possible that the clones already contain the edge while the
925 master didn't. Either we promoted an indirect call into a direct
926 call in the clone or we are processing clones of an unreachable
927 master where edges have been removed. */
928 if (edge)
929 cgraph_set_call_stmt (edge, stmt);
930 else if (!cgraph_edge (node, stmt))
932 edge = cgraph_create_edge (node, callee, stmt, count,
933 freq, loop_depth);
934 edge->inline_failed = reason;
937 if (node->clones)
938 node = node->clones;
939 else if (node->next_sibling_clone)
940 node = node->next_sibling_clone;
941 else
943 while (node != orig && !node->next_sibling_clone)
944 node = node->clone_of;
945 if (node != orig)
946 node = node->next_sibling_clone;
951 /* Give initial reasons why inlining would fail on EDGE. This gets either
952 nullified or usually overwritten by more precise reasons later. */
954 static void
955 initialize_inline_failed (struct cgraph_edge *e)
957 struct cgraph_node *callee = e->callee;
959 if (e->indirect_unknown_callee)
960 e->inline_failed = CIF_INDIRECT_UNKNOWN_CALL;
961 else if (!callee->analyzed)
962 e->inline_failed = CIF_BODY_NOT_AVAILABLE;
963 else if (callee->local.redefined_extern_inline)
964 e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
965 else if (!callee->local.inlinable)
966 e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
967 else if (e->call_stmt && gimple_call_cannot_inline_p (e->call_stmt))
968 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
969 else
970 e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
973 /* Allocate a cgraph_edge structure and fill it with data according to the
974 parameters of which only CALLEE can be NULL (when creating an indirect call
975 edge). */
977 static struct cgraph_edge *
978 cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
979 gimple call_stmt, gcov_type count, int freq, int nest)
981 struct cgraph_edge *edge;
983 /* LTO does not actually have access to the call_stmt since these
984 have not been loaded yet. */
985 if (call_stmt)
987 #ifdef ENABLE_CHECKING
988 /* This is a rather expensive check, possibly triggering construction of
989 the call stmt hash table. */
990 gcc_assert (!cgraph_edge (caller, call_stmt));
991 #endif
993 gcc_assert (is_gimple_call (call_stmt));
996 if (free_edges)
998 edge = free_edges;
999 free_edges = NEXT_FREE_EDGE (edge);
1001 else
1003 edge = ggc_alloc_cgraph_edge ();
1004 edge->uid = cgraph_edge_max_uid++;
1007 edge->aux = NULL;
1008 edge->caller = caller;
1009 edge->callee = callee;
1010 edge->prev_caller = NULL;
1011 edge->next_caller = NULL;
1012 edge->prev_callee = NULL;
1013 edge->next_callee = NULL;
1015 edge->count = count;
1016 gcc_assert (count >= 0);
1017 edge->frequency = freq;
1018 gcc_assert (freq >= 0);
1019 gcc_assert (freq <= CGRAPH_FREQ_MAX);
1020 edge->loop_nest = nest;
1022 edge->call_stmt = call_stmt;
1023 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
1024 edge->can_throw_external
1025 = call_stmt ? stmt_can_throw_external (call_stmt) : false;
1026 pop_cfun ();
1027 edge->call_stmt_cannot_inline_p =
1028 (call_stmt ? gimple_call_cannot_inline_p (call_stmt) : false);
1029 if (call_stmt && caller->call_site_hash)
1030 cgraph_add_edge_to_call_site_hash (edge);
1032 edge->indirect_info = NULL;
1033 edge->indirect_inlining_edge = 0;
1035 return edge;
1038 /* Create edge from CALLER to CALLEE in the cgraph. */
1040 struct cgraph_edge *
1041 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
1042 gimple call_stmt, gcov_type count, int freq, int nest)
1044 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
1045 count, freq, nest);
1047 edge->indirect_unknown_callee = 0;
1048 initialize_inline_failed (edge);
1050 edge->next_caller = callee->callers;
1051 if (callee->callers)
1052 callee->callers->prev_caller = edge;
1053 edge->next_callee = caller->callees;
1054 if (caller->callees)
1055 caller->callees->prev_callee = edge;
1056 caller->callees = edge;
1057 callee->callers = edge;
1059 return edge;
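/* Illustrative sketch: when a pass materializes a new direct call
   statement STMT in basic block BB of the current function to a known
   CALLEE_DECL, it records the call roughly the way
   cgraph_update_edges_for_call_stmt_node below does:

       cgraph_create_edge (cgraph_node (current_function_decl),
                           cgraph_node (callee_decl), stmt,
                           bb->count,
                           compute_call_stmt_bb_frequency
                             (current_function_decl, bb),
                           bb->loop_depth);
   */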
1063 /* Create an indirect edge with a yet-undetermined callee for the call
1064 statement CALL_STMT in function CALLER. ECF_FLAGS are the ECF flags of
1065 the call; the parameter index in the indirect_info starts out unknown (-1). */
1067 struct cgraph_edge *
1068 cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
1069 int ecf_flags,
1070 gcov_type count, int freq, int nest)
1072 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
1073 count, freq, nest);
1075 edge->indirect_unknown_callee = 1;
1076 initialize_inline_failed (edge);
1078 edge->indirect_info = ggc_alloc_cleared_cgraph_indirect_call_info ();
1079 edge->indirect_info->param_index = -1;
1080 edge->indirect_info->ecf_flags = ecf_flags;
1082 edge->next_callee = caller->indirect_calls;
1083 if (caller->indirect_calls)
1084 caller->indirect_calls->prev_callee = edge;
1085 caller->indirect_calls = edge;
1087 return edge;
1090 /* Remove the edge E from the list of the callers of the callee. */
1092 static inline void
1093 cgraph_edge_remove_callee (struct cgraph_edge *e)
1095 gcc_assert (!e->indirect_unknown_callee);
1096 if (e->prev_caller)
1097 e->prev_caller->next_caller = e->next_caller;
1098 if (e->next_caller)
1099 e->next_caller->prev_caller = e->prev_caller;
1100 if (!e->prev_caller)
1101 e->callee->callers = e->next_caller;
1104 /* Remove the edge E from the list of the callees of the caller. */
1106 static inline void
1107 cgraph_edge_remove_caller (struct cgraph_edge *e)
1109 if (e->prev_callee)
1110 e->prev_callee->next_callee = e->next_callee;
1111 if (e->next_callee)
1112 e->next_callee->prev_callee = e->prev_callee;
1113 if (!e->prev_callee)
1115 if (e->indirect_unknown_callee)
1116 e->caller->indirect_calls = e->next_callee;
1117 else
1118 e->caller->callees = e->next_callee;
1120 if (e->caller->call_site_hash)
1121 htab_remove_elt_with_hash (e->caller->call_site_hash,
1122 e->call_stmt,
1123 htab_hash_pointer (e->call_stmt));
1126 /* Put the edge onto the free list. */
1128 static void
1129 cgraph_free_edge (struct cgraph_edge *e)
1131 int uid = e->uid;
1133 /* Clear out the edge so we do not dangle pointers. */
1134 memset (e, 0, sizeof (*e));
1135 e->uid = uid;
1136 NEXT_FREE_EDGE (e) = free_edges;
1137 free_edges = e;
1140 /* Remove the edge E in the cgraph. */
1142 void
1143 cgraph_remove_edge (struct cgraph_edge *e)
1145 /* Call all edge removal hooks. */
1146 cgraph_call_edge_removal_hooks (e);
1148 if (!e->indirect_unknown_callee)
1149 /* Remove from callers list of the callee. */
1150 cgraph_edge_remove_callee (e);
1152 /* Remove from callees list of the callers. */
1153 cgraph_edge_remove_caller (e);
1155 /* Put the edge onto the free list. */
1156 cgraph_free_edge (e);
1159 /* Set callee of call graph edge E and add it to the corresponding set of
1160 callers. */
1162 static void
1163 cgraph_set_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1165 e->prev_caller = NULL;
1166 if (n->callers)
1167 n->callers->prev_caller = e;
1168 e->next_caller = n->callers;
1169 n->callers = e;
1170 e->callee = n;
1173 /* Redirect callee of E to N. The function does not update underlying
1174 call expression. */
1176 void
1177 cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1179 /* Remove from callers list of the current callee. */
1180 cgraph_edge_remove_callee (e);
1182 /* Insert to callers list of the new callee. */
1183 cgraph_set_edge_callee (e, n);
1186 /* Make an indirect EDGE with an unknown callee an ordinary edge leading to
1187 CALLEE. */
1189 void
1190 cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee)
1192 edge->indirect_unknown_callee = 0;
1194 /* Get the edge out of the indirect edge list. */
1195 if (edge->prev_callee)
1196 edge->prev_callee->next_callee = edge->next_callee;
1197 if (edge->next_callee)
1198 edge->next_callee->prev_callee = edge->prev_callee;
1199 if (!edge->prev_callee)
1200 edge->caller->indirect_calls = edge->next_callee;
1202 /* Put it into the normal callee list. */
1203 edge->prev_callee = NULL;
1204 edge->next_callee = edge->caller->callees;
1205 if (edge->caller->callees)
1206 edge->caller->callees->prev_callee = edge;
1207 edge->caller->callees = edge;
1209 /* Insert to callers list of the new callee. */
1210 cgraph_set_edge_callee (edge, callee);
1212 /* We need to re-determine the inlining status of the edge. */
1213 initialize_inline_failed (edge);
1217 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1218 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1219 of OLD_STMT if it was previously a call statement. */
1221 static void
1222 cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
1223 gimple old_stmt, tree old_call, gimple new_stmt)
1225 tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fndecl (new_stmt) : 0;
1227 /* If we are seeing indirect calls, there is nothing to update. */
1228 if (!new_call && !old_call)
1229 return;
1230 /* See if we turned an indirect call into a direct call, or folded a call
1231 to one builtin into a call to a different builtin. */
1232 if (old_call != new_call)
1234 struct cgraph_edge *e = cgraph_edge (node, old_stmt);
1235 struct cgraph_edge *ne = NULL;
1236 gcov_type count;
1237 int frequency;
1238 int loop_nest;
1240 if (e)
1242 /* See if the edge is already there and has the correct callee. It
1243 might be so because indirect inlining has already updated
1244 it. */
1245 if (new_call && e->callee && e->callee->decl == new_call)
1246 return;
1248 /* Otherwise remove the edge and create a new one; we can't simply redirect
1249 it since the function has changed, so the inline plan and other information
1250 attached to the edge is invalid. */
1251 count = e->count;
1252 frequency = e->frequency;
1253 loop_nest = e->loop_nest;
1254 cgraph_remove_edge (e);
1256 else
1258 /* We are seeing new direct call; compute profile info based on BB. */
1259 basic_block bb = gimple_bb (new_stmt);
1260 count = bb->count;
1261 frequency = compute_call_stmt_bb_frequency (current_function_decl,
1262 bb);
1263 loop_nest = bb->loop_depth;
1266 if (new_call)
1268 ne = cgraph_create_edge (node, cgraph_node (new_call),
1269 new_stmt, count, frequency,
1270 loop_nest);
1271 gcc_assert (ne->inline_failed);
1274 /* We only updated the call stmt; update the pointer in the cgraph edge. */
1275 else if (old_stmt != new_stmt)
1276 cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
1279 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1280 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1281 of OLD_STMT before it was updated (updating can happen in place). */
1283 void
1284 cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
1286 struct cgraph_node *orig = cgraph_node (cfun->decl);
1287 struct cgraph_node *node;
1289 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1290 if (orig->clones)
1291 for (node = orig->clones; node != orig;)
1293 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1294 if (node->clones)
1295 node = node->clones;
1296 else if (node->next_sibling_clone)
1297 node = node->next_sibling_clone;
1298 else
1300 while (node != orig && !node->next_sibling_clone)
1301 node = node->clone_of;
1302 if (node != orig)
1303 node = node->next_sibling_clone;
1309 /* Remove all callees from the node. */
1311 void
1312 cgraph_node_remove_callees (struct cgraph_node *node)
1314 struct cgraph_edge *e, *f;
1316 /* It is sufficient to remove the edges from the lists of callers of
1317 the callees. The callee list of the node can be zapped with one
1318 assignment. */
1319 for (e = node->callees; e; e = f)
1321 f = e->next_callee;
1322 cgraph_call_edge_removal_hooks (e);
1323 if (!e->indirect_unknown_callee)
1324 cgraph_edge_remove_callee (e);
1325 cgraph_free_edge (e);
1327 for (e = node->indirect_calls; e; e = f)
1329 f = e->next_callee;
1330 cgraph_call_edge_removal_hooks (e);
1331 if (!e->indirect_unknown_callee)
1332 cgraph_edge_remove_callee (e);
1333 cgraph_free_edge (e);
1335 node->indirect_calls = NULL;
1336 node->callees = NULL;
1337 if (node->call_site_hash)
1339 htab_delete (node->call_site_hash);
1340 node->call_site_hash = NULL;
1344 /* Remove all callers from the node. */
1346 static void
1347 cgraph_node_remove_callers (struct cgraph_node *node)
1349 struct cgraph_edge *e, *f;
1351 /* It is sufficient to remove the edges from the lists of callees of
1352 the callers. The caller list of the node can be zapped with one
1353 assignment. */
1354 for (e = node->callers; e; e = f)
1356 f = e->next_caller;
1357 cgraph_call_edge_removal_hooks (e);
1358 cgraph_edge_remove_caller (e);
1359 cgraph_free_edge (e);
1361 node->callers = NULL;
1364 /* Release memory used to represent body of function NODE. */
1366 void
1367 cgraph_release_function_body (struct cgraph_node *node)
1369 if (DECL_STRUCT_FUNCTION (node->decl))
1371 tree old_decl = current_function_decl;
1372 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1373 if (cfun->gimple_df)
1375 current_function_decl = node->decl;
1376 delete_tree_ssa ();
1377 delete_tree_cfg_annotations ();
1378 cfun->eh = NULL;
1379 current_function_decl = old_decl;
1381 if (cfun->cfg)
1383 gcc_assert (dom_computed[0] == DOM_NONE);
1384 gcc_assert (dom_computed[1] == DOM_NONE);
1385 clear_edges ();
1387 if (cfun->value_histograms)
1388 free_histograms ();
1389 gcc_assert (!current_loops);
1390 pop_cfun();
1391 gimple_set_body (node->decl, NULL);
1392 VEC_free (ipa_opt_pass, heap,
1393 node->ipa_transforms_to_apply);
1394 /* The struct function hangs onto a lot of data that would leak if we
1395 didn't remove all pointers to it. */
1396 ggc_free (DECL_STRUCT_FUNCTION (node->decl));
1397 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1399 DECL_SAVED_TREE (node->decl) = NULL;
1400 /* If the node is abstract and needed, then do not clear DECL_INITIAL
1401 of its associated function declaration because it's
1402 needed to emit debug info later. */
1403 if (!node->abstract_and_needed)
1404 DECL_INITIAL (node->decl) = error_mark_node;
1407 /* Remove same body alias node. */
1409 void
1410 cgraph_remove_same_body_alias (struct cgraph_node *node)
1412 void **slot;
1413 int uid = node->uid;
1415 gcc_assert (node->same_body_alias);
1416 if (node->previous)
1417 node->previous->next = node->next;
1418 else
1419 node->same_body->same_body = node->next;
1420 if (node->next)
1421 node->next->previous = node->previous;
1422 node->next = NULL;
1423 node->previous = NULL;
1424 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1425 if (*slot == node)
1426 htab_clear_slot (cgraph_hash, slot);
1427 if (assembler_name_hash)
1429 tree name = DECL_ASSEMBLER_NAME (node->decl);
1430 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1431 decl_assembler_name_hash (name),
1432 NO_INSERT);
1433 if (slot && *slot == node)
1434 htab_clear_slot (assembler_name_hash, slot);
1437 /* Clear out the node to NULL all pointers and add the node to the free
1438 list. */
1439 memset (node, 0, sizeof(*node));
1440 node->uid = uid;
1441 NEXT_FREE_NODE (node) = free_nodes;
1442 free_nodes = node;
1445 /* Remove the node from cgraph. */
1447 void
1448 cgraph_remove_node (struct cgraph_node *node)
1450 void **slot;
1451 bool kill_body = false;
1452 struct cgraph_node *n;
1453 int uid = node->uid;
1455 cgraph_call_node_removal_hooks (node);
1456 cgraph_node_remove_callers (node);
1457 cgraph_node_remove_callees (node);
1458 ipa_remove_all_references (&node->ref_list);
1459 ipa_remove_all_refering (&node->ref_list);
1460 VEC_free (ipa_opt_pass, heap,
1461 node->ipa_transforms_to_apply);
1463 /* Incremental inlining accesses removed nodes stored in the postorder list. */
1465 node->needed = node->reachable = false;
1466 for (n = node->nested; n; n = n->next_nested)
1467 n->origin = NULL;
1468 node->nested = NULL;
1469 if (node->origin)
1471 struct cgraph_node **node2 = &node->origin->nested;
1473 while (*node2 != node)
1474 node2 = &(*node2)->next_nested;
1475 *node2 = node->next_nested;
1477 if (node->previous)
1478 node->previous->next = node->next;
1479 else
1480 cgraph_nodes = node->next;
1481 if (node->next)
1482 node->next->previous = node->previous;
1483 node->next = NULL;
1484 node->previous = NULL;
1485 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1486 if (*slot == node)
1488 struct cgraph_node *next_inline_clone;
1490 for (next_inline_clone = node->clones;
1491 next_inline_clone && next_inline_clone->decl != node->decl;
1492 next_inline_clone = next_inline_clone->next_sibling_clone)
1495 /* If there is an inline clone of the node being removed, we need
1496 to put it into the position of the removed node and reorganize all
1497 other clones to be based on it. */
1498 if (next_inline_clone)
1500 struct cgraph_node *n;
1501 struct cgraph_node *new_clones;
1503 *slot = next_inline_clone;
1505 /* Unlink inline clone from the list of clones of removed node. */
1506 if (next_inline_clone->next_sibling_clone)
1507 next_inline_clone->next_sibling_clone->prev_sibling_clone
1508 = next_inline_clone->prev_sibling_clone;
1509 if (next_inline_clone->prev_sibling_clone)
1511 gcc_assert (node->clones != next_inline_clone);
1512 next_inline_clone->prev_sibling_clone->next_sibling_clone
1513 = next_inline_clone->next_sibling_clone;
1515 else
1517 gcc_assert (node->clones == next_inline_clone);
1518 node->clones = next_inline_clone->next_sibling_clone;
1521 new_clones = node->clones;
1522 node->clones = NULL;
1524 /* Copy clone info. */
1525 next_inline_clone->clone = node->clone;
1527 /* Now place it into the clone tree at the same level as NODE. */
1528 next_inline_clone->clone_of = node->clone_of;
1529 next_inline_clone->prev_sibling_clone = NULL;
1530 next_inline_clone->next_sibling_clone = NULL;
1531 if (node->clone_of)
1533 if (node->clone_of->clones)
1534 node->clone_of->clones->prev_sibling_clone = next_inline_clone;
1535 next_inline_clone->next_sibling_clone = node->clone_of->clones;
1536 node->clone_of->clones = next_inline_clone;
1539 /* Merge the clone list. */
1540 if (new_clones)
1542 if (!next_inline_clone->clones)
1543 next_inline_clone->clones = new_clones;
1544 else
1546 n = next_inline_clone->clones;
1547 while (n->next_sibling_clone)
1548 n = n->next_sibling_clone;
1549 n->next_sibling_clone = new_clones;
1550 new_clones->prev_sibling_clone = n;
1554 /* Update clone_of pointers. */
1555 n = new_clones;
1556 while (n)
1558 n->clone_of = next_inline_clone;
1559 n = n->next_sibling_clone;
1562 else
1564 htab_clear_slot (cgraph_hash, slot);
1565 kill_body = true;
1569 if (node->prev_sibling_clone)
1570 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1571 else if (node->clone_of)
1572 node->clone_of->clones = node->next_sibling_clone;
1573 if (node->next_sibling_clone)
1574 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1575 if (node->clones)
1577 struct cgraph_node *n, *next;
1579 if (node->clone_of)
1581 for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
1582 n->clone_of = node->clone_of;
1583 n->clone_of = node->clone_of;
1584 n->next_sibling_clone = node->clone_of->clones;
1585 if (node->clone_of->clones)
1586 node->clone_of->clones->prev_sibling_clone = n;
1587 node->clone_of->clones = node->clones;
1589 else
1591 /* We are removing a node with clones. This makes the clones inconsistent,
1592 but assume they will be removed subsequently and just keep the clone
1593 tree intact. This can happen in unreachable function removal, since
1594 we remove unreachable functions in random order, not by a bottom-up
1595 walk of clone trees. */
1596 for (n = node->clones; n; n = next)
1598 next = n->next_sibling_clone;
1599 n->next_sibling_clone = NULL;
1600 n->prev_sibling_clone = NULL;
1601 n->clone_of = NULL;
1606 while (node->same_body)
1607 cgraph_remove_same_body_alias (node->same_body);
1609 if (node->same_comdat_group)
1611 struct cgraph_node *prev;
1612 for (prev = node->same_comdat_group;
1613 prev->same_comdat_group != node;
1614 prev = prev->same_comdat_group)
1616 if (node->same_comdat_group == prev)
1617 prev->same_comdat_group = NULL;
1618 else
1619 prev->same_comdat_group = node->same_comdat_group;
1620 node->same_comdat_group = NULL;
1623 /* While all the clones are removed after being processed, the function
1624 itself is kept in the cgraph even after it is compiled. Check whether
1625 we are done with this body and reclaim it proactively if this is the case. */
1627 if (!kill_body && *slot)
1629 struct cgraph_node *n = (struct cgraph_node *) *slot;
1630 if (!n->clones && !n->clone_of && !n->global.inlined_to
1631 && (cgraph_global_info_ready
1632 && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl)
1633 || n->in_other_partition)))
1634 kill_body = true;
1636 if (assembler_name_hash)
1638 tree name = DECL_ASSEMBLER_NAME (node->decl);
1639 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1640 decl_assembler_name_hash (name),
1641 NO_INSERT);
1642 /* Inline clones are not hashed. */
1643 if (slot && *slot == node)
1644 htab_clear_slot (assembler_name_hash, slot);
1647 if (kill_body)
1648 cgraph_release_function_body (node);
1649 node->decl = NULL;
1650 if (node->call_site_hash)
1652 htab_delete (node->call_site_hash);
1653 node->call_site_hash = NULL;
1655 cgraph_n_nodes--;
1657 /* Clear out the node to NULL all pointers and add the node to the free
1658 list. */
1659 memset (node, 0, sizeof(*node));
1660 node->uid = uid;
1661 NEXT_FREE_NODE (node) = free_nodes;
1662 free_nodes = node;
1665 /* Remove NODE from the cgraph together with all inline clones inlined into it. */
1667 void
1668 cgraph_remove_node_and_inline_clones (struct cgraph_node *node)
1670 struct cgraph_edge *e, *next;
1671 for (e = node->callees; e; e = next)
1673 next = e->next_callee;
1674 if (!e->inline_failed)
1675 cgraph_remove_node_and_inline_clones (e->callee);
1677 cgraph_remove_node (node);
1680 /* Notify finalize_compilation_unit that given node is reachable. */
1682 void
1683 cgraph_mark_reachable_node (struct cgraph_node *node)
1685 if (!node->reachable && node->local.finalized)
1687 if (cgraph_global_info_ready)
1689 /* Verify that the function does not appear to be needed out of the blue
1690 during the optimization process. This can happen for extern inlines
1691 whose bodies were removed after inlining. */
1692 gcc_assert ((node->analyzed || node->in_other_partition
1693 || DECL_EXTERNAL (node->decl)));
1695 else
1696 notice_global_symbol (node->decl);
1697 node->reachable = 1;
1699 node->next_needed = cgraph_nodes_queue;
1700 cgraph_nodes_queue = node;
1704 /* Likewise indicate that a node is needed, i.e. reachable via some
1705 external means. */
1707 void
1708 cgraph_mark_needed_node (struct cgraph_node *node)
1710 node->needed = 1;
1711 gcc_assert (!node->global.inlined_to);
1712 cgraph_mark_reachable_node (node);
1715 /* Likewise indicate that a node has its address taken. */
1717 void
1718 cgraph_mark_address_taken_node (struct cgraph_node *node)
1720 cgraph_mark_reachable_node (node);
1721 node->address_taken = 1;
1724 /* Return local info for the compiled function. */
1726 struct cgraph_local_info *
1727 cgraph_local_info (tree decl)
1729 struct cgraph_node *node;
1731 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1732 node = cgraph_node (decl);
1733 return &node->local;
1736 /* Return global info for the compiled function. */
1738 struct cgraph_global_info *
1739 cgraph_global_info (tree decl)
1741 struct cgraph_node *node;
1743 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1744 node = cgraph_node (decl);
1745 return &node->global;
1748 /* Return RTL info for the compiled function. */
1750 struct cgraph_rtl_info *
1751 cgraph_rtl_info (tree decl)
1753 struct cgraph_node *node;
1755 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1756 node = cgraph_node (decl);
1757 if (decl != current_function_decl
1758 && !TREE_ASM_WRITTEN (node->decl))
1759 return NULL;
1760 return &node->rtl;
1763 /* Return a string describing the failure REASON. */
1765 const char*
1766 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1768 #undef DEFCIFCODE
1769 #define DEFCIFCODE(code, string) string,
1771 static const char *cif_string_table[CIF_N_REASONS] = {
1772 #include "cif-code.def"
1775 /* Signedness of an enum type is implementation defined, so cast it
1776 to unsigned before testing. */
1777 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1778 return cif_string_table[reason];
1781 /* Return name of the node used in debug output. */
1782 const char *
1783 cgraph_node_name (struct cgraph_node *node)
1785 return lang_hooks.decl_printable_name (node->decl, 2);
1788 /* Names used to print out the availability enum. */
1789 const char * const cgraph_availability_names[] =
1790 {"unset", "not_available", "overwritable", "available", "local"};
1793 /* Dump call graph node NODE to file F. */
1795 void
1796 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1798 struct cgraph_edge *edge;
1799 int indirect_calls_count = 0;
1801 fprintf (f, "%s/%i(%i)", cgraph_node_name (node), node->uid,
1802 node->pid);
1803 dump_addr (f, " @", (void *)node);
1804 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
1805 fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
1806 if (node->global.inlined_to)
1807 fprintf (f, " (inline copy in %s/%i)",
1808 cgraph_node_name (node->global.inlined_to),
1809 node->global.inlined_to->uid);
1810 if (node->clone_of)
1811 fprintf (f, " (clone of %s/%i)",
1812 cgraph_node_name (node->clone_of),
1813 node->clone_of->uid);
1814 if (cgraph_function_flags_ready)
1815 fprintf (f, " availability:%s",
1816 cgraph_availability_names [cgraph_function_body_availability (node)]);
1817 if (node->analyzed)
1818 fprintf (f, " analyzed");
1819 if (node->in_other_partition)
1820 fprintf (f, " in_other_partition");
1821 if (node->count)
1822 fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
1823 (HOST_WIDEST_INT)node->count);
1824 if (node->local.inline_summary.self_time)
1825 fprintf (f, " %i time, %i benefit", node->local.inline_summary.self_time,
1826 node->local.inline_summary.time_inlining_benefit);
1827 if (node->global.time && node->global.time
1828 != node->local.inline_summary.self_time)
1829 fprintf (f, " (%i after inlining)", node->global.time);
1830 if (node->local.inline_summary.self_size)
1831 fprintf (f, " %i size, %i benefit", node->local.inline_summary.self_size,
1832 node->local.inline_summary.size_inlining_benefit);
1833 if (node->global.size && node->global.size
1834 != node->local.inline_summary.self_size)
1835 fprintf (f, " (%i after inlining)", node->global.size);
1836 if (node->local.inline_summary.estimated_self_stack_size)
1837 fprintf (f, " %i bytes stack usage", (int)node->local.inline_summary.estimated_self_stack_size);
1838 if (node->global.estimated_stack_size != node->local.inline_summary.estimated_self_stack_size)
1839 fprintf (f, " %i bytes after inlining", (int)node->global.estimated_stack_size);
1840 if (node->origin)
1841 fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
1842 if (node->needed)
1843 fprintf (f, " needed");
1844 if (node->address_taken)
1845 fprintf (f, " address_taken");
1846 else if (node->reachable)
1847 fprintf (f, " reachable");
1848 else if (node->reachable_from_other_partition)
1849 fprintf (f, " reachable_from_other_partition");
1850 if (gimple_has_body_p (node->decl))
1851 fprintf (f, " body");
1852 if (node->process)
1853 fprintf (f, " process");
1854 if (node->local.local)
1855 fprintf (f, " local");
1856 if (node->local.externally_visible)
1857 fprintf (f, " externally_visible");
1858 if (node->local.finalized)
1859 fprintf (f, " finalized");
1860 if (node->local.disregard_inline_limits)
1861 fprintf (f, " always_inline");
1862 else if (node->local.inlinable)
1863 fprintf (f, " inlinable");
1864 else if (node->local.versionable)
1865 fprintf (f, " versionable");
1866 if (node->local.redefined_extern_inline)
1867 fprintf (f, " redefined_extern_inline");
1868 if (TREE_ASM_WRITTEN (node->decl))
1869 fprintf (f, " asm_written");
1871 fprintf (f, "\n called by: ");
1872 for (edge = node->callers; edge; edge = edge->next_caller)
1874 fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
1875 edge->caller->uid);
1876 if (edge->count)
1877 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1878 (HOST_WIDEST_INT)edge->count);
1879 if (edge->frequency)
1880 fprintf (f, "(%.2f per call) ",
1881 edge->frequency / (double)CGRAPH_FREQ_BASE);
1882 if (!edge->inline_failed)
1883 fprintf(f, "(inlined) ");
1884 if (edge->indirect_inlining_edge)
1885 fprintf(f, "(indirect_inlining) ");
1886 if (edge->can_throw_external)
1887 fprintf(f, "(can throw external) ");
1890 fprintf (f, "\n calls: ");
1891 for (edge = node->callees; edge; edge = edge->next_callee)
1893 fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
1894 edge->callee->uid);
1895 if (!edge->inline_failed)
1896 fprintf(f, "(inlined) ");
1897 if (edge->indirect_inlining_edge)
1898 fprintf(f, "(indirect_inlining) ");
1899 if (edge->count)
1900 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1901 (HOST_WIDEST_INT)edge->count);
1902 if (edge->frequency)
1903 fprintf (f, "(%.2f per call) ",
1904 edge->frequency / (double)CGRAPH_FREQ_BASE);
1905 if (edge->loop_nest)
1906 fprintf (f, "(nested in %i loops) ", edge->loop_nest);
1907 if (edge->can_throw_external)
1908 fprintf(f, "(can throw external) ");
1910 fprintf (f, "\n");
1911 fprintf (f, " References: ");
1912 ipa_dump_references (f, &node->ref_list);
1913 fprintf (f, " Refering this function: ");
1914 ipa_dump_refering (f, &node->ref_list);
1916 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1917 indirect_calls_count++;
1918 if (indirect_calls_count)
1919 fprintf (f, " has %i outgoing edges for indirect calls.\n",
1920 indirect_calls_count);
1922 if (node->same_body)
1924 struct cgraph_node *n;
1925 fprintf (f, " aliases & thunks:");
1926 for (n = node->same_body; n; n = n->next)
1928 fprintf (f, " %s/%i", cgraph_node_name (n), n->uid);
1929 if (n->thunk.thunk_p)
1931 fprintf (f, " (thunk of %s fixed ofset %i virtual value %i has "
1932 "virtual offset %i",
1933 lang_hooks.decl_printable_name (n->thunk.alias, 2),
1934 (int)n->thunk.fixed_offset,
1935 (int)n->thunk.virtual_value,
1936 (int)n->thunk.virtual_offset_p);
1937 fprintf (f, ")");
1939 if (DECL_ASSEMBLER_NAME_SET_P (n->decl))
1940 fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (n->decl)));
1942 fprintf (f, "\n");
1947 /* Dump call graph node NODE to stderr. */
1949 DEBUG_FUNCTION void
1950 debug_cgraph_node (struct cgraph_node *node)
1952 dump_cgraph_node (stderr, node);
1956 /* Dump the callgraph to file F. */
1958 void
1959 dump_cgraph (FILE *f)
1961 struct cgraph_node *node;
1963 fprintf (f, "callgraph:\n\n");
1964 for (node = cgraph_nodes; node; node = node->next)
1965 dump_cgraph_node (f, node);
1969 /* Dump the call graph to stderr. */
1971 DEBUG_FUNCTION void
1972 debug_cgraph (void)
1974 dump_cgraph (stderr);
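
/* Illustrative sketch, not part of the original interface: a pass that only
   wants to dump the nodes flagged as needed could walk cgraph_nodes
   directly.  The helper name and the FILE * argument are hypothetical.  */

static void
dump_needed_nodes (FILE *f)
{
  struct cgraph_node *node;

  /* Dump only the nodes that carry the needed flag.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (node->needed)
      dump_cgraph_node (f, node);
}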
1978 /* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables. */
1980 void
1981 change_decl_assembler_name (tree decl, tree name)
1983 struct cgraph_node *node;
1984 void **slot;
1985 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
1986 SET_DECL_ASSEMBLER_NAME (decl, name);
1987 else
1989 if (name == DECL_ASSEMBLER_NAME (decl))
1990 return;
1992 if (assembler_name_hash
1993 && TREE_CODE (decl) == FUNCTION_DECL
1994 && (node = cgraph_get_node_or_alias (decl)) != NULL)
1996 tree old_name = DECL_ASSEMBLER_NAME (decl);
1997 slot = htab_find_slot_with_hash (assembler_name_hash, old_name,
1998 decl_assembler_name_hash (old_name),
1999 NO_INSERT);
2000 /* Inline clones are not hashed. */
2001 if (slot && *slot == node)
2002 htab_clear_slot (assembler_name_hash, slot);
2004 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
2005 && DECL_RTL_SET_P (decl))
2006 warning (0, "%D renamed after being referenced in assembly", decl);
2008 SET_DECL_ASSEMBLER_NAME (decl, name);
2010 if (assembler_name_hash
2011 && TREE_CODE (decl) == FUNCTION_DECL
2012 && (node = cgraph_get_node_or_alias (decl)) != NULL)
2014 slot = htab_find_slot_with_hash (assembler_name_hash, name,
2015 decl_assembler_name_hash (name),
2016 INSERT);
2017 gcc_assert (!*slot);
2018 *slot = node;
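
/* Illustrative sketch: renaming a function through the routine above keeps
   the assembler name hash consistent.  FNDECL and the literal new name are
   hypothetical; real callers usually derive the name from the mangler or
   from clone_function_name below.  */

static void
rename_function_example (tree fndecl)
{
  /* change_decl_assembler_name warns if the old name was already
     referenced from assembly output.  */
  change_decl_assembler_name (fndecl, get_identifier ("renamed_example"));
}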
2022 /* Add a top-level asm statement to the list. */
2024 struct cgraph_asm_node *
2025 cgraph_add_asm_node (tree asm_str)
2027 struct cgraph_asm_node *node;
2029 node = ggc_alloc_cleared_cgraph_asm_node ();
2030 node->asm_str = asm_str;
2031 node->order = cgraph_order++;
2032 node->next = NULL;
2033 if (cgraph_asm_nodes == NULL)
2034 cgraph_asm_nodes = node;
2035 else
2036 cgraph_asm_last_node->next = node;
2037 cgraph_asm_last_node = node;
2038 return node;
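
/* Illustrative sketch: front ends pass the STRING_CST of a top-level asm
   here; building one by hand would look like this.  The marker text and
   helper name are hypothetical.  */

static void
add_marker_asm_example (void)
{
  const char *s = "\t.globl\texample_marker";

  /* The returned node records the string together with its source order.  */
  cgraph_add_asm_node (build_string (strlen (s), s));
}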
2041 /* Return true when the DECL can possibly be inlined. */
2042 bool
2043 cgraph_function_possibly_inlined_p (tree decl)
2045 if (!cgraph_global_info_ready)
2046 return !DECL_UNINLINABLE (decl);
2047 return DECL_POSSIBLY_INLINED (decl);
2050 /* Create a clone of edge E in node N; the cloned call is represented by CALL_STMT in the callgraph. */
2051 struct cgraph_edge *
2052 cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
2053 gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
2054 int freq_scale, int loop_nest, bool update_original)
2056 struct cgraph_edge *new_edge;
2057 gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
2058 gcov_type freq;
2060 /* We do not want to ignore loop nest after frequency drops to 0. */
2061 if (!freq_scale)
2062 freq_scale = 1;
2063 freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
2064 if (freq > CGRAPH_FREQ_MAX)
2065 freq = CGRAPH_FREQ_MAX;
2067 if (e->indirect_unknown_callee)
2069 tree decl;
2071 if (call_stmt && (decl = gimple_call_fndecl (call_stmt)))
2073 struct cgraph_node *callee = cgraph_node (decl);
2074 new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq,
2075 e->loop_nest + loop_nest);
2077 else
2079 new_edge = cgraph_create_indirect_edge (n, call_stmt,
2080 e->indirect_info->ecf_flags,
2081 count, freq,
2082 e->loop_nest + loop_nest);
2083 *new_edge->indirect_info = *e->indirect_info;
2086 else
2087 new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
2088 e->loop_nest + loop_nest);
2090 new_edge->inline_failed = e->inline_failed;
2091 new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
2092 new_edge->lto_stmt_uid = stmt_uid;
2093 if (update_original)
2095 e->count -= new_edge->count;
2096 if (e->count < 0)
2097 e->count = 0;
2099 cgraph_call_edge_duplication_hooks (e, new_edge);
2100 return new_edge;
2103 /* Create a node representing a clone of N executed COUNT times.  Decrease
2104 the execution counts of the original node too.
2105 The new clone will have its decl set to DECL, which may or may not be the
2106 same as the decl of N.
2108 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
2109 function's profile to reflect the fact that part of the execution is handled
2110 by the new clone. */
2111 struct cgraph_node *
2112 cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
2113 int loop_nest, bool update_original,
2114 VEC(cgraph_edge_p,heap) *redirect_callers)
2116 struct cgraph_node *new_node = cgraph_create_node ();
2117 struct cgraph_edge *e;
2118 gcov_type count_scale;
2119 unsigned i;
2121 new_node->decl = decl;
2122 new_node->origin = n->origin;
2123 if (new_node->origin)
2125 new_node->next_nested = new_node->origin->nested;
2126 new_node->origin->nested = new_node;
2128 new_node->analyzed = n->analyzed;
2129 new_node->local = n->local;
2130 new_node->local.externally_visible = false;
2131 new_node->local.local = true;
2132 new_node->local.vtable_method = false;
2133 new_node->global = n->global;
2134 new_node->rtl = n->rtl;
2135 new_node->count = count;
2136 new_node->frequency = n->frequency;
2137 new_node->clone = n->clone;
2138 new_node->clone.tree_map = 0;
2139 if (n->count)
2141 if (new_node->count > n->count)
2142 count_scale = REG_BR_PROB_BASE;
2143 else
2144 count_scale = new_node->count * REG_BR_PROB_BASE / n->count;
2146 else
2147 count_scale = 0;
2148 if (update_original)
2150 n->count -= count;
2151 if (n->count < 0)
2152 n->count = 0;
2155 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
2157 /* Redirect calls to the old version node to point to its new
2158 version. */
2159 cgraph_redirect_edge_callee (e, new_node);
2163 for (e = n->callees;e; e=e->next_callee)
2164 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
2165 count_scale, freq, loop_nest, update_original);
2167 for (e = n->indirect_calls; e; e = e->next_callee)
2168 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
2169 count_scale, freq, loop_nest, update_original);
2170 ipa_clone_references (new_node, NULL, &n->ref_list);
2172 new_node->next_sibling_clone = n->clones;
2173 if (n->clones)
2174 n->clones->prev_sibling_clone = new_node;
2175 n->clones = new_node;
2176 new_node->clone_of = n;
2178 cgraph_call_node_duplication_hooks (n, new_node);
2179 if (n->decl != decl)
2181 struct cgraph_node **slot;
2182 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, new_node, INSERT);
2183 gcc_assert (!*slot);
2184 *slot = new_node;
2185 if (assembler_name_hash)
2187 void **aslot;
2188 tree name = DECL_ASSEMBLER_NAME (decl);
2190 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
2191 decl_assembler_name_hash (name),
2192 INSERT);
2193 gcc_assert (!*aslot);
2194 *aslot = new_node;
2197 return new_node;
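
/* Illustrative sketch: an inline-style clone that keeps the original DECL
   and inherits the whole profile.  Passing update_original == false leaves
   the counts of N untouched, and a NULL redirect_callers vector redirects
   nothing.  The helper name is hypothetical.  */

static struct cgraph_node *
clone_with_full_profile (struct cgraph_node *n)
{
  return cgraph_clone_node (n, n->decl, n->count, CGRAPH_FREQ_BASE,
                            0, false, NULL);
}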
2200 /* Create a new name for clone of DECL, add SUFFIX. Returns an identifier. */
2202 static GTY(()) unsigned int clone_fn_id_num;
2204 tree
2205 clone_function_name (tree decl, const char *suffix)
2207 tree name = DECL_ASSEMBLER_NAME (decl);
2208 size_t len = IDENTIFIER_LENGTH (name);
2209 char *tmp_name, *prefix;
2211 prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
2212 memcpy (prefix, IDENTIFIER_POINTER (name), len);
2213 strcpy (prefix + len + 1, suffix);
2214 #ifndef NO_DOT_IN_LABEL
2215 prefix[len] = '.';
2216 #elif !defined NO_DOLLAR_IN_LABEL
2217 prefix[len] = '$';
2218 #else
2219 prefix[len] = '_';
2220 #endif
2221 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
2222 return get_identifier (tmp_name);
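
/* Illustrative sketch: with a suffix such as "constprop" (as the IPA
   constant-propagation clones use), the routine above yields private names
   like foo.constprop.0 on typical targets, with '$' or '_' replacing the
   dot where labels forbid it.  */

static tree
constprop_clone_name_example (tree decl)
{
  return clone_function_name (decl, "constprop");
}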
2225 /* Create a callgraph node clone with a new declaration.  The actual body will
2226 be copied later, at the compilation stage.
2228 TODO: after merging in ipa-sra, use function call notes instead of the
2229 args_to_skip bitmap interface.  */
2231 struct cgraph_node *
2232 cgraph_create_virtual_clone (struct cgraph_node *old_node,
2233 VEC(cgraph_edge_p,heap) *redirect_callers,
2234 VEC(ipa_replace_map_p,gc) *tree_map,
2235 bitmap args_to_skip,
2236 const char * suffix)
2238 tree old_decl = old_node->decl;
2239 struct cgraph_node *new_node = NULL;
2240 tree new_decl;
2241 size_t i;
2242 struct ipa_replace_map *map;
2244 #ifdef ENABLE_CHECKING
2245 if (!flag_wpa)
2246 gcc_assert (tree_versionable_function_p (old_decl));
2247 #endif
2249 /* Make a new FUNCTION_DECL tree node */
2250 if (!args_to_skip)
2251 new_decl = copy_node (old_decl);
2252 else
2253 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
2254 DECL_STRUCT_FUNCTION (new_decl) = NULL;
2256 /* Generate a new name for the new version. */
2257 DECL_NAME (new_decl) = clone_function_name (old_decl, suffix);
2258 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2259 SET_DECL_RTL (new_decl, NULL);
2261 new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
2262 CGRAPH_FREQ_BASE, 0, false,
2263 redirect_callers);
2264 /* Update the properties.
2265 Make the clone visible only within this translation unit.  Make sure
2266 that it is not weak either.
2267 ??? We cannot use COMDAT linkage because there is no
2268 ABI support for this. */
2269 DECL_EXTERNAL (new_node->decl) = 0;
2270 if (DECL_ONE_ONLY (old_decl))
2271 DECL_SECTION_NAME (new_node->decl) = NULL;
2272 DECL_COMDAT_GROUP (new_node->decl) = 0;
2273 TREE_PUBLIC (new_node->decl) = 0;
2274 DECL_COMDAT (new_node->decl) = 0;
2275 DECL_WEAK (new_node->decl) = 0;
2276 new_node->clone.tree_map = tree_map;
2277 new_node->clone.args_to_skip = args_to_skip;
2278 FOR_EACH_VEC_ELT (ipa_replace_map_p, tree_map, i, map)
2280 tree var = map->new_tree;
2282 STRIP_NOPS (var);
2283 if (TREE_CODE (var) != ADDR_EXPR)
2284 continue;
2285 var = get_base_var (var);
2286 if (!var)
2287 continue;
2289 /* Record references of the future statement initializing the constant
2290 argument. */
2291 if (TREE_CODE (var) == FUNCTION_DECL)
2292 ipa_record_reference (new_node, NULL, cgraph_node (var),
2293 NULL, IPA_REF_ADDR, NULL);
2294 else if (TREE_CODE (var) == VAR_DECL)
2295 ipa_record_reference (new_node, NULL, NULL, varpool_node (var),
2296 IPA_REF_ADDR, NULL);
2298 if (!args_to_skip)
2299 new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
2300 else if (old_node->clone.combined_args_to_skip)
2302 int newi = 0, oldi = 0;
2303 tree arg;
2304 bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
2305 struct cgraph_node *orig_node;
2306 for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
2308 for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = DECL_CHAIN (arg), oldi++)
2310 if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
2312 bitmap_set_bit (new_args_to_skip, oldi);
2313 continue;
2315 if (bitmap_bit_p (args_to_skip, newi))
2316 bitmap_set_bit (new_args_to_skip, oldi);
2317 newi++;
2319 new_node->clone.combined_args_to_skip = new_args_to_skip;
2321 else
2322 new_node->clone.combined_args_to_skip = args_to_skip;
2323 new_node->local.externally_visible = 0;
2324 new_node->local.local = 1;
2325 new_node->lowered = true;
2326 new_node->reachable = true;
2329 return new_node;
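
/* Illustrative sketch: a virtual clone of OLD_NODE with its first formal
   parameter dropped and no constant replacements, roughly what an
   IPA-SRA-style transform would request.  The suffix and helper name are
   hypothetical.  */

static struct cgraph_node *
clone_without_first_arg_example (struct cgraph_node *old_node)
{
  bitmap args_to_skip = BITMAP_GGC_ALLOC ();

  /* Bit N set means formal parameter N is removed in the clone.  */
  bitmap_set_bit (args_to_skip, 0);
  return cgraph_create_virtual_clone (old_node, NULL, NULL,
                                      args_to_skip, "arg0_removed");
}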
2332 /* NODE is no longer a nested function; update the cgraph accordingly. */
2333 void
2334 cgraph_unnest_node (struct cgraph_node *node)
2336 struct cgraph_node **node2 = &node->origin->nested;
2337 gcc_assert (node->origin);
2339 while (*node2 != node)
2340 node2 = &(*node2)->next_nested;
2341 *node2 = node->next_nested;
2342 node->origin = NULL;
2345 /* Return function availability. See cgraph.h for description of individual
2346 return values. */
2347 enum availability
2348 cgraph_function_body_availability (struct cgraph_node *node)
2350 enum availability avail;
2351 gcc_assert (cgraph_function_flags_ready);
2352 if (!node->analyzed)
2353 avail = AVAIL_NOT_AVAILABLE;
2354 else if (node->local.local)
2355 avail = AVAIL_LOCAL;
2356 else if (!node->local.externally_visible)
2357 avail = AVAIL_AVAILABLE;
2358 /* Inline functions are safe to be analyzed even if their symbol can
2359 be overwritten at runtime.  It is not meaningful to enforce any sane
2360 behaviour when replacing an inline function by a different body. */
2361 else if (DECL_DECLARED_INLINE_P (node->decl))
2362 avail = AVAIL_AVAILABLE;
2364 /* If the function can be overwritten, return OVERWRITABLE.  Take
2365 care of at least two notable extensions - the COMDAT functions
2366 used to share template instantiations in C++ (this is symmetric
2367 to the code in cp_cannot_inline_tree_fn and should probably be shared,
2368 and the inlinability hooks completely eliminated).
2370 ??? Does the C++ one definition rule allow us to always return
2371 AVAIL_AVAILABLE here?  That would be a good reason to preserve this
2372 bit. */
2374 else if (decl_replaceable_p (node->decl) && !DECL_EXTERNAL (node->decl))
2375 avail = AVAIL_OVERWRITABLE;
2376 else avail = AVAIL_AVAILABLE;
2378 return avail;
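
/* Illustrative sketch: IPA analyses typically trust NODE's body only when
   it cannot be replaced at link time; assuming the enum ordering declared
   in cgraph.h, that is anything above AVAIL_OVERWRITABLE.  The helper name
   is hypothetical.  */

static bool
body_is_trustworthy_example (struct cgraph_node *node)
{
  return cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE;
}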
2381 /* Add the function FNDECL to the call graph.
2382 Unlike cgraph_finalize_function, this function is intended to be used
2383 by the middle end and allows insertion of a new function at an arbitrary
2384 point of compilation. The function can be in high, low or SSA form
2385 GIMPLE.
2387 The function is assumed to be reachable and to have its address taken (so no
2388 API-breaking optimizations are performed on it).
2390 The main work done by this function is to enqueue the function for later
2391 processing, to avoid the need for the passes to be re-entrant. */
2393 void
2394 cgraph_add_new_function (tree fndecl, bool lowered)
2396 struct cgraph_node *node;
2397 switch (cgraph_state)
2399 case CGRAPH_STATE_CONSTRUCTION:
2400 /* Just enqueue function to be processed at nearest occurrence. */
2401 node = cgraph_node (fndecl);
2402 node->next_needed = cgraph_new_nodes;
2403 if (lowered)
2404 node->lowered = true;
2405 cgraph_new_nodes = node;
2406 break;
2408 case CGRAPH_STATE_IPA:
2409 case CGRAPH_STATE_IPA_SSA:
2410 case CGRAPH_STATE_EXPANSION:
2411 /* Bring the function into finalized state and enqueue for later
2412 analyzing and compilation. */
2413 node = cgraph_node (fndecl);
2414 node->local.local = false;
2415 node->local.finalized = true;
2416 node->reachable = node->needed = true;
2417 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
2419 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2420 current_function_decl = fndecl;
2421 gimple_register_cfg_hooks ();
2422 tree_lowering_passes (fndecl);
2423 bitmap_obstack_initialize (NULL);
2424 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2425 execute_pass_list (pass_early_local_passes.pass.sub);
2426 bitmap_obstack_release (NULL);
2427 pop_cfun ();
2428 current_function_decl = NULL;
2430 lowered = true;
2432 if (lowered)
2433 node->lowered = true;
2434 node->next_needed = cgraph_new_nodes;
2435 cgraph_new_nodes = node;
2436 break;
2438 case CGRAPH_STATE_FINISHED:
2439 /* At the very end of compilation we have to do all the work up
2440 to expansion. */
2441 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2442 current_function_decl = fndecl;
2443 gimple_register_cfg_hooks ();
2444 if (!lowered)
2445 tree_lowering_passes (fndecl);
2446 bitmap_obstack_initialize (NULL);
2447 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2448 execute_pass_list (pass_early_local_passes.pass.sub);
2449 bitmap_obstack_release (NULL);
2450 tree_rest_of_compilation (fndecl);
2451 pop_cfun ();
2452 current_function_decl = NULL;
2453 break;
2456 /* Set a personality if required and we already passed EH lowering. */
2457 if (lowered
2458 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
2459 == eh_personality_lang))
2460 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
2463 /* Return true if NODE can be made local (an API-changing transformation).
2464 Extern inline functions and C++ COMDAT functions can be made local
2465 at the expense of possible code size growth if the function is used in
2466 multiple compilation units. */
2467 bool
2468 cgraph_node_can_be_local_p (struct cgraph_node *node)
2470 return (!node->needed && !node->address_taken
2471 && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
2472 || !node->local.externally_visible));
2475 /* Make DECL local. FIXME: We shouldn't need to mess with rtl this early,
2476 but other code such as notice_global_symbol generates rtl. */
2477 void
2478 cgraph_make_decl_local (tree decl)
2480 rtx rtl, symbol;
2482 if (TREE_CODE (decl) == VAR_DECL)
2483 DECL_COMMON (decl) = 0;
2484 else gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
2486 if (DECL_COMDAT (decl))
2488 /* It is possible that we are linking against library defining same COMDAT
2489 function. To avoid conflict we need to rename our local name of the
2490 function just in the case WHOPR partitioning decide to make it hidden
2491 to avoid cross partition references. */
2492 if (flag_wpa)
2494 const char *old_name;
2496 old_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2497 if (TREE_CODE (decl) == FUNCTION_DECL)
2499 struct cgraph_node *node = cgraph_get_node_or_alias (decl);
2500 change_decl_assembler_name (decl,
2501 clone_function_name (decl, "local"));
2502 if (node->local.lto_file_data)
2503 lto_record_renamed_decl (node->local.lto_file_data,
2504 old_name,
2505 IDENTIFIER_POINTER
2506 (DECL_ASSEMBLER_NAME (decl)));
2508 else if (TREE_CODE (decl) == VAR_DECL)
2510 struct varpool_node *vnode = varpool_get_node (decl);
2511 /* change_decl_assembler_name will warn here on vtables because
2512 C++ frontend still sets TREE_SYMBOL_REFERENCED on them. */
2513 SET_DECL_ASSEMBLER_NAME (decl,
2514 clone_function_name (decl, "local"));
2515 if (vnode->lto_file_data)
2516 lto_record_renamed_decl (vnode->lto_file_data,
2517 old_name,
2518 IDENTIFIER_POINTER
2519 (DECL_ASSEMBLER_NAME (decl)));
2522 DECL_SECTION_NAME (decl) = 0;
2523 DECL_COMDAT (decl) = 0;
2525 DECL_COMDAT_GROUP (decl) = 0;
2526 DECL_WEAK (decl) = 0;
2527 DECL_EXTERNAL (decl) = 0;
2528 TREE_PUBLIC (decl) = 0;
2529 if (!DECL_RTL_SET_P (decl))
2530 return;
2532 /* Update rtl flags. */
2533 make_decl_rtl (decl);
2535 rtl = DECL_RTL (decl);
2536 if (!MEM_P (rtl))
2537 return;
2539 symbol = XEXP (rtl, 0);
2540 if (GET_CODE (symbol) != SYMBOL_REF)
2541 return;
2543 SYMBOL_REF_WEAK (symbol) = DECL_WEAK (decl);
2546 /* Make NODE local. */
2547 void
2548 cgraph_make_node_local (struct cgraph_node *node)
2550 gcc_assert (cgraph_node_can_be_local_p (node));
2551 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2553 struct cgraph_node *alias;
2554 cgraph_make_decl_local (node->decl);
2556 for (alias = node->same_body; alias; alias = alias->next)
2557 cgraph_make_decl_local (alias->decl);
2559 node->local.externally_visible = false;
2560 node->local.local = true;
2561 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2562 gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
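
/* Illustrative sketch: the visibility code localizes a node only after
   cgraph_node_can_be_local_p above says it is safe; keeping both steps
   together preserves the AVAIL_LOCAL postcondition.  The helper name is
   hypothetical.  */

static void
try_localize_example (struct cgraph_node *node)
{
  if (cgraph_node_can_be_local_p (node))
    cgraph_make_node_local (node);
}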
2566 /* Set TREE_NOTHROW on NODE's decl and on the decls of NODE's same_body
2567 aliases, if any, to NOTHROW. */
2569 void
2570 cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
2572 struct cgraph_node *alias;
2573 TREE_NOTHROW (node->decl) = nothrow;
2574 for (alias = node->same_body; alias; alias = alias->next)
2575 TREE_NOTHROW (alias->decl) = nothrow;
2578 /* Set TREE_READONLY on NODE's decl and on the decls of NODE's same_body
2579 aliases, if any, to READONLY. */
2581 void
2582 cgraph_set_readonly_flag (struct cgraph_node *node, bool readonly)
2584 struct cgraph_node *alias;
2585 TREE_READONLY (node->decl) = readonly;
2586 for (alias = node->same_body; alias; alias = alias->next)
2587 TREE_READONLY (alias->decl) = readonly;
2590 /* Set DECL_PURE_P on NODE's decl and on the decls of NODE's same_body
2591 aliases, if any, to PURE. */
2593 void
2594 cgraph_set_pure_flag (struct cgraph_node *node, bool pure)
2596 struct cgraph_node *alias;
2597 DECL_PURE_P (node->decl) = pure;
2598 for (alias = node->same_body; alias; alias = alias->next)
2599 DECL_PURE_P (alias->decl) = pure;
2602 /* Set DECL_LOOPING_CONST_OR_PURE_P on NODE's decl and on the decls of
2603 NODE's same_body aliases, if any, to LOOPING_CONST_OR_PURE. */
2605 void
2606 cgraph_set_looping_const_or_pure_flag (struct cgraph_node *node,
2607 bool looping_const_or_pure)
2609 struct cgraph_node *alias;
2610 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping_const_or_pure;
2611 for (alias = node->same_body; alias; alias = alias->next)
2612 DECL_LOOPING_CONST_OR_PURE_P (alias->decl) = looping_const_or_pure;
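
/* Illustrative sketch: a pass in the style of ipa-pure-const that proved
   NODE pure but possibly non-terminating would record the result through
   the setters above, so same_body aliases stay in sync.  The helper name
   is hypothetical.  */

static void
mark_looping_pure_example (struct cgraph_node *node)
{
  cgraph_set_pure_flag (node, true);
  cgraph_set_looping_const_or_pure_flag (node, true);
}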
2615 /* See if the frequency of NODE can be updated based on frequencies of its
2616 callers. */
2617 bool
2618 cgraph_propagate_frequency (struct cgraph_node *node)
2620 bool maybe_unlikely_executed = true, maybe_executed_once = true;
2621 struct cgraph_edge *edge;
2622 if (!node->local.local)
2623 return false;
2624 gcc_assert (node->analyzed);
2625 if (node->frequency == NODE_FREQUENCY_HOT)
2626 return false;
2627 if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2628 return false;
2629 if (dump_file && (dump_flags & TDF_DETAILS))
2630 fprintf (dump_file, "Processing frequency %s\n", cgraph_node_name (node));
2631 for (edge = node->callers;
2632 edge && (maybe_unlikely_executed || maybe_executed_once);
2633 edge = edge->next_caller)
2635 if (!edge->frequency)
2636 continue;
2637 switch (edge->caller->frequency)
2639 case NODE_FREQUENCY_UNLIKELY_EXECUTED:
2640 break;
2641 case NODE_FREQUENCY_EXECUTED_ONCE:
2642 if (dump_file && (dump_flags & TDF_DETAILS))
2643 fprintf (dump_file, " Called by %s that is executed once\n", cgraph_node_name (node));
2644 maybe_unlikely_executed = false;
2645 if (edge->loop_nest)
2647 maybe_executed_once = false;
2648 if (dump_file && (dump_flags & TDF_DETAILS))
2649 fprintf (dump_file, " Called in loop\n");
2651 break;
2652 case NODE_FREQUENCY_HOT:
2653 case NODE_FREQUENCY_NORMAL:
2654 if (dump_file && (dump_flags & TDF_DETAILS))
2655 fprintf (dump_file, " Called by %s that is normal or hot\n", cgraph_node_name (node));
2656 maybe_unlikely_executed = false;
2657 maybe_executed_once = false;
2658 break;
2661 if (maybe_unlikely_executed)
2663 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
2664 if (dump_file)
2665 fprintf (dump_file, "Node %s promoted to unlikely executed.\n", cgraph_node_name (node));
2666 return true;
2668 if (maybe_executed_once && node->frequency != NODE_FREQUENCY_EXECUTED_ONCE)
2670 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2671 if (dump_file)
2672 fprintf (dump_file, "Node %s promoted to executed once.\n", cgraph_node_name (node));
2673 return true;
2675 return false;
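
/* Illustrative sketch: because demoting one node can enable demoting its
   local callers' callees, a caller may iterate the propagation above to a
   fixed point.  The driver below is hypothetical.  */

static void
propagate_all_frequencies_example (void)
{
  struct cgraph_node *node;
  bool changed = true;

  while (changed)
    {
      changed = false;
      for (node = cgraph_nodes; node; node = node->next)
        if (node->analyzed && cgraph_propagate_frequency (node))
          changed = true;
    }
}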
2678 /* Return true when NODE can not return or throw and thus
2679 it is safe to ignore its side effects for IPA analysis. */
2681 bool
2682 cgraph_node_cannot_return (struct cgraph_node *node)
2684 int flags = flags_from_decl_or_type (node->decl);
2685 if (!flag_exceptions)
2686 return (flags & ECF_NORETURN) != 0;
2687 else
2688 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2689 == (ECF_NORETURN | ECF_NOTHROW));
2692 /* Return true when the call of edge E cannot lead to a return from the caller
2693 and thus it is safe to ignore its side effects for IPA analysis
2694 when computing the side effects of the caller.
2695 FIXME: We could actually mark all edges that have no reaching
2696 path to EXIT_BLOCK_PTR or throw to get better results. */
2697 bool
2698 cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
2700 if (cgraph_node_cannot_return (e->caller))
2701 return true;
2702 if (e->indirect_unknown_callee)
2704 int flags = e->indirect_info->ecf_flags;
2705 if (!flag_exceptions)
2706 return (flags & ECF_NORETURN) != 0;
2707 else
2708 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2709 == (ECF_NORETURN | ECF_NOTHROW));
2711 else
2712 return cgraph_node_cannot_return (e->callee);
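
/* Illustrative sketch: when summarizing the caller's side effects, edges
   whose calls cannot lead back to the caller's return may be skipped.  The
   walker below is hypothetical.  */

static void
walk_relevant_calls_example (struct cgraph_node *node)
{
  struct cgraph_edge *e;

  for (e = node->callees; e; e = e->next_callee)
    if (!cgraph_edge_cannot_lead_to_return (e))
      {
        /* ... account for the callee's effects here ...  */
      }
}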
2715 /* Return true when function NODE can be removed from callgraph
2716 if all direct calls are eliminated. */
2718 bool
2719 cgraph_can_remove_if_no_direct_calls_and_refs_p (struct cgraph_node *node)
2721 /* When function is needed, we can not remove it. */
2722 if (node->needed || node->reachable_from_other_partition)
2723 return false;
2724 /* Only COMDAT functions can be removed if externally visible. */
2725 if (node->local.externally_visible
2726 && (!DECL_COMDAT (node->decl)
2727 || cgraph_used_from_object_file_p (node)))
2728 return false;
2729 /* Constructors and destructors are executed by the runtime, however
2730 we can get rid of all pure constructors and destructors. */
2731 if (DECL_STATIC_CONSTRUCTOR (node->decl)
2732 || DECL_STATIC_DESTRUCTOR (node->decl))
2734 int flags = flags_from_decl_or_type (node->decl);
2735 if (!optimize
2736 || !(flags & (ECF_CONST | ECF_PURE))
2737 || (flags & ECF_LOOPING_CONST_OR_PURE))
2738 return false;
2740 return true;
2743 /* Return true when function NODE can be expected to be removed
2744 from the program when direct calls in this compilation unit are removed.
2746 As a special case, COMDAT functions are
2747 cgraph_can_remove_if_no_direct_calls_p while they are not
2748 cgraph_only_called_directly_p (it is possible they are called from another
2749 unit).
2751 This function behaves as cgraph_only_called_directly_p because eliminating
2752 all uses of a COMDAT function does not necessarily make it disappear from
2753 the program unless we are compiling the whole program or we do LTO. In that
2754 case we know we win, since dynamic linking will not really discard the
2755 linkonce section. */
2757 bool
2758 cgraph_will_be_removed_from_program_if_no_direct_calls (struct cgraph_node *node)
2760 if (cgraph_used_from_object_file_p (node))
2761 return false;
2762 if (!in_lto_p && !flag_whole_program)
2763 return cgraph_only_called_directly_p (node);
2764 else
2765 return cgraph_can_remove_if_no_direct_calls_p (node);
2768 /* Return true when RESOLUTION indicates that the linker will use
2769 the symbol from non-LTO object files. */
2771 bool
2772 resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
2774 return (resolution == LDPR_PREVAILING_DEF
2775 || resolution == LDPR_PREEMPTED_REG
2776 || resolution == LDPR_RESOLVED_EXEC
2777 || resolution == LDPR_RESOLVED_DYN);
2780 /* Return true when NODE is known to be used from another (non-LTO) object file.
2781 Known only when doing LTO via the linker plugin. */
2783 bool
2784 cgraph_used_from_object_file_p (struct cgraph_node *node)
2786 struct cgraph_node *alias;
2788 if (!TREE_PUBLIC (node->decl))
2789 return false;
2790 if (resolution_used_from_other_file_p (node->resolution))
2791 return true;
2792 for (alias = node->same_body; alias; alias = alias->next)
2793 if (TREE_PUBLIC (alias->decl)
2794 && resolution_used_from_other_file_p (alias->resolution))
2795 return true;
2796 return false;
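
/* Illustrative sketch: combining the predicates above, a node is a
   candidate for whole-program removal only when no other object file pins
   it and the unit itself can eliminate the remaining direct calls.  The
   helper name is hypothetical.  */

static bool
removable_after_call_elimination_example (struct cgraph_node *node)
{
  return (!cgraph_used_from_object_file_p (node)
          && cgraph_can_remove_if_no_direct_calls_and_refs_p (node));
}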
2799 #include "gt-cgraph.h"