gcc/cgraph.c
1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains basic routines manipulating the call graph.
24 The callgraph:
26 The call-graph is a data structure designed for inter-procedural optimization
27 but it is also used in non-unit-at-a-time compilation to allow easier code
28 sharing.
30 The call-graph consists of nodes and edges represented via linked lists.
31 Each function (external or not) corresponds to a unique node.
33 The mapping from declarations to call-graph nodes is done using a hash table
34 based on DECL_UID. The call-graph nodes are created lazily using the
35 cgraph_node function when called for an unknown declaration.
37 The callgraph at the moment does not represent all indirect calls or calls
38 from other compilation units. Flag NEEDED is set for each node that may be
39 accessed in such an invisible way and it shall be considered an entry point
40 to the callgraph.
42 On the other hand, the callgraph currently does contain some edges for
43 indirect calls with unknown callees which can be accessed through
44 indirect_calls field of a node. It should be noted however that at the
45 moment only calls which are potential candidates for indirect inlining are
46 added there.
48 Interprocedural information:
50 The callgraph is the place to store data needed for interprocedural optimization.
51 All data structures are divided into three components: local_info that
52 is produced while analyzing the function, global_info that is the result
53 of walking the whole callgraph at the end of compilation and
54 rtl_info used by the RTL backend to propagate data from already compiled
55 functions to their callers.
57 Moreover, each node has a uid which can be used to keep information in
58 on-the-side arrays. UIDs are reused and therefore reasonably dense.
60 Inlining plans:
62 The function inlining information is decided in advance and maintained
63 in the callgraph as a so-called inline plan.
64 For each inlined call, the callee's node is cloned to represent the
65 new function copy produced by the inliner.
66 Each inlined call gets a unique corresponding clone node of the callee
67 and the data structure is updated while inlining is performed, so
68 the clones are eliminated and their callee edges redirected to the
69 caller.
71 Each edge has an "inline_failed" field. When the field is set to NULL,
72 the call will be inlined. When it is non-NULL, it contains the reason
73 why inlining wasn't performed. */
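/* Editorial note: the following is an illustrative sketch, not part of the
   original GCC sources.  An IPA pass typically walks the node list and the
   callee edges using only the globals and accessors defined in this file:

     struct cgraph_node *node;
     struct cgraph_edge *e;

     for (node = cgraph_nodes; node; node = node->next)
       for (e = node->callees; e; e = e->next_callee)
         fprintf (stderr, "%s -> %s\n",
                  cgraph_node_name (e->caller),
                  cgraph_node_name (e->callee));

   Indirect calls with unknown callees are kept on a separate list and can be
   visited the same way through node->indirect_calls.  */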
75 #include "config.h"
76 #include "system.h"
77 #include "coretypes.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "tree-inline.h"
81 #include "langhooks.h"
82 #include "hashtab.h"
83 #include "toplev.h"
84 #include "flags.h"
85 #include "ggc.h"
86 #include "debug.h"
87 #include "target.h"
88 #include "basic-block.h"
89 #include "cgraph.h"
90 #include "output.h"
91 #include "intl.h"
92 #include "gimple.h"
93 #include "tree-dump.h"
94 #include "tree-flow.h"
95 #include "value-prof.h"
96 #include "except.h"
97 #include "diagnostic-core.h"
98 #include "rtl.h"
99 #include "ipa-utils.h"
101 static void cgraph_node_remove_callers (struct cgraph_node *node);
102 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
103 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
105 /* Hash table used to convert declarations into nodes. */
106 static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
107 /* Hash table used to convert assembler names into nodes. */
108 static GTY((param_is (struct cgraph_node))) htab_t assembler_name_hash;
110 /* The linked list of cgraph nodes. */
111 struct cgraph_node *cgraph_nodes;
113 /* Queue of cgraph nodes scheduled to be lowered. */
114 struct cgraph_node *cgraph_nodes_queue;
116 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
117 secondary queue used during optimization to accommodate passes that
118 may generate new functions that need to be optimized and expanded. */
119 struct cgraph_node *cgraph_new_nodes;
121 /* Number of nodes in existence. */
122 int cgraph_n_nodes;
124 /* Maximal uid used in cgraph nodes. */
125 int cgraph_max_uid;
127 /* Maximal uid used in cgraph edges. */
128 int cgraph_edge_max_uid;
130 /* Maximal pid used for profiling. */
131 int cgraph_max_pid;
133 /* Set when whole unit has been analyzed so we can access global info. */
134 bool cgraph_global_info_ready = false;
136 /* What state callgraph is in right now. */
137 enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;
139 /* Set when the cgraph is fully built and the basic flags are computed. */
140 bool cgraph_function_flags_ready = false;
142 /* Linked list of cgraph asm nodes. */
143 struct cgraph_asm_node *cgraph_asm_nodes;
145 /* Last node in cgraph_asm_nodes. */
146 static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;
148 /* The order index of the next cgraph node to be created. This is
149 used so that we can sort the cgraph nodes in order by when we saw
150 them, to support -fno-toplevel-reorder. */
151 int cgraph_order;
153 /* List of hooks triggered on cgraph_edge events. */
154 struct cgraph_edge_hook_list {
155 cgraph_edge_hook hook;
156 void *data;
157 struct cgraph_edge_hook_list *next;
160 /* List of hooks triggered on cgraph_node events. */
161 struct cgraph_node_hook_list {
162 cgraph_node_hook hook;
163 void *data;
164 struct cgraph_node_hook_list *next;
167 /* List of hooks triggered on events involving two cgraph_edges. */
168 struct cgraph_2edge_hook_list {
169 cgraph_2edge_hook hook;
170 void *data;
171 struct cgraph_2edge_hook_list *next;
174 /* List of hooks triggered on events involving two cgraph_nodes. */
175 struct cgraph_2node_hook_list {
176 cgraph_2node_hook hook;
177 void *data;
178 struct cgraph_2node_hook_list *next;
181 /* List of hooks triggered when an edge is removed. */
182 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
183 /* List of hooks triggered when a node is removed. */
184 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
185 /* List of hooks triggered when an edge is duplicated. */
186 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
187 /* List of hooks triggered when a node is duplicated. */
188 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
189 /* List of hooks triggered when a function is inserted. */
190 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
192 /* Head of a linked list of unused (freed) call graph nodes.
193 Do not GTY((delete)) this list so UIDs get reliably recycled. */
194 static GTY(()) struct cgraph_node *free_nodes;
195 /* Head of a linked list of unused (freed) call graph edges.
196 Do not GTY((delete)) this list so UIDs get reliably recycled. */
197 static GTY(()) struct cgraph_edge *free_edges;
199 /* Macros to access the next item in the list of free cgraph nodes and
200 edges. */
201 #define NEXT_FREE_NODE(NODE) (NODE)->next
202 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
204 /* Register HOOK to be called with DATA on each removed edge. */
205 struct cgraph_edge_hook_list *
206 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
208 struct cgraph_edge_hook_list *entry;
209 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
211 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
212 entry->hook = hook;
213 entry->data = data;
214 entry->next = NULL;
215 while (*ptr)
216 ptr = &(*ptr)->next;
217 *ptr = entry;
218 return entry;
221 /* Remove ENTRY from the list of hooks called on removing edges. */
222 void
223 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
225 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
227 while (*ptr != entry)
228 ptr = &(*ptr)->next;
229 *ptr = entry->next;
230 free (entry);
233 /* Call all edge removal hooks. */
234 static void
235 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
237 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
238 while (entry)
240 entry->hook (e, entry->data);
241 entry = entry->next;
245 /* Register HOOK to be called with DATA on each removed node. */
246 struct cgraph_node_hook_list *
247 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
249 struct cgraph_node_hook_list *entry;
250 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
252 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
253 entry->hook = hook;
254 entry->data = data;
255 entry->next = NULL;
256 while (*ptr)
257 ptr = &(*ptr)->next;
258 *ptr = entry;
259 return entry;
262 /* Remove ENTRY from the list of hooks called on removing nodes. */
263 void
264 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
266 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
268 while (*ptr != entry)
269 ptr = &(*ptr)->next;
270 *ptr = entry->next;
271 free (entry);
274 /* Call all node removal hooks. */
275 static void
276 cgraph_call_node_removal_hooks (struct cgraph_node *node)
278 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
279 while (entry)
281 entry->hook (node, entry->data);
282 entry = entry->next;
286 /* Register HOOK to be called with DATA on each inserted node. */
287 struct cgraph_node_hook_list *
288 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
290 struct cgraph_node_hook_list *entry;
291 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
293 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
294 entry->hook = hook;
295 entry->data = data;
296 entry->next = NULL;
297 while (*ptr)
298 ptr = &(*ptr)->next;
299 *ptr = entry;
300 return entry;
303 /* Remove ENTRY from the list of hooks called on inserted nodes. */
304 void
305 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
307 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
309 while (*ptr != entry)
310 ptr = &(*ptr)->next;
311 *ptr = entry->next;
312 free (entry);
315 /* Call all node insertion hooks. */
316 void
317 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
319 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
320 while (entry)
322 entry->hook (node, entry->data);
323 entry = entry->next;
327 /* Register HOOK to be called with DATA on each duplicated edge. */
328 struct cgraph_2edge_hook_list *
329 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
331 struct cgraph_2edge_hook_list *entry;
332 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
334 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
335 entry->hook = hook;
336 entry->data = data;
337 entry->next = NULL;
338 while (*ptr)
339 ptr = &(*ptr)->next;
340 *ptr = entry;
341 return entry;
344 /* Remove ENTRY from the list of hooks called on duplicating edges. */
345 void
346 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
348 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
350 while (*ptr != entry)
351 ptr = &(*ptr)->next;
352 *ptr = entry->next;
353 free (entry);
356 /* Call all edge duplication hooks. */
357 static void
358 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
359 struct cgraph_edge *cs2)
361 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
362 while (entry)
364 entry->hook (cs1, cs2, entry->data);
365 entry = entry->next;
369 /* Register HOOK to be called with DATA on each duplicated node. */
370 struct cgraph_2node_hook_list *
371 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
373 struct cgraph_2node_hook_list *entry;
374 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
376 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
377 entry->hook = hook;
378 entry->data = data;
379 entry->next = NULL;
380 while (*ptr)
381 ptr = &(*ptr)->next;
382 *ptr = entry;
383 return entry;
386 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
387 void
388 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
390 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
392 while (*ptr != entry)
393 ptr = &(*ptr)->next;
394 *ptr = entry->next;
395 free (entry);
398 /* Call all node duplication hooks. */
399 static void
400 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
401 struct cgraph_node *node2)
403 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
404 while (entry)
406 entry->hook (node1, node2, entry->data);
407 entry = entry->next;
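/* Editorial note: an illustrative sketch, not part of the original sources.
   A pass that keeps per-node data in an on-the-side array indexed by uid
   would typically register a removal hook so stale entries can be dropped,
   along these lines (the my_pass_* names are hypothetical):

     static void
     my_pass_remove_node (struct cgraph_node *node, void *data)
     {
       (void) data;
       my_pass_forget (node->uid);   hypothetical cleanup of on-the-side data
     }

     static struct cgraph_node_hook_list *my_pass_removal_hook;

     my_pass_removal_hook = cgraph_add_node_removal_hook (my_pass_remove_node, NULL);
     ...
     cgraph_remove_node_removal_hook (my_pass_removal_hook);

   The duplication hooks follow the same pattern, except that the callback
   receives both the original and the new node or edge.  */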
411 /* Returns a hash code for P. */
413 static hashval_t
414 hash_node (const void *p)
416 const struct cgraph_node *n = (const struct cgraph_node *) p;
417 return (hashval_t) DECL_UID (n->decl);
421 /* Returns nonzero if P1 and P2 are equal. */
423 static int
424 eq_node (const void *p1, const void *p2)
426 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
427 const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
428 return DECL_UID (n1->decl) == DECL_UID (n2->decl);
431 /* Allocate new callgraph node. */
433 static inline struct cgraph_node *
434 cgraph_allocate_node (void)
436 struct cgraph_node *node;
438 if (free_nodes)
440 node = free_nodes;
441 free_nodes = NEXT_FREE_NODE (node);
443 else
445 node = GGC_CNEW (struct cgraph_node);
446 node->uid = cgraph_max_uid++;
449 return node;
452 /* Allocate new callgraph node and insert it into basic data structures. */
454 static struct cgraph_node *
455 cgraph_create_node (void)
457 struct cgraph_node *node = cgraph_allocate_node ();
459 node->next = cgraph_nodes;
460 node->pid = -1;
461 node->order = cgraph_order++;
462 if (cgraph_nodes)
463 cgraph_nodes->previous = node;
464 node->previous = NULL;
465 node->global.estimated_growth = INT_MIN;
466 node->frequency = NODE_FREQUENCY_NORMAL;
467 ipa_empty_ref_list (&node->ref_list);
468 cgraph_nodes = node;
469 cgraph_n_nodes++;
470 return node;
473 /* Return cgraph node assigned to DECL. Create new one when needed. */
475 struct cgraph_node *
476 cgraph_node (tree decl)
478 struct cgraph_node key, *node, **slot;
480 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
482 if (!cgraph_hash)
483 cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
485 key.decl = decl;
487 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
489 if (*slot)
491 node = *slot;
492 if (node->same_body_alias)
493 node = node->same_body;
494 return node;
497 node = cgraph_create_node ();
498 node->decl = decl;
499 *slot = node;
500 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
502 node->origin = cgraph_node (DECL_CONTEXT (decl));
503 node->next_nested = node->origin->nested;
504 node->origin->nested = node;
506 if (assembler_name_hash)
508 void **aslot;
509 tree name = DECL_ASSEMBLER_NAME (decl);
511 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
512 decl_assembler_name_hash (name),
513 INSERT);
514 /* We can have multiple declarations with the same assembler name. For C++
515 it is __builtin_strlen and strlen, for instance. Do we need to
516 record them all? The original implementation marked just the first one,
517 so let's hope for the best. */
518 if (*aslot == NULL)
519 *aslot = node;
521 return node;
524 /* Mark ALIAS as an alias to DECL. */
526 static struct cgraph_node *
527 cgraph_same_body_alias_1 (tree alias, tree decl)
529 struct cgraph_node key, *alias_node, *decl_node, **slot;
531 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
532 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
533 decl_node = cgraph_node (decl);
535 key.decl = alias;
537 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
539 /* If the cgraph_node has been already created, fail. */
540 if (*slot)
541 return NULL;
543 alias_node = cgraph_allocate_node ();
544 alias_node->decl = alias;
545 alias_node->same_body_alias = 1;
546 alias_node->same_body = decl_node;
547 alias_node->previous = NULL;
548 if (decl_node->same_body)
549 decl_node->same_body->previous = alias_node;
550 alias_node->next = decl_node->same_body;
551 alias_node->thunk.alias = decl;
552 decl_node->same_body = alias_node;
553 *slot = alias_node;
554 return alias_node;
557 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
558 Same body aliases are output whenever the body of DECL is output,
559 and cgraph_node (ALIAS) transparently returns cgraph_node (DECL). */
561 bool
562 cgraph_same_body_alias (tree alias, tree decl)
564 #ifndef ASM_OUTPUT_DEF
565 /* If aliases aren't supported by the assembler, fail. */
566 return false;
567 #endif
569 /*gcc_assert (!assembler_name_hash);*/
571 return cgraph_same_body_alias_1 (alias, decl) != NULL;
574 void
575 cgraph_add_thunk (tree alias, tree decl, bool this_adjusting,
576 HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
577 tree virtual_offset,
578 tree real_alias)
580 struct cgraph_node *node = cgraph_get_node (alias);
582 if (node)
584 gcc_assert (node->local.finalized);
585 gcc_assert (!node->same_body);
586 cgraph_remove_node (node);
589 node = cgraph_same_body_alias_1 (alias, decl);
590 gcc_assert (node);
591 #ifdef ENABLE_CHECKING
592 gcc_assert (!virtual_offset
593 || tree_int_cst_equal (virtual_offset, size_int (virtual_value)));
594 #endif
595 node->thunk.fixed_offset = fixed_offset;
596 node->thunk.this_adjusting = this_adjusting;
597 node->thunk.virtual_value = virtual_value;
598 node->thunk.virtual_offset_p = virtual_offset != NULL;
599 node->thunk.alias = real_alias;
600 node->thunk.thunk_p = true;
603 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
604 is assigned. */
606 struct cgraph_node *
607 cgraph_get_node (tree decl)
609 struct cgraph_node key, *node = NULL, **slot;
611 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
613 if (!cgraph_hash)
614 return NULL;
616 key.decl = decl;
618 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
619 NO_INSERT);
621 if (slot && *slot)
623 node = *slot;
624 if (node->same_body_alias)
625 node = node->same_body;
627 return node;
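/* Editorial note: an illustrative sketch, not part of the original sources.
   The difference between the two lookups matters for code that must not
   create nodes as a side effect.  Given a FUNCTION_DECL fndecl:

     struct cgraph_node *n1 = cgraph_node (fndecl);       creates the node on demand
     struct cgraph_node *n2 = cgraph_get_node (fndecl);   returns NULL if none exists

   Both transparently map a same-body alias to the node of the declaration it
   aliases.  */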
630 /* Insert already constructed node into hashtable. */
632 void
633 cgraph_insert_node_to_hashtable (struct cgraph_node *node)
635 struct cgraph_node **slot;
637 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
639 gcc_assert (!*slot);
640 *slot = node;
643 /* Returns a hash code for P. */
645 static hashval_t
646 hash_node_by_assembler_name (const void *p)
648 const struct cgraph_node *n = (const struct cgraph_node *) p;
649 return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
652 /* Returns nonzero if P1 and P2 are equal. */
654 static int
655 eq_assembler_name (const void *p1, const void *p2)
657 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
658 const_tree name = (const_tree)p2;
659 return (decl_assembler_name_equal (n1->decl, name));
662 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
663 Return NULL if there's no such node. */
665 struct cgraph_node *
666 cgraph_node_for_asm (tree asmname)
668 struct cgraph_node *node;
669 void **slot;
671 if (!assembler_name_hash)
673 assembler_name_hash =
674 htab_create_ggc (10, hash_node_by_assembler_name, eq_assembler_name,
675 NULL);
676 for (node = cgraph_nodes; node; node = node->next)
677 if (!node->global.inlined_to)
679 tree name = DECL_ASSEMBLER_NAME (node->decl);
680 slot = htab_find_slot_with_hash (assembler_name_hash, name,
681 decl_assembler_name_hash (name),
682 INSERT);
683 /* We can have multiple declarations with the same assembler name. For C++
684 it is __builtin_strlen and strlen, for instance. Do we need to
685 record them all? The original implementation marked just the first one,
686 so let's hope for the best. */
687 if (!*slot)
688 *slot = node;
689 if (node->same_body)
691 struct cgraph_node *alias;
693 for (alias = node->same_body; alias; alias = alias->next)
695 hashval_t hash;
696 name = DECL_ASSEMBLER_NAME (alias->decl);
697 hash = decl_assembler_name_hash (name);
698 slot = htab_find_slot_with_hash (assembler_name_hash, name,
699 hash, INSERT);
700 if (!*slot)
701 *slot = alias;
707 slot = htab_find_slot_with_hash (assembler_name_hash, asmname,
708 decl_assembler_name_hash (asmname),
709 NO_INSERT);
711 if (slot)
713 node = (struct cgraph_node *) *slot;
714 if (node->same_body_alias)
715 node = node->same_body;
716 return node;
718 return NULL;
721 /* Returns a hash value for X (which really is a cgraph_edge). */
723 static hashval_t
724 edge_hash (const void *x)
726 return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
729 /* Return nonzero if the call_stmt of cgraph_edge X is equal to the statement Y. */
731 static int
732 edge_eq (const void *x, const void *y)
734 return ((const struct cgraph_edge *) x)->call_stmt == y;
737 /* Add call graph edge E to call site hash of its caller. */
739 static inline void
740 cgraph_add_edge_to_call_site_hash (struct cgraph_edge *e)
742 void **slot;
743 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
744 e->call_stmt,
745 htab_hash_pointer (e->call_stmt),
746 INSERT);
747 gcc_assert (!*slot);
748 *slot = e;
751 /* Return the callgraph edge representing the GIMPLE_CALL statement
752 CALL_STMT. */
754 struct cgraph_edge *
755 cgraph_edge (struct cgraph_node *node, gimple call_stmt)
757 struct cgraph_edge *e, *e2;
758 int n = 0;
760 if (node->call_site_hash)
761 return (struct cgraph_edge *)
762 htab_find_with_hash (node->call_site_hash, call_stmt,
763 htab_hash_pointer (call_stmt));
765 /* This loop may turn out to be a performance problem. In such a case, adding
766 hash tables into call nodes with very many edges is probably the best
767 solution. It is not a good idea to add a pointer into the CALL_EXPR itself
768 because we want to make it possible to have multiple cgraph nodes representing
769 different clones of the same body before the body is actually cloned. */
770 for (e = node->callees; e; e = e->next_callee)
772 if (e->call_stmt == call_stmt)
773 break;
774 n++;
777 if (!e)
778 for (e = node->indirect_calls; e; e = e->next_callee)
780 if (e->call_stmt == call_stmt)
781 break;
782 n++;
785 if (n > 100)
787 node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
788 for (e2 = node->callees; e2; e2 = e2->next_callee)
789 cgraph_add_edge_to_call_site_hash (e2);
790 for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
791 cgraph_add_edge_to_call_site_hash (e2);
794 return e;
798 /* Change field call_stmt of edge E to NEW_STMT. */
800 void
801 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
803 tree decl;
805 if (e->caller->call_site_hash)
807 htab_remove_elt_with_hash (e->caller->call_site_hash,
808 e->call_stmt,
809 htab_hash_pointer (e->call_stmt));
812 e->call_stmt = new_stmt;
813 if (e->indirect_unknown_callee
814 && (decl = gimple_call_fndecl (new_stmt)))
816 /* Constant propagation (and possibly also inlining?) can turn an
817 indirect call into a direct one. */
818 struct cgraph_node *new_callee = cgraph_node (decl);
820 cgraph_make_edge_direct (e, new_callee);
823 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
824 e->can_throw_external = stmt_can_throw_external (new_stmt);
825 pop_cfun ();
826 if (e->caller->call_site_hash)
827 cgraph_add_edge_to_call_site_hash (e);
830 /* Like cgraph_set_call_stmt but walk the clone tree and update all
831 clones sharing the same function body. */
833 void
834 cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
835 gimple old_stmt, gimple new_stmt)
837 struct cgraph_node *node;
838 struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
840 if (edge)
841 cgraph_set_call_stmt (edge, new_stmt);
843 node = orig->clones;
844 if (node)
845 while (node != orig)
847 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
848 if (edge)
849 cgraph_set_call_stmt (edge, new_stmt);
850 if (node->clones)
851 node = node->clones;
852 else if (node->next_sibling_clone)
853 node = node->next_sibling_clone;
854 else
856 while (node != orig && !node->next_sibling_clone)
857 node = node->clone_of;
858 if (node != orig)
859 node = node->next_sibling_clone;
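/* Editorial note: an illustrative sketch, not part of the original sources.
   The loop above is the preorder walk over the clone tree rooted at ORIG that
   several functions in this file repeat.  Schematically (visit () stands for
   whatever per-clone update is needed):

     node = orig->clones;
     while (node != orig)
       {
         visit (node);
         if (node->clones)                   descend into this clone's clones first
           node = node->clones;
         else if (node->next_sibling_clone)  otherwise move to the next sibling
           node = node->next_sibling_clone;
         else                                otherwise climb back towards ORIG
           {
             while (node != orig && !node->next_sibling_clone)
               node = node->clone_of;
             if (node != orig)
               node = node->next_sibling_clone;
           }
       }  */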
864 /* Like cgraph_create_edge, walk the clone tree and update all clones sharing
865 the same function body. If a clone already has an edge for OLD_STMT, only
866 update the edge the same way cgraph_set_call_stmt_including_clones does.
868 TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
869 frequencies of the clones. */
871 void
872 cgraph_create_edge_including_clones (struct cgraph_node *orig,
873 struct cgraph_node *callee,
874 gimple old_stmt,
875 gimple stmt, gcov_type count,
876 int freq, int loop_depth,
877 cgraph_inline_failed_t reason)
879 struct cgraph_node *node;
880 struct cgraph_edge *edge;
882 if (!cgraph_edge (orig, stmt))
884 edge = cgraph_create_edge (orig, callee, stmt, count, freq, loop_depth);
885 edge->inline_failed = reason;
888 node = orig->clones;
889 if (node)
890 while (node != orig)
892 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
894 /* It is possible that clones already contain the edge while the
895 master didn't. Either we promoted an indirect call into a direct
896 call in the clone or we are processing clones of an unreachable
897 master where edges have been removed. */
898 if (edge)
899 cgraph_set_call_stmt (edge, stmt);
900 else if (!cgraph_edge (node, stmt))
902 edge = cgraph_create_edge (node, callee, stmt, count,
903 freq, loop_depth);
904 edge->inline_failed = reason;
907 if (node->clones)
908 node = node->clones;
909 else if (node->next_sibling_clone)
910 node = node->next_sibling_clone;
911 else
913 while (node != orig && !node->next_sibling_clone)
914 node = node->clone_of;
915 if (node != orig)
916 node = node->next_sibling_clone;
921 /* Give initial reasons why inlining would fail on EDGE. This gets either
922 nullified or usually overwritten by more precise reasons later. */
924 static void
925 initialize_inline_failed (struct cgraph_edge *e)
927 struct cgraph_node *callee = e->callee;
929 if (e->indirect_unknown_callee)
930 e->inline_failed = CIF_INDIRECT_UNKNOWN_CALL;
931 else if (!callee->analyzed)
932 e->inline_failed = CIF_BODY_NOT_AVAILABLE;
933 else if (callee->local.redefined_extern_inline)
934 e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
935 else if (!callee->local.inlinable)
936 e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
937 else if (e->call_stmt && gimple_call_cannot_inline_p (e->call_stmt))
938 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
939 else
940 e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
943 /* Allocate a cgraph_edge structure and fill it with data according to the
944 parameters of which only CALLEE can be NULL (when creating an indirect call
945 edge). */
947 static struct cgraph_edge *
948 cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
949 gimple call_stmt, gcov_type count, int freq, int nest)
951 struct cgraph_edge *edge;
953 /* LTO does not actually have access to the call_stmt since these
954 have not been loaded yet. */
955 if (call_stmt)
957 #ifdef ENABLE_CHECKING
958 /* This is a rather pricey check, possibly triggering construction of the
959 call stmt hash table. */
960 gcc_assert (!cgraph_edge (caller, call_stmt));
961 #endif
963 gcc_assert (is_gimple_call (call_stmt));
966 if (free_edges)
968 edge = free_edges;
969 free_edges = NEXT_FREE_EDGE (edge);
971 else
973 edge = GGC_NEW (struct cgraph_edge);
974 edge->uid = cgraph_edge_max_uid++;
977 edge->aux = NULL;
978 edge->caller = caller;
979 edge->callee = callee;
980 edge->prev_caller = NULL;
981 edge->next_caller = NULL;
982 edge->prev_callee = NULL;
983 edge->next_callee = NULL;
985 edge->count = count;
986 gcc_assert (count >= 0);
987 edge->frequency = freq;
988 gcc_assert (freq >= 0);
989 gcc_assert (freq <= CGRAPH_FREQ_MAX);
990 edge->loop_nest = nest;
992 edge->call_stmt = call_stmt;
993 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
994 edge->can_throw_external
995 = call_stmt ? stmt_can_throw_external (call_stmt) : false;
996 pop_cfun ();
997 edge->call_stmt_cannot_inline_p =
998 (call_stmt ? gimple_call_cannot_inline_p (call_stmt) : false);
999 if (call_stmt && caller->call_site_hash)
1000 cgraph_add_edge_to_call_site_hash (edge);
1002 edge->indirect_info = NULL;
1003 edge->indirect_inlining_edge = 0;
1005 return edge;
1008 /* Create edge from CALLER to CALLEE in the cgraph. */
1010 struct cgraph_edge *
1011 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
1012 gimple call_stmt, gcov_type count, int freq, int nest)
1014 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
1015 count, freq, nest);
1017 edge->indirect_unknown_callee = 0;
1018 initialize_inline_failed (edge);
1020 edge->next_caller = callee->callers;
1021 if (callee->callers)
1022 callee->callers->prev_caller = edge;
1023 edge->next_callee = caller->callees;
1024 if (caller->callees)
1025 caller->callees->prev_callee = edge;
1026 caller->callees = edge;
1027 callee->callers = edge;
1029 return edge;
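/* Editorial note: an illustrative sketch, not part of the original sources.
   When the callgraph is built from a function body, a direct call edge is
   typically created from the GIMPLE call statement STMT and its basic block
   BB (both hypothetical names here) roughly as:

     cgraph_create_edge (cgraph_node (current_function_decl),
                         cgraph_node (gimple_call_fndecl (stmt)),
                         stmt, bb->count,
                         compute_call_stmt_bb_frequency (current_function_decl, bb),
                         bb->loop_depth);

   The real construction of edges from GIMPLE bodies lives in cgraphbuild.c.  */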
1033 /* Create an indirect edge with a yet-undetermined callee where the call
1034 statement destination is a formal parameter of the caller with index
1035 PARAM_INDEX. */
1037 struct cgraph_edge *
1038 cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
1039 int ecf_flags,
1040 gcov_type count, int freq, int nest)
1042 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
1043 count, freq, nest);
1045 edge->indirect_unknown_callee = 1;
1046 initialize_inline_failed (edge);
1048 edge->indirect_info = GGC_CNEW (struct cgraph_indirect_call_info);
1049 edge->indirect_info->param_index = -1;
1050 edge->indirect_info->ecf_flags = ecf_flags;
1052 edge->next_callee = caller->indirect_calls;
1053 if (caller->indirect_calls)
1054 caller->indirect_calls->prev_callee = edge;
1055 caller->indirect_calls = edge;
1057 return edge;
1060 /* Remove the edge E from the list of the callers of the callee. */
1062 static inline void
1063 cgraph_edge_remove_callee (struct cgraph_edge *e)
1065 gcc_assert (!e->indirect_unknown_callee);
1066 if (e->prev_caller)
1067 e->prev_caller->next_caller = e->next_caller;
1068 if (e->next_caller)
1069 e->next_caller->prev_caller = e->prev_caller;
1070 if (!e->prev_caller)
1071 e->callee->callers = e->next_caller;
1074 /* Remove the edge E from the list of the callees of the caller. */
1076 static inline void
1077 cgraph_edge_remove_caller (struct cgraph_edge *e)
1079 if (e->prev_callee)
1080 e->prev_callee->next_callee = e->next_callee;
1081 if (e->next_callee)
1082 e->next_callee->prev_callee = e->prev_callee;
1083 if (!e->prev_callee)
1085 if (e->indirect_unknown_callee)
1086 e->caller->indirect_calls = e->next_callee;
1087 else
1088 e->caller->callees = e->next_callee;
1090 if (e->caller->call_site_hash)
1091 htab_remove_elt_with_hash (e->caller->call_site_hash,
1092 e->call_stmt,
1093 htab_hash_pointer (e->call_stmt));
1096 /* Put the edge onto the free list. */
1098 static void
1099 cgraph_free_edge (struct cgraph_edge *e)
1101 int uid = e->uid;
1103 /* Clear out the edge so we do not dangle pointers. */
1104 memset (e, 0, sizeof (*e));
1105 e->uid = uid;
1106 NEXT_FREE_EDGE (e) = free_edges;
1107 free_edges = e;
1110 /* Remove the edge E in the cgraph. */
1112 void
1113 cgraph_remove_edge (struct cgraph_edge *e)
1115 /* Call all edge removal hooks. */
1116 cgraph_call_edge_removal_hooks (e);
1118 if (!e->indirect_unknown_callee)
1119 /* Remove from callers list of the callee. */
1120 cgraph_edge_remove_callee (e);
1122 /* Remove from callees list of the callers. */
1123 cgraph_edge_remove_caller (e);
1125 /* Put the edge onto the free list. */
1126 cgraph_free_edge (e);
1129 /* Set callee of call graph edge E and add it to the corresponding set of
1130 callers. */
1132 static void
1133 cgraph_set_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1135 e->prev_caller = NULL;
1136 if (n->callers)
1137 n->callers->prev_caller = e;
1138 e->next_caller = n->callers;
1139 n->callers = e;
1140 e->callee = n;
1143 /* Redirect callee of E to N. The function does not update underlying
1144 call expression. */
1146 void
1147 cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1149 /* Remove from callers list of the current callee. */
1150 cgraph_edge_remove_callee (e);
1152 /* Insert to callers list of the new callee. */
1153 cgraph_set_edge_callee (e, n);
1156 /* Make an indirect EDGE with an unknown callee an ordinary edge leading to
1157 CALLEE. */
1159 void
1160 cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee)
1162 edge->indirect_unknown_callee = 0;
1164 /* Get the edge out of the indirect edge list. */
1165 if (edge->prev_callee)
1166 edge->prev_callee->next_callee = edge->next_callee;
1167 if (edge->next_callee)
1168 edge->next_callee->prev_callee = edge->prev_callee;
1169 if (!edge->prev_callee)
1170 edge->caller->indirect_calls = edge->next_callee;
1172 /* Put it into the normal callee list. */
1173 edge->prev_callee = NULL;
1174 edge->next_callee = edge->caller->callees;
1175 if (edge->caller->callees)
1176 edge->caller->callees->prev_callee = edge;
1177 edge->caller->callees = edge;
1179 /* Insert to callers list of the new callee. */
1180 cgraph_set_edge_callee (edge, callee);
1182 /* We need to re-determine the inlining status of the edge. */
1183 initialize_inline_failed (edge);
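/* Editorial note: an illustrative sketch, not part of the original sources.
   The usual life cycle of such an edge is that an indirect call is recorded
   first and an IPA analysis later discovers the target, e.g. (STMT,
   TARGET_DECL and the surrounding variables are hypothetical):

     e = cgraph_create_indirect_edge (caller, stmt, ecf_flags, count, freq, nest);
     ... propagation later proves that STMT always calls TARGET_DECL ...
     cgraph_make_edge_direct (e, cgraph_node (target_decl));

   cgraph_set_call_stmt performs the same promotion automatically when the
   rewritten call statement has a known fndecl.  */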
1187 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1188 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1189 of OLD_STMT if it was previously a call statement. */
1191 static void
1192 cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
1193 gimple old_stmt, tree old_call, gimple new_stmt)
1195 tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fndecl (new_stmt) : 0;
1197 /* If we are only seeing indirect calls, there is nothing to update. */
1198 if (!new_call && !old_call)
1199 return;
1200 /* See if we turned an indirect call into a direct call or folded a call to one
1201 builtin into a different builtin. */
1202 if (old_call != new_call)
1204 struct cgraph_edge *e = cgraph_edge (node, old_stmt);
1205 struct cgraph_edge *ne = NULL;
1206 gcov_type count;
1207 int frequency;
1208 int loop_nest;
1210 if (e)
1212 /* See if the edge is already there and has the correct callee. It
1213 might be so because indirect inlining has already updated
1214 it. */
1215 if (new_call && e->callee && e->callee->decl == new_call)
1216 return;
1218 /* Otherwise remove the edge and create a new one; we can't simply redirect it
1219 since the function has changed, so the inline plan and other information
1220 attached to the edge is invalid. */
1221 count = e->count;
1222 frequency = e->frequency;
1223 loop_nest = e->loop_nest;
1224 cgraph_remove_edge (e);
1226 else
1228 /* We are seeing new direct call; compute profile info based on BB. */
1229 basic_block bb = gimple_bb (new_stmt);
1230 count = bb->count;
1231 frequency = compute_call_stmt_bb_frequency (current_function_decl,
1232 bb);
1233 loop_nest = bb->loop_depth;
1236 if (new_call)
1238 ne = cgraph_create_edge (node, cgraph_node (new_call),
1239 new_stmt, count, frequency,
1240 loop_nest);
1241 gcc_assert (ne->inline_failed);
1244 /* We only updated the call stmt; update the pointer in the cgraph edge. */
1245 else if (old_stmt != new_stmt)
1246 cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
1249 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1250 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1251 of OLD_STMT before it was updated (updating can happen inplace). */
1253 void
1254 cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
1256 struct cgraph_node *orig = cgraph_node (cfun->decl);
1257 struct cgraph_node *node;
1259 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1260 if (orig->clones)
1261 for (node = orig->clones; node != orig;)
1263 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1264 if (node->clones)
1265 node = node->clones;
1266 else if (node->next_sibling_clone)
1267 node = node->next_sibling_clone;
1268 else
1270 while (node != orig && !node->next_sibling_clone)
1271 node = node->clone_of;
1272 if (node != orig)
1273 node = node->next_sibling_clone;
1279 /* Remove all callees from the node. */
1281 void
1282 cgraph_node_remove_callees (struct cgraph_node *node)
1284 struct cgraph_edge *e, *f;
1286 /* It is sufficient to remove the edges from the lists of callers of
1287 the callees. The callee list of the node can be zapped with one
1288 assignment. */
1289 for (e = node->callees; e; e = f)
1291 f = e->next_callee;
1292 cgraph_call_edge_removal_hooks (e);
1293 if (!e->indirect_unknown_callee)
1294 cgraph_edge_remove_callee (e);
1295 cgraph_free_edge (e);
1297 for (e = node->indirect_calls; e; e = f)
1299 f = e->next_callee;
1300 cgraph_call_edge_removal_hooks (e);
1301 if (!e->indirect_unknown_callee)
1302 cgraph_edge_remove_callee (e);
1303 cgraph_free_edge (e);
1305 node->indirect_calls = NULL;
1306 node->callees = NULL;
1307 if (node->call_site_hash)
1309 htab_delete (node->call_site_hash);
1310 node->call_site_hash = NULL;
1314 /* Remove all callers from the node. */
1316 static void
1317 cgraph_node_remove_callers (struct cgraph_node *node)
1319 struct cgraph_edge *e, *f;
1321 /* It is sufficient to remove the edges from the lists of callees of
1322 the callers. The caller list of the node can be zapped with one
1323 assignment. */
1324 for (e = node->callers; e; e = f)
1326 f = e->next_caller;
1327 cgraph_call_edge_removal_hooks (e);
1328 cgraph_edge_remove_caller (e);
1329 cgraph_free_edge (e);
1331 node->callers = NULL;
1334 /* Release memory used to represent body of function NODE. */
1336 void
1337 cgraph_release_function_body (struct cgraph_node *node)
1339 if (DECL_STRUCT_FUNCTION (node->decl))
1341 tree old_decl = current_function_decl;
1342 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1343 if (cfun->gimple_df)
1345 current_function_decl = node->decl;
1346 delete_tree_ssa ();
1347 delete_tree_cfg_annotations ();
1348 cfun->eh = NULL;
1349 current_function_decl = old_decl;
1351 if (cfun->cfg)
1353 gcc_assert (dom_computed[0] == DOM_NONE);
1354 gcc_assert (dom_computed[1] == DOM_NONE);
1355 clear_edges ();
1357 if (cfun->value_histograms)
1358 free_histograms ();
1359 gcc_assert (!current_loops);
1360 pop_cfun();
1361 gimple_set_body (node->decl, NULL);
1362 VEC_free (ipa_opt_pass, heap,
1363 node->ipa_transforms_to_apply);
1364 /* The struct function hangs a lot of data off it that would leak if we didn't
1365 remove all pointers to it. */
1366 ggc_free (DECL_STRUCT_FUNCTION (node->decl));
1367 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1369 DECL_SAVED_TREE (node->decl) = NULL;
1370 /* If the node is abstract and needed, then do not clear DECL_INITIAL
1371 of its associated function declaration because it's
1372 needed to emit debug info later. */
1373 if (!node->abstract_and_needed)
1374 DECL_INITIAL (node->decl) = error_mark_node;
1377 /* Remove same body alias node. */
1379 void
1380 cgraph_remove_same_body_alias (struct cgraph_node *node)
1382 void **slot;
1383 int uid = node->uid;
1385 gcc_assert (node->same_body_alias);
1386 if (node->previous)
1387 node->previous->next = node->next;
1388 else
1389 node->same_body->same_body = node->next;
1390 if (node->next)
1391 node->next->previous = node->previous;
1392 node->next = NULL;
1393 node->previous = NULL;
1394 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1395 if (*slot == node)
1396 htab_clear_slot (cgraph_hash, slot);
1397 if (assembler_name_hash)
1399 tree name = DECL_ASSEMBLER_NAME (node->decl);
1400 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1401 decl_assembler_name_hash (name),
1402 NO_INSERT);
1403 if (slot && *slot == node)
1404 htab_clear_slot (assembler_name_hash, slot);
1407 /* Clear out the node to NULL all pointers and add the node to the free
1408 list. */
1409 memset (node, 0, sizeof(*node));
1410 node->uid = uid;
1411 NEXT_FREE_NODE (node) = free_nodes;
1412 free_nodes = node;
1415 /* Remove the node from cgraph. */
1417 void
1418 cgraph_remove_node (struct cgraph_node *node)
1420 void **slot;
1421 bool kill_body = false;
1422 struct cgraph_node *n;
1423 int uid = node->uid;
1425 cgraph_call_node_removal_hooks (node);
1426 cgraph_node_remove_callers (node);
1427 cgraph_node_remove_callees (node);
1428 ipa_remove_all_references (&node->ref_list);
1429 ipa_remove_all_refering (&node->ref_list);
1430 VEC_free (ipa_opt_pass, heap,
1431 node->ipa_transforms_to_apply);
1433 /* Incremental inlining access removed nodes stored in the postorder list.
1435 node->needed = node->reachable = false;
1436 for (n = node->nested; n; n = n->next_nested)
1437 n->origin = NULL;
1438 node->nested = NULL;
1439 if (node->origin)
1441 struct cgraph_node **node2 = &node->origin->nested;
1443 while (*node2 != node)
1444 node2 = &(*node2)->next_nested;
1445 *node2 = node->next_nested;
1447 if (node->previous)
1448 node->previous->next = node->next;
1449 else
1450 cgraph_nodes = node->next;
1451 if (node->next)
1452 node->next->previous = node->previous;
1453 node->next = NULL;
1454 node->previous = NULL;
1455 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1456 if (*slot == node)
1458 struct cgraph_node *next_inline_clone;
1460 for (next_inline_clone = node->clones;
1461 next_inline_clone && next_inline_clone->decl != node->decl;
1462 next_inline_clone = next_inline_clone->next_sibling_clone)
1465 /* If there is an inline clone of the node being removed, we need
1466 to put it into the position of the removed node and reorganize all
1467 other clones to be based on it. */
1468 if (next_inline_clone)
1470 struct cgraph_node *n;
1471 struct cgraph_node *new_clones;
1473 *slot = next_inline_clone;
1475 /* Unlink inline clone from the list of clones of removed node. */
1476 if (next_inline_clone->next_sibling_clone)
1477 next_inline_clone->next_sibling_clone->prev_sibling_clone
1478 = next_inline_clone->prev_sibling_clone;
1479 if (next_inline_clone->prev_sibling_clone)
1481 gcc_assert (node->clones != next_inline_clone);
1482 next_inline_clone->prev_sibling_clone->next_sibling_clone
1483 = next_inline_clone->next_sibling_clone;
1485 else
1487 gcc_assert (node->clones == next_inline_clone);
1488 node->clones = next_inline_clone->next_sibling_clone;
1491 new_clones = node->clones;
1492 node->clones = NULL;
1494 /* Copy clone info. */
1495 next_inline_clone->clone = node->clone;
1497 /* Now place it into the clone tree at the same level as NODE. */
1498 next_inline_clone->clone_of = node->clone_of;
1499 next_inline_clone->prev_sibling_clone = NULL;
1500 next_inline_clone->next_sibling_clone = NULL;
1501 if (node->clone_of)
1503 if (node->clone_of->clones)
1504 node->clone_of->clones->prev_sibling_clone = next_inline_clone;
1505 next_inline_clone->next_sibling_clone = node->clone_of->clones;
1506 node->clone_of->clones = next_inline_clone;
1509 /* Merge the clone list. */
1510 if (new_clones)
1512 if (!next_inline_clone->clones)
1513 next_inline_clone->clones = new_clones;
1514 else
1516 n = next_inline_clone->clones;
1517 while (n->next_sibling_clone)
1518 n = n->next_sibling_clone;
1519 n->next_sibling_clone = new_clones;
1520 new_clones->prev_sibling_clone = n;
1524 /* Update clone_of pointers. */
1525 n = new_clones;
1526 while (n)
1528 n->clone_of = next_inline_clone;
1529 n = n->next_sibling_clone;
1532 else
1534 htab_clear_slot (cgraph_hash, slot);
1535 kill_body = true;
1539 if (node->prev_sibling_clone)
1540 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1541 else if (node->clone_of)
1542 node->clone_of->clones = node->next_sibling_clone;
1543 if (node->next_sibling_clone)
1544 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1545 if (node->clones)
1547 struct cgraph_node *n, *next;
1549 if (node->clone_of)
1551 for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
1552 n->clone_of = node->clone_of;
1553 n->clone_of = node->clone_of;
1554 n->next_sibling_clone = node->clone_of->clones;
1555 if (node->clone_of->clones)
1556 node->clone_of->clones->prev_sibling_clone = n;
1557 node->clone_of->clones = node->clones;
1559 else
1561 /* We are removing a node with clones. This makes the clones inconsistent,
1562 but assume they will be removed subsequently and just keep the clone
1563 tree intact. This can happen in unreachable function removal since
1564 we remove unreachable functions in random order, not by a bottom-up
1565 walk of clone trees. */
1566 for (n = node->clones; n; n = next)
1568 next = n->next_sibling_clone;
1569 n->next_sibling_clone = NULL;
1570 n->prev_sibling_clone = NULL;
1571 n->clone_of = NULL;
1576 while (node->same_body)
1577 cgraph_remove_same_body_alias (node->same_body);
1579 if (node->same_comdat_group)
1581 struct cgraph_node *prev;
1582 for (prev = node->same_comdat_group;
1583 prev->same_comdat_group != node;
1584 prev = prev->same_comdat_group)
1586 if (node->same_comdat_group == prev)
1587 prev->same_comdat_group = NULL;
1588 else
1589 prev->same_comdat_group = node->same_comdat_group;
1590 node->same_comdat_group = NULL;
1593 /* While all the clones are removed after being processed, the function
1594 itself is kept in the cgraph even after it is compiled. Check whether
1595 we are done with this body and reclaim it proactively if this is the case. */
1597 if (!kill_body && *slot)
1599 struct cgraph_node *n = (struct cgraph_node *) *slot;
1600 if (!n->clones && !n->clone_of && !n->global.inlined_to
1601 && (cgraph_global_info_ready
1602 && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl)
1603 || n->in_other_partition)))
1604 kill_body = true;
1606 if (assembler_name_hash)
1608 tree name = DECL_ASSEMBLER_NAME (node->decl);
1609 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1610 decl_assembler_name_hash (name),
1611 NO_INSERT);
1612 /* Inline clones are not hashed. */
1613 if (slot && *slot == node)
1614 htab_clear_slot (assembler_name_hash, slot);
1617 if (kill_body)
1618 cgraph_release_function_body (node);
1619 node->decl = NULL;
1620 if (node->call_site_hash)
1622 htab_delete (node->call_site_hash);
1623 node->call_site_hash = NULL;
1625 cgraph_n_nodes--;
1627 /* Clear out the node to NULL all pointers and add the node to the free
1628 list. */
1629 memset (node, 0, sizeof(*node));
1630 node->uid = uid;
1631 NEXT_FREE_NODE (node) = free_nodes;
1632 free_nodes = node;
1635 /* Remove the node from the cgraph, together with all clones inlined into it. */
1637 void
1638 cgraph_remove_node_and_inline_clones (struct cgraph_node *node)
1640 struct cgraph_edge *e, *next;
1641 for (e = node->callees; e; e = next)
1643 next = e->next_callee;
1644 if (!e->inline_failed)
1645 cgraph_remove_node_and_inline_clones (e->callee);
1647 cgraph_remove_node (node);
1650 /* Notify finalize_compilation_unit that given node is reachable. */
1652 void
1653 cgraph_mark_reachable_node (struct cgraph_node *node)
1655 if (!node->reachable && node->local.finalized)
1657 if (cgraph_global_info_ready)
1659 /* Verify that the function does not appear to be needed out of the blue
1660 during the optimization process. This can happen for extern
1661 inlines when bodies were removed after inlining. */
1662 gcc_assert ((node->analyzed || node->in_other_partition
1663 || DECL_EXTERNAL (node->decl)));
1665 else
1666 notice_global_symbol (node->decl);
1667 node->reachable = 1;
1669 node->next_needed = cgraph_nodes_queue;
1670 cgraph_nodes_queue = node;
1674 /* Likewise indicate that a node is needed, i.e. reachable via some
1675 external means. */
1677 void
1678 cgraph_mark_needed_node (struct cgraph_node *node)
1680 node->needed = 1;
1681 gcc_assert (!node->global.inlined_to);
1682 cgraph_mark_reachable_node (node);
1685 /* Likewise indicate that a node has its address taken. */
1687 void
1688 cgraph_mark_address_taken_node (struct cgraph_node *node)
1690 cgraph_mark_reachable_node (node);
1691 node->address_taken = 1;
1694 /* Return local info for the compiled function. */
1696 struct cgraph_local_info *
1697 cgraph_local_info (tree decl)
1699 struct cgraph_node *node;
1701 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1702 node = cgraph_node (decl);
1703 return &node->local;
1706 /* Return global info for the compiled function. */
1708 struct cgraph_global_info *
1709 cgraph_global_info (tree decl)
1711 struct cgraph_node *node;
1713 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1714 node = cgraph_node (decl);
1715 return &node->global;
1718 /* Return RTL info for the compiled function. */
1720 struct cgraph_rtl_info *
1721 cgraph_rtl_info (tree decl)
1723 struct cgraph_node *node;
1725 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1726 node = cgraph_node (decl);
1727 if (decl != current_function_decl
1728 && !TREE_ASM_WRITTEN (node->decl))
1729 return NULL;
1730 return &node->rtl;
1733 /* Return a string describing the failure REASON. */
1735 const char*
1736 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1738 #undef DEFCIFCODE
1739 #define DEFCIFCODE(code, string) string,
1741 static const char *cif_string_table[CIF_N_REASONS] = {
1742 #include "cif-code.def"
1745 /* Signedness of an enum type is implementation defined, so cast it
1746 to unsigned before testing. */
1747 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1748 return cif_string_table[reason];
1751 /* Return name of the node used in debug output. */
1752 const char *
1753 cgraph_node_name (struct cgraph_node *node)
1755 return lang_hooks.decl_printable_name (node->decl, 2);
1758 /* Names used to print out the availability enum. */
1759 const char * const cgraph_availability_names[] =
1760 {"unset", "not_available", "overwritable", "available", "local"};
1763 /* Dump call graph node NODE to file F. */
1765 void
1766 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1768 struct cgraph_edge *edge;
1769 int indirect_calls_count = 0;
1771 fprintf (f, "%s/%i(%i)", cgraph_node_name (node), node->uid,
1772 node->pid);
1773 dump_addr (f, " @", (void *)node);
1774 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
1775 fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
1776 if (node->global.inlined_to)
1777 fprintf (f, " (inline copy in %s/%i)",
1778 cgraph_node_name (node->global.inlined_to),
1779 node->global.inlined_to->uid);
1780 if (node->clone_of)
1781 fprintf (f, " (clone of %s/%i)",
1782 cgraph_node_name (node->clone_of),
1783 node->clone_of->uid);
1784 if (cgraph_function_flags_ready)
1785 fprintf (f, " availability:%s",
1786 cgraph_availability_names [cgraph_function_body_availability (node)]);
1787 if (node->analyzed)
1788 fprintf (f, " analyzed");
1789 if (node->in_other_partition)
1790 fprintf (f, " in_other_partition");
1791 if (node->count)
1792 fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
1793 (HOST_WIDEST_INT)node->count);
1794 if (node->local.inline_summary.self_time)
1795 fprintf (f, " %i time, %i benefit", node->local.inline_summary.self_time,
1796 node->local.inline_summary.time_inlining_benefit);
1797 if (node->global.time && node->global.time
1798 != node->local.inline_summary.self_time)
1799 fprintf (f, " (%i after inlining)", node->global.time);
1800 if (node->local.inline_summary.self_size)
1801 fprintf (f, " %i size, %i benefit", node->local.inline_summary.self_size,
1802 node->local.inline_summary.size_inlining_benefit);
1803 if (node->global.size && node->global.size
1804 != node->local.inline_summary.self_size)
1805 fprintf (f, " (%i after inlining)", node->global.size);
1806 if (node->local.inline_summary.estimated_self_stack_size)
1807 fprintf (f, " %i bytes stack usage", (int)node->local.inline_summary.estimated_self_stack_size);
1808 if (node->global.estimated_stack_size != node->local.inline_summary.estimated_self_stack_size)
1809 fprintf (f, " %i bytes after inlining", (int)node->global.estimated_stack_size);
1810 if (node->origin)
1811 fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
1812 if (node->needed)
1813 fprintf (f, " needed");
1814 if (node->address_taken)
1815 fprintf (f, " address_taken");
1816 else if (node->reachable)
1817 fprintf (f, " reachable");
1818 else if (node->reachable_from_other_partition)
1819 fprintf (f, " reachable_from_other_partition");
1820 if (gimple_has_body_p (node->decl))
1821 fprintf (f, " body");
1822 if (node->process)
1823 fprintf (f, " process");
1824 if (node->local.local)
1825 fprintf (f, " local");
1826 if (node->local.externally_visible)
1827 fprintf (f, " externally_visible");
1828 if (node->local.finalized)
1829 fprintf (f, " finalized");
1830 if (node->local.disregard_inline_limits)
1831 fprintf (f, " always_inline");
1832 else if (node->local.inlinable)
1833 fprintf (f, " inlinable");
1834 else if (node->local.versionable)
1835 fprintf (f, " versionable");
1836 if (node->local.redefined_extern_inline)
1837 fprintf (f, " redefined_extern_inline");
1838 if (TREE_ASM_WRITTEN (node->decl))
1839 fprintf (f, " asm_written");
1841 fprintf (f, "\n called by: ");
1842 for (edge = node->callers; edge; edge = edge->next_caller)
1844 fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
1845 edge->caller->uid);
1846 if (edge->count)
1847 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1848 (HOST_WIDEST_INT)edge->count);
1849 if (edge->frequency)
1850 fprintf (f, "(%.2f per call) ",
1851 edge->frequency / (double)CGRAPH_FREQ_BASE);
1852 if (!edge->inline_failed)
1853 fprintf(f, "(inlined) ");
1854 if (edge->indirect_inlining_edge)
1855 fprintf(f, "(indirect_inlining) ");
1856 if (edge->can_throw_external)
1857 fprintf(f, "(can throw external) ");
1860 fprintf (f, "\n calls: ");
1861 for (edge = node->callees; edge; edge = edge->next_callee)
1863 fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
1864 edge->callee->uid);
1865 if (!edge->inline_failed)
1866 fprintf(f, "(inlined) ");
1867 if (edge->indirect_inlining_edge)
1868 fprintf(f, "(indirect_inlining) ");
1869 if (edge->count)
1870 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1871 (HOST_WIDEST_INT)edge->count);
1872 if (edge->frequency)
1873 fprintf (f, "(%.2f per call) ",
1874 edge->frequency / (double)CGRAPH_FREQ_BASE);
1875 if (edge->loop_nest)
1876 fprintf (f, "(nested in %i loops) ", edge->loop_nest);
1877 if (edge->can_throw_external)
1878 fprintf(f, "(can throw external) ");
1880 fprintf (f, "\n");
1881 fprintf (f, " References: ");
1882 ipa_dump_references (f, &node->ref_list);
1883 fprintf (f, " Refering this function: ");
1884 ipa_dump_refering (f, &node->ref_list);
1886 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1887 indirect_calls_count++;
1888 if (indirect_calls_count)
1889 fprintf (f, " has %i outgoing edges for indirect calls.\n",
1890 indirect_calls_count);
1892 if (node->same_body)
1894 struct cgraph_node *n;
1895 fprintf (f, " aliases & thunks:");
1896 for (n = node->same_body; n; n = n->next)
1898 fprintf (f, " %s/%i", cgraph_node_name (n), n->uid);
1899 if (n->thunk.thunk_p)
1901 fprintf (f, " (thunk of %s fixed ofset %i virtual value %i has "
1902 "virtual offset %i",
1903 lang_hooks.decl_printable_name (n->thunk.alias, 2),
1904 (int)n->thunk.fixed_offset,
1905 (int)n->thunk.virtual_value,
1906 (int)n->thunk.virtual_offset_p);
1907 fprintf (f, ")");
1909 if (DECL_ASSEMBLER_NAME_SET_P (n->decl))
1910 fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (n->decl)));
1912 fprintf (f, "\n");
1917 /* Dump call graph node NODE to stderr. */
1919 DEBUG_FUNCTION void
1920 debug_cgraph_node (struct cgraph_node *node)
1922 dump_cgraph_node (stderr, node);
1926 /* Dump the callgraph to file F. */
1928 void
1929 dump_cgraph (FILE *f)
1931 struct cgraph_node *node;
1933 fprintf (f, "callgraph:\n\n");
1934 for (node = cgraph_nodes; node; node = node->next)
1935 dump_cgraph_node (f, node);
1939 /* Dump the call graph to stderr. */
1941 DEBUG_FUNCTION void
1942 debug_cgraph (void)
1944 dump_cgraph (stderr);
1948 /* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables. */
1950 void
1951 change_decl_assembler_name (tree decl, tree name)
1953 gcc_assert (!assembler_name_hash);
1954 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
1956 SET_DECL_ASSEMBLER_NAME (decl, name);
1957 return;
1959 if (name == DECL_ASSEMBLER_NAME (decl))
1960 return;
1962 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
1963 && DECL_RTL_SET_P (decl))
1964 warning (0, "%D renamed after being referenced in assembly", decl);
1966 SET_DECL_ASSEMBLER_NAME (decl, name);
1969 /* Add a top-level asm statement to the list. */
1971 struct cgraph_asm_node *
1972 cgraph_add_asm_node (tree asm_str)
1974 struct cgraph_asm_node *node;
1976 node = GGC_CNEW (struct cgraph_asm_node);
1977 node->asm_str = asm_str;
1978 node->order = cgraph_order++;
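  /* ORDER comes from the global cgraph_order counter, which is also used for
     function and variable nodes, so the output machinery can keep top-level
     asm statements in their original position relative to other definitions.  */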
1979 node->next = NULL;
1980 if (cgraph_asm_nodes == NULL)
1981 cgraph_asm_nodes = node;
1982 else
1983 cgraph_asm_last_node->next = node;
1984 cgraph_asm_last_node = node;
1985 return node;
1988 /* Return true when the DECL can possibly be inlined. */
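/* Before the global IPA information is ready the answer is optimistic:
   anything not explicitly marked DECL_UNINLINABLE may still be inlined.
   Afterwards the recorded DECL_POSSIBLY_INLINED flag is used instead.  */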
1989 bool
1990 cgraph_function_possibly_inlined_p (tree decl)
1992 if (!cgraph_global_info_ready)
1993 return !DECL_UNINLINABLE (decl);
1994 return DECL_POSSIBLY_INLINED (decl);
1997 /* Create a clone of edge E in the callgraph, attached to node N and
     represented by the call statement CALL_STMT.  */
1998 struct cgraph_edge *
1999 cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
2000 gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
2001 int freq_scale, int loop_nest, bool update_original)
2003 struct cgraph_edge *new_edge;
2004 gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
2005 gcov_type freq;
2007 /* We do not want to ignore loop nest after frequency drops to 0. */
2008 if (!freq_scale)
2009 freq_scale = 1;
2010 freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
2011 if (freq > CGRAPH_FREQ_MAX)
2012 freq = CGRAPH_FREQ_MAX;
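  /* COUNT_SCALE is a fraction of REG_BR_PROB_BASE and FREQ_SCALE a fraction
     of CGRAPH_FREQ_BASE; the clone's profile is the original edge's profile
     scaled down by these ratios, with the frequency capped at CGRAPH_FREQ_MAX.  */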
2014 if (e->indirect_unknown_callee)
2016 tree decl;
2018 if (call_stmt && (decl = gimple_call_fndecl (call_stmt)))
2020 struct cgraph_node *callee = cgraph_node (decl);
2021 new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq,
2022 e->loop_nest + loop_nest);
2024 else
2026 new_edge = cgraph_create_indirect_edge (n, call_stmt,
2027 e->indirect_info->ecf_flags,
2028 count, freq,
2029 e->loop_nest + loop_nest);
2030 *new_edge->indirect_info = *e->indirect_info;
2033 else
2034 new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
2035 e->loop_nest + loop_nest);
2037 new_edge->inline_failed = e->inline_failed;
2038 new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
2039 new_edge->lto_stmt_uid = stmt_uid;
2040 if (update_original)
2042 e->count -= new_edge->count;
2043 if (e->count < 0)
2044 e->count = 0;
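  /* With UPDATE_ORIGINAL the profile weight moved to the clone is removed
     from the original edge, so the counts of the two edges together still
     roughly add up to the original count.  */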
2046 cgraph_call_edge_duplication_hooks (e, new_edge);
2047 return new_edge;
2050 /* Create a node representing a clone of N that is executed COUNT times.
2051    Decrease the execution counts of the original node too.
2052    The new clone will have its decl set to DECL, which may or may not be the
2053    same as the decl of N.
2055    When UPDATE_ORIGINAL is true, the counts are subtracted from the original
2056    function's profile to reflect the fact that part of the execution is now
2057    handled by the clone.  */
2058 struct cgraph_node *
2059 cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
2060 int loop_nest, bool update_original,
2061 VEC(cgraph_edge_p,heap) *redirect_callers)
2063 struct cgraph_node *new_node = cgraph_create_node ();
2064 struct cgraph_edge *e;
2065 gcov_type count_scale;
2066 unsigned i;
2068 new_node->decl = decl;
2069 new_node->origin = n->origin;
2070 if (new_node->origin)
2072 new_node->next_nested = new_node->origin->nested;
2073 new_node->origin->nested = new_node;
2075 new_node->analyzed = n->analyzed;
2076 new_node->local = n->local;
2077 new_node->local.externally_visible = false;
2078 new_node->local.local = true;
2079 new_node->local.vtable_method = false;
2080 new_node->global = n->global;
2081 new_node->rtl = n->rtl;
2082 new_node->count = count;
2083 new_node->frequency = n->frequency;
2084 new_node->clone = n->clone;
2085 new_node->clone.tree_map = 0;
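  /* Compute the factor (as a fraction of REG_BR_PROB_BASE) by which the
     outgoing edge counts of the clone are scaled below; it reflects which
     part of N's profile the clone takes over.  */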
2086 if (n->count)
2088 if (new_node->count > n->count)
2089 count_scale = REG_BR_PROB_BASE;
2090 else
2091 count_scale = new_node->count * REG_BR_PROB_BASE / n->count;
2093 else
2094 count_scale = 0;
2095 if (update_original)
2097 n->count -= count;
2098 if (n->count < 0)
2099 n->count = 0;
2102 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
2104 /* Redirect calls to the old version node to point to its new
2105 version. */
2106 cgraph_redirect_edge_callee (e, new_node);
2110 for (e = n->callees;e; e=e->next_callee)
2111 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
2112 count_scale, freq, loop_nest, update_original);
2114 for (e = n->indirect_calls; e; e = e->next_callee)
2115 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
2116 count_scale, freq, loop_nest, update_original);
2117 ipa_clone_references (new_node, NULL, &n->ref_list);
2119 new_node->next_sibling_clone = n->clones;
2120 if (n->clones)
2121 n->clones->prev_sibling_clone = new_node;
2122 n->clones = new_node;
2123 new_node->clone_of = n;
2125 cgraph_call_node_duplication_hooks (n, new_node);
2126 if (n->decl != decl)
2128 struct cgraph_node **slot;
2129 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, new_node, INSERT);
2130 gcc_assert (!*slot);
2131 *slot = new_node;
2132 if (assembler_name_hash)
2134 void **aslot;
2135 tree name = DECL_ASSEMBLER_NAME (decl);
2137 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
2138 decl_assembler_name_hash (name),
2139 INSERT);
2140 gcc_assert (!*aslot);
2141 *aslot = new_node;
2144 return new_node;
2147 /* Create a new name for a clone of DECL by appending SUFFIX.  Returns an identifier.  */
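/* For example, cloning "foo" with SUFFIX "clone" on a target that allows '.'
   in labels typically yields a private identifier along the lines of
   "foo.clone.0"; the exact spelling depends on ASM_FORMAT_PRIVATE_NAME and
   the separator chosen below.  */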
2149 static GTY(()) unsigned int clone_fn_id_num;
2151 tree
2152 clone_function_name (tree decl, const char *suffix)
2154 tree name = DECL_ASSEMBLER_NAME (decl);
2155 size_t len = IDENTIFIER_LENGTH (name);
2156 char *tmp_name, *prefix;
2158 prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
2159 memcpy (prefix, IDENTIFIER_POINTER (name), len);
2160 strcpy (prefix + len + 1, suffix);
2161 #ifndef NO_DOT_IN_LABEL
2162 prefix[len] = '.';
2163 #elif !defined NO_DOLLAR_IN_LABEL
2164 prefix[len] = '$';
2165 #else
2166 prefix[len] = '_';
2167 #endif
2168 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
2169 return get_identifier (tmp_name);
2172 /* Create a callgraph node clone with a new declaration.  The actual body
2173    will be copied later, at the compilation stage.
2175    TODO: after merging in ipa-sra, use function call notes instead of the
2176    args_to_skip bitmap interface.  */
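/* A rough usage sketch (illustrative only; "callers", "replacements" and
   "skip_args" are hypothetical locals a pass would have to build itself):

       VEC(cgraph_edge_p,heap) *callers = ...caller edges to redirect...;
       VEC(ipa_replace_map_p,gc) *replacements = ...constant substitutions...;
       bitmap skip_args = ...arguments made redundant...;
       struct cgraph_node *clone
         = cgraph_create_virtual_clone (node, callers, replacements,
                                        skip_args, "myclone");

   The edges in CALLERS are redirected to the clone, which is kept local to
   this translation unit.  */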
2178 struct cgraph_node *
2179 cgraph_create_virtual_clone (struct cgraph_node *old_node,
2180 VEC(cgraph_edge_p,heap) *redirect_callers,
2181 VEC(ipa_replace_map_p,gc) *tree_map,
2182 bitmap args_to_skip,
2183 const char * suffix)
2185 tree old_decl = old_node->decl;
2186 struct cgraph_node *new_node = NULL;
2187 tree new_decl;
2188 size_t i;
2189 struct ipa_replace_map *map;
2191 #ifdef ENABLE_CHECKING
2192 if (!flag_wpa)
2193 gcc_assert (tree_versionable_function_p (old_decl));
2194 #endif
2196 /* Make a new FUNCTION_DECL tree node.  */
2197 if (!args_to_skip)
2198 new_decl = copy_node (old_decl);
2199 else
2200 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
2201 DECL_STRUCT_FUNCTION (new_decl) = NULL;
2203 /* Generate a new name for the new version. */
2204 DECL_NAME (new_decl) = clone_function_name (old_decl, suffix);
2205 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2206 SET_DECL_RTL (new_decl, NULL);
2208 new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
2209 CGRAPH_FREQ_BASE, 0, false,
2210 redirect_callers);
2211 /* Update the properties.
2212    Make the clone visible only within this translation unit.  Make sure
2213    that it is not weak, either.
2214    ??? We cannot use COMDAT linkage because there is no
2215    ABI support for this.  */
2216 DECL_EXTERNAL (new_node->decl) = 0;
2217 DECL_COMDAT_GROUP (new_node->decl) = 0;
2218 TREE_PUBLIC (new_node->decl) = 0;
2219 DECL_COMDAT (new_node->decl) = 0;
2220 DECL_WEAK (new_node->decl) = 0;
2221 new_node->clone.tree_map = tree_map;
2222 new_node->clone.args_to_skip = args_to_skip;
2223 for (i = 0; VEC_iterate (ipa_replace_map_p, tree_map, i, map); i++)
2225 tree var = map->new_tree;
2227 STRIP_NOPS (var);
2228 if (TREE_CODE (var) != ADDR_EXPR)
2229 continue;
2230 var = get_base_var (var);
2231 if (!var)
2232 continue;
2234 /* Record references of the future statement initializing the constant
2235 argument. */
2236 if (TREE_CODE (var) == FUNCTION_DECL)
2237 ipa_record_reference (new_node, NULL, cgraph_node (var),
2238 NULL, IPA_REF_ADDR, NULL);
2239 else if (TREE_CODE (var) == VAR_DECL)
2240 ipa_record_reference (new_node, NULL, NULL, varpool_node (var),
2241 IPA_REF_ADDR, NULL);
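  /* Merge ARGS_TO_SKIP (indexed over OLD_NODE's surviving arguments) with any
     bitmap OLD_NODE already carries, expressing the result in terms of the
     argument positions of the ultimate original function.  E.g. if the
     original had arguments (a, b, c), OLD_NODE already skipped b, and
     ARGS_TO_SKIP has bit 1 set (c in OLD_NODE's numbering), the combined
     bitmap gets bits 1 and 2 set.  */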
2243 if (!args_to_skip)
2244 new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
2245 else if (old_node->clone.combined_args_to_skip)
2247 int newi = 0, oldi = 0;
2248 tree arg;
2249 bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
2250 struct cgraph_node *orig_node;
2251 for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
2253 for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = TREE_CHAIN (arg), oldi++)
2255 if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
2257 bitmap_set_bit (new_args_to_skip, oldi);
2258 continue;
2260 if (bitmap_bit_p (args_to_skip, newi))
2261 bitmap_set_bit (new_args_to_skip, oldi);
2262 newi++;
2264 new_node->clone.combined_args_to_skip = new_args_to_skip;
2266 else
2267 new_node->clone.combined_args_to_skip = args_to_skip;
2268 new_node->local.externally_visible = 0;
2269 new_node->local.local = 1;
2270 new_node->lowered = true;
2271 new_node->reachable = true;
2274 return new_node;
2277 /* NODE is no longer a nested function; update the cgraph accordingly.  */
2278 void
2279 cgraph_unnest_node (struct cgraph_node *node)
2281 struct cgraph_node **node2 = &node->origin->nested;
2282 gcc_assert (node->origin);
2284 while (*node2 != node)
2285 node2 = &(*node2)->next_nested;
2286 *node2 = node->next_nested;
2287 node->origin = NULL;
2290 /* Return function availability. See cgraph.h for description of individual
2291 return values. */
2292 enum availability
2293 cgraph_function_body_availability (struct cgraph_node *node)
2295 enum availability avail;
2296 gcc_assert (cgraph_function_flags_ready);
2297 if (!node->analyzed)
2298 avail = AVAIL_NOT_AVAILABLE;
2299 else if (node->local.local)
2300 avail = AVAIL_LOCAL;
2301 else if (!node->local.externally_visible)
2302 avail = AVAIL_AVAILABLE;
2303 /* Inline functions are safe to be analyzed even if their symbol can
2304    be overwritten at runtime.  It is not meaningful to enforce any sane
2305    behaviour when an inline function is replaced by a different body.  */
2306 else if (DECL_DECLARED_INLINE_P (node->decl))
2307 avail = AVAIL_AVAILABLE;
2309 /* If the function can be overwritten, return OVERWRITABLE.  Take
2310    care of at least two notable extensions - the COMDAT functions
2311    used to share template instantiations in C++ (this is symmetric
2312    to the code in cp_cannot_inline_tree_fn and probably should be shared,
2313    with the inlinability hooks completely eliminated).
2315    ??? Does the C++ one-definition rule allow us to always return
2316    AVAIL_AVAILABLE here?  That would be a good reason to preserve this
2317    bit.  */
2319 else if (DECL_REPLACEABLE_P (node->decl) && !DECL_EXTERNAL (node->decl))
2320 avail = AVAIL_OVERWRITABLE;
2321 else avail = AVAIL_AVAILABLE;
2323 return avail;
2326 /* Add the function FNDECL to the call graph.
2327    Unlike cgraph_finalize_function, this function is intended to be used
2328    by the middle end and allows insertion of a new function at an arbitrary
2329    point of compilation.  The function can be in high, low or SSA GIMPLE
2330    form.
2332    The function is assumed to be reachable and to have its address taken
2333    (so no API-breaking optimizations are performed on it).
2335    The main work done by this function is to enqueue the function for later
2336    processing, to avoid the need for passes to be re-entrant.  */
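/* A minimal sketch of the intended use (illustrative; FNDECL stands for a
   function the caller has just synthesized and given a GIMPLE body):

       tree fndecl = ...build the FUNCTION_DECL and its body...;
       cgraph_add_new_function (fndecl, false);

   Depending on cgraph_state the function is merely queued, lowered and
   queued, or compiled immediately, as handled by the switch below.  */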
2338 void
2339 cgraph_add_new_function (tree fndecl, bool lowered)
2341 struct cgraph_node *node;
2342 switch (cgraph_state)
2344 case CGRAPH_STATE_CONSTRUCTION:
2345 /* Just enqueue function to be processed at nearest occurrence. */
2346 node = cgraph_node (fndecl);
2347 node->next_needed = cgraph_new_nodes;
2348 if (lowered)
2349 node->lowered = true;
2350 cgraph_new_nodes = node;
2351 break;
2353 case CGRAPH_STATE_IPA:
2354 case CGRAPH_STATE_IPA_SSA:
2355 case CGRAPH_STATE_EXPANSION:
2356 /* Bring the function into the finalized state and enqueue it for later
2357    analysis and compilation.  */
2358 node = cgraph_node (fndecl);
2359 node->local.local = false;
2360 node->local.finalized = true;
2361 node->reachable = node->needed = true;
2362 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
2364 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2365 current_function_decl = fndecl;
2366 gimple_register_cfg_hooks ();
2367 tree_lowering_passes (fndecl);
2368 bitmap_obstack_initialize (NULL);
2369 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2370 execute_pass_list (pass_early_local_passes.pass.sub);
2371 bitmap_obstack_release (NULL);
2372 pop_cfun ();
2373 current_function_decl = NULL;
2375 lowered = true;
2377 if (lowered)
2378 node->lowered = true;
2379 node->next_needed = cgraph_new_nodes;
2380 cgraph_new_nodes = node;
2381 break;
2383 case CGRAPH_STATE_FINISHED:
2384 /* At the very end of compilation we have to do all the work up
2385 to expansion. */
2386 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2387 current_function_decl = fndecl;
2388 gimple_register_cfg_hooks ();
2389 if (!lowered)
2390 tree_lowering_passes (fndecl);
2391 bitmap_obstack_initialize (NULL);
2392 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2393 execute_pass_list (pass_early_local_passes.pass.sub);
2394 bitmap_obstack_release (NULL);
2395 tree_rest_of_compilation (fndecl);
2396 pop_cfun ();
2397 current_function_decl = NULL;
2398 break;
2401 /* Set a personality if required and we already passed EH lowering. */
2402 if (lowered
2403 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
2404 == eh_personality_lang))
2405 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
2408 /* Return true if NODE can be made local without an API change.
2409    Extern inline functions and C++ COMDAT functions can be made local
2410    at the expense of possible code size growth if the function is used in
2411    multiple compilation units.  */
2412 bool
2413 cgraph_node_can_be_local_p (struct cgraph_node *node)
2415 return (!node->needed && !node->address_taken
2416 && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
2417 || !node->local.externally_visible));
2420 /* Make DECL local. FIXME: We shouldn't need to mess with rtl this early,
2421 but other code such as notice_global_symbol generates rtl. */
2422 void
2423 cgraph_make_decl_local (tree decl)
2425 rtx rtl, symbol;
2427 if (TREE_CODE (decl) == VAR_DECL)
2428 DECL_COMMON (decl) = 0;
2429 else if (TREE_CODE (decl) == FUNCTION_DECL)
2431 DECL_COMDAT (decl) = 0;
2432 DECL_COMDAT_GROUP (decl) = 0;
2433 DECL_WEAK (decl) = 0;
2434 DECL_EXTERNAL (decl) = 0;
2436 else
2437 gcc_unreachable ();
2438 TREE_PUBLIC (decl) = 0;
2439 if (!DECL_RTL_SET_P (decl))
2440 return;
2442 /* Update rtl flags. */
2443 make_decl_rtl (decl);
2445 rtl = DECL_RTL (decl);
2446 if (!MEM_P (rtl))
2447 return;
2449 symbol = XEXP (rtl, 0);
2450 if (GET_CODE (symbol) != SYMBOL_REF)
2451 return;
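  /* Keep the SYMBOL_REF's weak flag in sync with DECL_WEAK, which may just
     have been cleared above, so already-generated RTL does not keep the
     symbol weak.  */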
2453 SYMBOL_REF_WEAK (symbol) = DECL_WEAK (decl);
2456 /* Make NODE local.  */
2457 void
2458 cgraph_make_node_local (struct cgraph_node *node)
2460 gcc_assert (cgraph_node_can_be_local_p (node));
2461 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2463 struct cgraph_node *alias;
2464 cgraph_make_decl_local (node->decl);
2466 for (alias = node->same_body; alias; alias = alias->next)
2467 cgraph_make_decl_local (alias->decl);
2469 node->local.externally_visible = false;
2470 node->local.local = true;
2471 gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
2475 /* Set TREE_NOTHROW on NODE's decl and on any same_body aliases of NODE
2476    to NOTHROW.  */
2478 void
2479 cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
2481 struct cgraph_node *alias;
2482 TREE_NOTHROW (node->decl) = nothrow;
2483 for (alias = node->same_body; alias; alias = alias->next)
2484 TREE_NOTHROW (alias->decl) = nothrow;
2487 /* Set TREE_READONLY on NODE's decl and on any same_body aliases of NODE
2488    to READONLY.  */
2490 void
2491 cgraph_set_readonly_flag (struct cgraph_node *node, bool readonly)
2493 struct cgraph_node *alias;
2494 TREE_READONLY (node->decl) = readonly;
2495 for (alias = node->same_body; alias; alias = alias->next)
2496 TREE_READONLY (alias->decl) = readonly;
2499 /* Set DECL_PURE_P on NODE's decl and on any same_body aliases of NODE
2500    to PURE.  */
2502 void
2503 cgraph_set_pure_flag (struct cgraph_node *node, bool pure)
2505 struct cgraph_node *alias;
2506 DECL_PURE_P (node->decl) = pure;
2507 for (alias = node->same_body; alias; alias = alias->next)
2508 DECL_PURE_P (alias->decl) = pure;
2511 /* Set DECL_LOOPING_CONST_OR_PURE_P on NODE's decl and on any
2512    same_body aliases of NODE to LOOPING_CONST_OR_PURE.  */
2514 void
2515 cgraph_set_looping_const_or_pure_flag (struct cgraph_node *node,
2516 bool looping_const_or_pure)
2518 struct cgraph_node *alias;
2519 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping_const_or_pure;
2520 for (alias = node->same_body; alias; alias = alias->next)
2521 DECL_LOOPING_CONST_OR_PURE_P (alias->decl) = looping_const_or_pure;
2524 /* See if the frequency of NODE can be updated based on frequencies of its
2525 callers. */
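/* In short: considering only caller edges with nonzero frequency, a local
   function is marked unlikely executed when all of its callers are unlikely
   executed, and marked executed once when every caller is unlikely executed
   or executed once and no call from an executed-once caller sits in a loop.  */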
2526 bool
2527 cgraph_propagate_frequency (struct cgraph_node *node)
2529 bool maybe_unlikely_executed = true, maybe_executed_once = true;
2530 struct cgraph_edge *edge;
2531 if (!node->local.local)
2532 return false;
2533 gcc_assert (node->analyzed);
2534 if (node->frequency == NODE_FREQUENCY_HOT)
2535 return false;
2536 if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2537 return false;
2538 if (dump_file && (dump_flags & TDF_DETAILS))
2539 fprintf (dump_file, "Processing frequency %s\n", cgraph_node_name (node));
2540 for (edge = node->callers;
2541 edge && (maybe_unlikely_executed || maybe_executed_once);
2542 edge = edge->next_caller)
2544 if (!edge->frequency)
2545 continue;
2546 switch (edge->caller->frequency)
2548 case NODE_FREQUENCY_UNLIKELY_EXECUTED:
2549 break;
2550 case NODE_FREQUENCY_EXECUTED_ONCE:
2551 if (dump_file && (dump_flags & TDF_DETAILS))
2552 fprintf (dump_file, " Called by %s that is executed once\n", cgraph_node_name (node));
2553 maybe_unlikely_executed = false;
2554 if (edge->loop_nest)
2556 maybe_executed_once = false;
2557 if (dump_file && (dump_flags & TDF_DETAILS))
2558 fprintf (dump_file, " Called in loop\n");
2560 break;
2561 case NODE_FREQUENCY_HOT:
2562 case NODE_FREQUENCY_NORMAL:
2563 if (dump_file && (dump_flags & TDF_DETAILS))
2564 fprintf (dump_file, " Called by %s that is normal or hot\n", cgraph_node_name (node));
2565 maybe_unlikely_executed = false;
2566 maybe_executed_once = false;
2567 break;
2570 if (maybe_unlikely_executed)
2572 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
2573 if (dump_file)
2574 fprintf (dump_file, "Node %s promoted to unlikely executed.\n", cgraph_node_name (node));
2575 return true;
2577 if (maybe_executed_once && node->frequency != NODE_FREQUENCY_EXECUTED_ONCE)
2579 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2580 if (dump_file)
2581 fprintf (dump_file, "Node %s promoted to executed once.\n", cgraph_node_name (node));
2582 return true;
2584 return false;
2587 /* Return true when NODE cannot return or throw and thus
2588    it is safe to ignore its side effects for IPA analysis.  */
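/* For instance, a function declared with both the "noreturn" and "nothrow"
   attributes qualifies; without -fexceptions, "noreturn" alone is enough.  */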
2590 bool
2591 cgraph_node_cannot_return (struct cgraph_node *node)
2593 int flags = flags_from_decl_or_type (node->decl);
2594 if (!flag_exceptions)
2595 return (flags & ECF_NORETURN) != 0;
2596 else
2597 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2598 == (ECF_NORETURN | ECF_NOTHROW));
2601 /* Return true when a call along edge E cannot lead to a return from the
2602    caller and thus it is safe to ignore its side effects for IPA analysis
2603    when computing side effects of the caller.
2604    FIXME: We could actually mark all edges that have no reaching
2605    path to EXIT_BLOCK_PTR or that throw, to get better results.  */
2606 bool
2607 cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
2609 if (e->indirect_unknown_callee)
2611 int flags = e->indirect_info->ecf_flags;
2612 if (!flag_exceptions)
2613 return (flags & ECF_NORETURN) != 0;
2614 else
2615 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2616 == (ECF_NORETURN | ECF_NOTHROW));
2618 else
2619 return cgraph_node_cannot_return (e->callee);
2622 #include "gt-cgraph.h"