/* Callgraph handling code.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* This file contains basic routines manipulating the call graph and the
   variable pool.

The callgraph:

    The call-graph is a data structure designed for inter-procedural
    optimization, but it is also used in non-unit-at-a-time compilation to
    allow easier code sharing.

    The call-graph consists of nodes and edges represented via linked
    lists.  Each function (external or not) corresponds to a unique node
    (in contrast to tree DECL nodes, where we can have multiple nodes for
    each function).

    The mapping from declarations to call-graph nodes is done using a hash
    table based on DECL_ASSEMBLER_NAME, so it is essential for the
    assembler name not to change once the declaration is inserted into the
    call-graph.  The call-graph nodes are created lazily by the
    cgraph_node function when it is called for an unknown declaration.

    When built, there is one edge for each direct call.  It is possible
    that the reference will be optimized out later.  The call-graph is
    built conservatively in order to make conservative data flow analysis
    possible.

    The callgraph at the moment does not represent indirect calls or calls
    from other compilation units.  The flag NEEDED is set for each node
    that may be accessed in such an invisible way, and such nodes shall be
    considered entry points to the callgraph.

Interprocedural information:

    The callgraph is the place to store data needed for interprocedural
    optimization.  All data structures are divided into three components:
    local_info, which is produced while analyzing the function;
    global_info, which is the result of walking the whole callgraph at the
    end of compilation; and rtl_info, used by the RTL backend to propagate
    data from already compiled functions to their callers.

Inlining plans:

    The function inlining information is decided in advance and maintained
    in the callgraph as a so-called inline plan.  For each inlined call,
    the callee's node is cloned to represent the new function copy
    produced by the inliner.  Each inlined call gets a unique
    corresponding clone node of the callee, and the data structure is
    updated while inlining is performed, so that the clones are eliminated
    and their callee edges redirected to the caller.

    Each edge has an "inline_failed" field.  When the field is NULL, the
    call will be inlined.  When it is non-NULL, it contains a reason why
    inlining wasn't performed.

The varpool data structure:

    The varpool is used to maintain variables in a manner similar to the
    way the call-graph is used for functions.  Most of the API is
    symmetric, replacing the cgraph function prefix with cgraph_varpool.  */
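/* As a rough usage sketch (illustrative only; "fndecl" and "callee_decl"
   stand for FUNCTION_DECLs provided by a front end, "stmt" for the
   statement containing the call, and "var_decl" for a variable whose
   definition has been completed):

     struct cgraph_node *caller = cgraph_node (fndecl);
     struct cgraph_node *callee = cgraph_node (callee_decl);
     struct cgraph_edge *e = cgraph_create_edge (caller, callee, stmt,
						 0, 0);
     if (e->inline_failed)
       fprintf (stderr, "not inlined: %s\n", e->inline_failed);

     cgraph_varpool_finalize_decl (var_decl);

   The last two arguments of cgraph_create_edge are the execution count of
   the call and its loop nesting depth.  */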
82 #include "config.h"
83 #include "system.h"
84 #include "coretypes.h"
85 #include "tm.h"
86 #include "tree.h"
87 #include "tree-inline.h"
88 #include "langhooks.h"
89 #include "hashtab.h"
90 #include "toplev.h"
91 #include "flags.h"
92 #include "ggc.h"
93 #include "debug.h"
94 #include "target.h"
95 #include "basic-block.h"
96 #include "cgraph.h"
97 #include "varray.h"
98 #include "output.h"
99 #include "intl.h"
100 #include "tree-gimple.h"
101 #include "tree-dump.h"
102 #include "vec.h"
104 static void cgraph_node_remove_callers (struct cgraph_node *node);
105 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
106 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
108 /* Hash table used to convert declarations into nodes. */
109 static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
111 /* The linked list of cgraph nodes. */
112 struct cgraph_node *cgraph_nodes;
113 typedef struct cgraph_node *cgraph_node_p;
114 DEF_VEC_P(cgraph_node_p);
115 DEF_VEC_ALLOC_P(cgraph_node_p,gc);
116 static GTY(()) VEC(cgraph_node_p,gc) *cgraph_nodes_vec;
118 /* Queue of cgraph nodes scheduled to be lowered. */
119 struct cgraph_node *cgraph_nodes_queue;
/* Queue of cgraph nodes scheduled to be expanded.  This is a
   secondary queue used during optimization to accommodate passes that
   may generate new functions that need to be optimized and expanded.  */
124 struct cgraph_node *cgraph_expand_queue;
126 /* Number of nodes in existence. */
127 int cgraph_n_nodes;
129 /* Maximal uid used in cgraph nodes. */
130 int cgraph_max_uid;
/* Set when the whole unit has been analyzed so we can access global info.  */
133 bool cgraph_global_info_ready = false;
/* Set when the cgraph is fully built and the basic flags are computed.  */
136 bool cgraph_function_flags_ready = false;
/* Hash table used to convert variable declarations into varpool nodes.  */
139 static GTY((param_is (struct cgraph_varpool_node))) htab_t cgraph_varpool_hash;
/* Queue of cgraph varpool nodes scheduled to be lowered and output.  */
142 struct cgraph_varpool_node *cgraph_varpool_nodes_queue, *cgraph_varpool_first_unanalyzed_node;
144 /* The linked list of cgraph varpool nodes. */
145 static GTY(()) struct cgraph_varpool_node *cgraph_varpool_nodes;
/* End of the varpool queue.  Needs to be GTYed to work with PCH.  */
148 static GTY(()) struct cgraph_varpool_node *cgraph_varpool_last_needed_node;
150 /* Linked list of cgraph asm nodes. */
151 struct cgraph_asm_node *cgraph_asm_nodes;
153 /* Last node in cgraph_asm_nodes. */
154 static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;
156 /* The order index of the next cgraph node to be created. This is
157 used so that we can sort the cgraph nodes in order by when we saw
158 them, to support -fno-toplevel-reorder. */
159 int cgraph_order;
161 static hashval_t hash_node (const void *);
162 static int eq_node (const void *, const void *);
164 /* Returns a hash code for P. */
166 static hashval_t
167 hash_node (const void *p)
169 const struct cgraph_node *n = (const struct cgraph_node *) p;
170 return (hashval_t) DECL_UID (n->decl);
173 /* Returns nonzero if P1 and P2 are equal. */
175 static int
176 eq_node (const void *p1, const void *p2)
178 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
179 const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
180 return DECL_UID (n1->decl) == DECL_UID (n2->decl);
183 /* Allocate new callgraph node and insert it into basic data structures. */
184 static struct cgraph_node *
185 cgraph_create_node (void)
187 struct cgraph_node *node;
189 node = GGC_CNEW (struct cgraph_node);
190 node->next = cgraph_nodes;
191 node->uid = cgraph_max_uid++;
192 if (!node->uid)
193 cgraph_nodes_vec = VEC_alloc (cgraph_node_p, gc, 1);
194 VEC_safe_insert (cgraph_node_p, gc, cgraph_nodes_vec, node->uid, node);
195 node->order = cgraph_order++;
196 if (cgraph_nodes)
197 cgraph_nodes->previous = node;
198 node->previous = NULL;
199 node->global.estimated_growth = INT_MIN;
200 cgraph_nodes = node;
201 cgraph_n_nodes++;
202 return node;
205 /* Return cgraph node assigned to DECL. Create new one when needed. */
206 struct cgraph_node *
207 cgraph_node (tree decl)
209 struct cgraph_node key, *node, **slot;
211 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
213 if (!cgraph_hash)
214 cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
216 key.decl = decl;
218 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
220 if (*slot)
222 node = *slot;
223 if (!node->master_clone)
224 node->master_clone = node;
225 return node;
228 node = cgraph_create_node ();
229 node->decl = decl;
230 *slot = node;
231 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
233 node->origin = cgraph_node (DECL_CONTEXT (decl));
234 node->next_nested = node->origin->nested;
235 node->origin->nested = node;
236 node->master_clone = node;
238 return node;
241 /* Insert already constructed node into hashtable. */
243 void
244 cgraph_insert_node_to_hashtable (struct cgraph_node *node)
246 struct cgraph_node **slot;
248 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
250 gcc_assert (!*slot);
251 *slot = node;
254 /* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL. */
256 static bool
257 decl_assembler_name_equal (tree decl, tree asmname)
259 tree decl_asmname = DECL_ASSEMBLER_NAME (decl);
261 if (decl_asmname == asmname)
262 return true;
264 /* If the target assembler name was set by the user, things are trickier.
265 We have a leading '*' to begin with. After that, it's arguable what
266 is the correct thing to do with -fleading-underscore. Arguably, we've
267 historically been doing the wrong thing in assemble_alias by always
268 printing the leading underscore. Since we're not changing that, make
269 sure user_label_prefix follows the '*' before matching. */
270 if (IDENTIFIER_POINTER (decl_asmname)[0] == '*')
272 const char *decl_str = IDENTIFIER_POINTER (decl_asmname) + 1;
273 size_t ulp_len = strlen (user_label_prefix);
275 if (ulp_len == 0)
277 else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0)
278 decl_str += ulp_len;
279 else
280 return false;
282 return strcmp (decl_str, IDENTIFIER_POINTER (asmname)) == 0;
285 return false;
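/* For example (illustrative values only): with user_label_prefix "_", a
   decl whose assembler name was set by hand to "*_foo" compares equal to
   the identifier "foo", while "*foo" matches nothing in that
   configuration; it would match "foo" only if user_label_prefix were
   empty.  */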
289 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
290 Return NULL if there's no such node. */
292 struct cgraph_node *
293 cgraph_node_for_asm (tree asmname)
295 struct cgraph_node *node;
297 for (node = cgraph_nodes; node ; node = node->next)
298 if (decl_assembler_name_equal (node->decl, asmname))
299 return node;
301 return NULL;
304 /* Return callgraph edge representing CALL_EXPR statement. */
305 struct cgraph_edge *
306 cgraph_edge (struct cgraph_node *node, tree call_stmt)
308 struct cgraph_edge *e;
/* This loop may turn out to be a performance problem.  In that case,
   adding hashtables into call nodes with very many edges is probably the
   best solution.  It is not a good idea to add a pointer into the
   CALL_EXPR itself, because we want to make it possible to have multiple
   cgraph nodes representing different clones of the same body before the
   body is actually cloned.  */
for (e = node->callees; e; e = e->next_callee)
316 if (e->call_stmt == call_stmt)
317 break;
318 return e;
321 /* Create edge from CALLER to CALLEE in the cgraph. */
323 struct cgraph_edge *
324 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
325 tree call_stmt, gcov_type count, int nest)
327 struct cgraph_edge *edge = GGC_NEW (struct cgraph_edge);
328 #ifdef ENABLE_CHECKING
329 struct cgraph_edge *e;
331 for (e = caller->callees; e; e = e->next_callee)
332 gcc_assert (e->call_stmt != call_stmt);
333 #endif
335 gcc_assert (get_call_expr_in (call_stmt));
337 if (!DECL_SAVED_TREE (callee->decl))
338 edge->inline_failed = N_("function body not available");
339 else if (callee->local.redefined_extern_inline)
340 edge->inline_failed = N_("redefined extern inline functions are not "
341 "considered for inlining");
342 else if (callee->local.inlinable)
343 edge->inline_failed = N_("function not considered for inlining");
344 else
345 edge->inline_failed = N_("function not inlinable");
347 edge->aux = NULL;
349 edge->caller = caller;
350 edge->callee = callee;
351 edge->call_stmt = call_stmt;
352 edge->prev_caller = NULL;
353 edge->next_caller = callee->callers;
354 if (callee->callers)
355 callee->callers->prev_caller = edge;
356 edge->prev_callee = NULL;
357 edge->next_callee = caller->callees;
358 if (caller->callees)
359 caller->callees->prev_callee = edge;
360 caller->callees = edge;
361 callee->callers = edge;
362 edge->count = count;
363 edge->loop_nest = nest;
364 return edge;
367 /* Remove the edge E from the list of the callers of the callee. */
369 static inline void
370 cgraph_edge_remove_callee (struct cgraph_edge *e)
372 if (e->prev_caller)
373 e->prev_caller->next_caller = e->next_caller;
374 if (e->next_caller)
375 e->next_caller->prev_caller = e->prev_caller;
376 if (!e->prev_caller)
377 e->callee->callers = e->next_caller;
380 /* Remove the edge E from the list of the callees of the caller. */
382 static inline void
383 cgraph_edge_remove_caller (struct cgraph_edge *e)
385 if (e->prev_callee)
386 e->prev_callee->next_callee = e->next_callee;
387 if (e->next_callee)
388 e->next_callee->prev_callee = e->prev_callee;
389 if (!e->prev_callee)
390 e->caller->callees = e->next_callee;
393 /* Remove the edge E in the cgraph. */
395 void
396 cgraph_remove_edge (struct cgraph_edge *e)
398 /* Remove from callers list of the callee. */
399 cgraph_edge_remove_callee (e);
401 /* Remove from callees list of the callers. */
402 cgraph_edge_remove_caller (e);
405 /* Redirect callee of E to N. The function does not update underlying
406 call expression. */
408 void
409 cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
411 /* Remove from callers list of the current callee. */
412 cgraph_edge_remove_callee (e);
414 /* Insert to callers list of the new callee. */
415 e->prev_caller = NULL;
416 if (n->callers)
417 n->callers->prev_caller = e;
418 e->next_caller = n->callers;
419 n->callers = e;
420 e->callee = n;
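/* As a rough sketch (illustrative only), the inline plan described in the
   file header is typically recorded by cloning the callee and redirecting
   the edge to the clone; "e" stands for an existing call edge whose
   callee was selected for inlining:

     struct cgraph_node *clone
       = cgraph_clone_node (e->callee, e->count, e->loop_nest, true);
     cgraph_redirect_edge_callee (e, clone);
     e->inline_failed = NULL;

   The duplication of the callee's body and the rewriting of the call
   statement itself are done elsewhere, by the tree inliner.  */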
423 /* Remove all callees from the node. */
425 void
426 cgraph_node_remove_callees (struct cgraph_node *node)
428 struct cgraph_edge *e;
430 /* It is sufficient to remove the edges from the lists of callers of
431 the callees. The callee list of the node can be zapped with one
432 assignment. */
433 for (e = node->callees; e; e = e->next_callee)
434 cgraph_edge_remove_callee (e);
435 node->callees = NULL;
438 /* Remove all callers from the node. */
440 static void
441 cgraph_node_remove_callers (struct cgraph_node *node)
443 struct cgraph_edge *e;
445 /* It is sufficient to remove the edges from the lists of callees of
446 the callers. The caller list of the node can be zapped with one
447 assignment. */
448 for (e = node->callers; e; e = e->next_caller)
449 cgraph_edge_remove_caller (e);
450 node->callers = NULL;
453 /* Remove the node from cgraph. */
455 void
456 cgraph_remove_node (struct cgraph_node *node)
458 void **slot;
459 bool kill_body = false;
461 VEC_replace (cgraph_node_p, cgraph_nodes_vec, node->uid, NULL);
462 cgraph_node_remove_callers (node);
463 cgraph_node_remove_callees (node);
464 while (node->nested)
465 cgraph_remove_node (node->nested);
466 if (node->origin)
468 struct cgraph_node **node2 = &node->origin->nested;
470 while (*node2 != node)
471 node2 = &(*node2)->next_nested;
472 *node2 = node->next_nested;
474 if (node->previous)
475 node->previous->next = node->next;
476 else
477 cgraph_nodes = node->next;
478 if (node->next)
479 node->next->previous = node->previous;
480 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
481 if (*slot == node)
483 if (node->next_clone)
485 struct cgraph_node *new_node = node->next_clone;
486 struct cgraph_node *n;
488 /* Make the next clone be the master clone */
489 for (n = new_node; n; n = n->next_clone)
490 n->master_clone = new_node;
492 *slot = new_node;
493 node->next_clone->prev_clone = NULL;
495 else
497 htab_clear_slot (cgraph_hash, slot);
498 kill_body = true;
501 else
503 node->prev_clone->next_clone = node->next_clone;
504 if (node->next_clone)
505 node->next_clone->prev_clone = node->prev_clone;
/* While all the clones are removed after being processed, the function
   itself is kept in the cgraph even after it is compiled.  Check whether
   we are done with this body and reclaim it proactively if this is the
   case.  */
512 if (!kill_body && *slot)
514 struct cgraph_node *n = (struct cgraph_node *) *slot;
515 if (!n->next_clone && !n->global.inlined_to
516 && (cgraph_global_info_ready
517 && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl))))
518 kill_body = true;
521 if (kill_body && !dump_enabled_p (TDI_tree_all) && flag_unit_at_a_time)
523 DECL_SAVED_TREE (node->decl) = NULL;
524 DECL_STRUCT_FUNCTION (node->decl) = NULL;
525 DECL_INITIAL (node->decl) = error_mark_node;
527 cgraph_n_nodes--;
528 /* Do not free the structure itself so the walk over chain can continue. */
531 /* Notify finalize_compilation_unit that given node is reachable. */
533 void
534 cgraph_mark_reachable_node (struct cgraph_node *node)
536 if (!node->reachable && node->local.finalized)
538 notice_global_symbol (node->decl);
539 node->reachable = 1;
540 gcc_assert (!cgraph_global_info_ready);
542 node->next_needed = cgraph_nodes_queue;
543 cgraph_nodes_queue = node;
547 /* Likewise indicate that a node is needed, i.e. reachable via some
548 external means. */
550 void
551 cgraph_mark_needed_node (struct cgraph_node *node)
553 node->needed = 1;
554 cgraph_mark_reachable_node (node);
557 /* Return local info for the compiled function. */
559 struct cgraph_local_info *
560 cgraph_local_info (tree decl)
562 struct cgraph_node *node;
564 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
565 node = cgraph_node (decl);
566 return &node->local;
/* Return global info for the compiled function.  */
571 struct cgraph_global_info *
572 cgraph_global_info (tree decl)
574 struct cgraph_node *node;
576 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
577 node = cgraph_node (decl);
578 return &node->global;
/* Return RTL info for the compiled function.  */
583 struct cgraph_rtl_info *
584 cgraph_rtl_info (tree decl)
586 struct cgraph_node *node;
588 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
589 node = cgraph_node (decl);
590 if (decl != current_function_decl
591 && !TREE_ASM_WRITTEN (node->decl))
592 return NULL;
593 return &node->rtl;
596 /* Return name of the node used in debug output. */
597 const char *
598 cgraph_node_name (struct cgraph_node *node)
600 return lang_hooks.decl_printable_name (node->decl, 2);
603 /* Return name of the node used in debug output. */
604 static const char *
605 cgraph_varpool_node_name (struct cgraph_varpool_node *node)
607 return lang_hooks.decl_printable_name (node->decl, 2);
610 /* Names used to print out the availability enum. */
611 static const char * const availability_names[] =
{"unset", "not_available", "overwritable", "available", "local"};
614 /* Dump given cgraph node. */
615 void
616 dump_cgraph_node (FILE *f, struct cgraph_node *node)
618 struct cgraph_edge *edge;
619 fprintf (f, "%s/%i:", cgraph_node_name (node), node->uid);
620 if (node->global.inlined_to)
621 fprintf (f, " (inline copy in %s/%i)",
622 cgraph_node_name (node->global.inlined_to),
623 node->global.inlined_to->uid);
624 if (cgraph_function_flags_ready)
625 fprintf (f, " availability:%s",
626 availability_names [cgraph_function_body_availability (node)]);
627 if (node->master_clone && node->master_clone->uid != node->uid)
628 fprintf (f, "(%i)", node->master_clone->uid);
629 if (node->count)
630 fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
631 (HOST_WIDEST_INT)node->count);
632 if (node->local.self_insns)
633 fprintf (f, " %i insns", node->local.self_insns);
634 if (node->global.insns && node->global.insns != node->local.self_insns)
635 fprintf (f, " (%i after inlining)", node->global.insns);
636 if (node->origin)
637 fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
638 if (node->needed)
639 fprintf (f, " needed");
640 else if (node->reachable)
641 fprintf (f, " reachable");
642 if (DECL_SAVED_TREE (node->decl))
643 fprintf (f, " tree");
644 if (node->output)
645 fprintf (f, " output");
646 if (node->local.local)
647 fprintf (f, " local");
648 if (node->local.externally_visible)
649 fprintf (f, " externally_visible");
650 if (node->local.finalized)
651 fprintf (f, " finalized");
652 if (node->local.disregard_inline_limits)
653 fprintf (f, " always_inline");
654 else if (node->local.inlinable)
655 fprintf (f, " inlinable");
656 if (node->local.redefined_extern_inline)
657 fprintf (f, " redefined_extern_inline");
658 if (TREE_ASM_WRITTEN (node->decl))
659 fprintf (f, " asm_written");
660 if (TREE_ADDRESSABLE (node->decl))
661 fprintf (f, " addressable");
663 fprintf (f, "\n called by: ");
664 for (edge = node->callers; edge; edge = edge->next_caller)
666 fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
667 edge->caller->uid);
668 if (edge->count)
669 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
670 (HOST_WIDEST_INT)edge->count);
671 if (!edge->inline_failed)
672 fprintf(f, "(inlined) ");
675 fprintf (f, "\n calls: ");
676 for (edge = node->callees; edge; edge = edge->next_callee)
678 fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
679 edge->callee->uid);
680 if (!edge->inline_failed)
681 fprintf(f, "(inlined) ");
682 if (edge->count)
683 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
684 (HOST_WIDEST_INT)edge->count);
685 if (edge->loop_nest)
686 fprintf (f, "(nested in %i loops) ", edge->loop_nest);
688 fprintf (f, "\n");
691 /* Dump the callgraph. */
693 void
694 dump_cgraph (FILE *f)
696 struct cgraph_node *node;
698 fprintf (f, "callgraph:\n\n");
699 for (node = cgraph_nodes; node; node = node->next)
700 dump_cgraph_node (f, node);
/* Dump given cgraph varpool node.  */
704 void
705 dump_cgraph_varpool_node (FILE *f, struct cgraph_varpool_node *node)
707 fprintf (f, "%s:", cgraph_varpool_node_name (node));
708 fprintf (f, " availability:%s", availability_names [cgraph_variable_initializer_availability (node)]);
709 if (DECL_INITIAL (node->decl))
710 fprintf (f, " initialized");
711 if (node->needed)
712 fprintf (f, " needed");
713 if (node->analyzed)
714 fprintf (f, " analyzed");
715 if (node->finalized)
716 fprintf (f, " finalized");
717 if (node->output)
718 fprintf (f, " output");
719 if (node->externally_visible)
720 fprintf (f, " externally_visible");
721 fprintf (f, "\n");
/* Dump the varpool.  */
726 void
727 dump_varpool (FILE *f)
729 struct cgraph_varpool_node *node;
731 fprintf (f, "variable pool:\n\n");
732 for (node = cgraph_varpool_nodes; node; node = node->next_needed)
733 dump_cgraph_varpool_node (f, node);
736 /* Returns a hash code for P. */
738 static hashval_t
739 hash_varpool_node (const void *p)
741 const struct cgraph_varpool_node *n = (const struct cgraph_varpool_node *) p;
742 return (hashval_t) DECL_UID (n->decl);
745 /* Returns nonzero if P1 and P2 are equal. */
747 static int
748 eq_varpool_node (const void *p1, const void *p2)
750 const struct cgraph_varpool_node *n1 =
751 (const struct cgraph_varpool_node *) p1;
752 const struct cgraph_varpool_node *n2 =
753 (const struct cgraph_varpool_node *) p2;
754 return DECL_UID (n1->decl) == DECL_UID (n2->decl);
757 /* Return cgraph_varpool node assigned to DECL. Create new one when needed. */
758 struct cgraph_varpool_node *
759 cgraph_varpool_node (tree decl)
761 struct cgraph_varpool_node key, *node, **slot;
763 gcc_assert (DECL_P (decl) && TREE_CODE (decl) != FUNCTION_DECL);
765 if (!cgraph_varpool_hash)
766 cgraph_varpool_hash = htab_create_ggc (10, hash_varpool_node,
767 eq_varpool_node, NULL);
768 key.decl = decl;
769 slot = (struct cgraph_varpool_node **)
770 htab_find_slot (cgraph_varpool_hash, &key, INSERT);
771 if (*slot)
772 return *slot;
773 node = GGC_CNEW (struct cgraph_varpool_node);
774 node->decl = decl;
775 node->order = cgraph_order++;
776 node->next = cgraph_varpool_nodes;
777 cgraph_varpool_nodes = node;
778 *slot = node;
779 return node;
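/* Return the cgraph_varpool node that has ASMNAME for its
   DECL_ASSEMBLER_NAME.  Return NULL if there's no such node.  */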
782 struct cgraph_varpool_node *
783 cgraph_varpool_node_for_asm (tree asmname)
785 struct cgraph_varpool_node *node;
787 for (node = cgraph_varpool_nodes; node ; node = node->next)
788 if (decl_assembler_name_equal (node->decl, asmname))
789 return node;
791 return NULL;
794 /* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables. */
795 void
796 change_decl_assembler_name (tree decl, tree name)
798 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
800 SET_DECL_ASSEMBLER_NAME (decl, name);
801 return;
803 if (name == DECL_ASSEMBLER_NAME (decl))
804 return;
806 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
807 && DECL_RTL_SET_P (decl))
808 warning (0, "%D renamed after being referenced in assembly", decl);
810 SET_DECL_ASSEMBLER_NAME (decl, name);
813 /* Helper function for finalization code - add node into lists so it will
814 be analyzed and compiled. */
815 void
816 cgraph_varpool_enqueue_needed_node (struct cgraph_varpool_node *node)
818 if (cgraph_varpool_last_needed_node)
819 cgraph_varpool_last_needed_node->next_needed = node;
820 cgraph_varpool_last_needed_node = node;
821 node->next_needed = NULL;
822 if (!cgraph_varpool_nodes_queue)
823 cgraph_varpool_nodes_queue = node;
824 if (!cgraph_varpool_first_unanalyzed_node)
825 cgraph_varpool_first_unanalyzed_node = node;
826 notice_global_symbol (node->decl);
829 /* Reset the queue of needed nodes. */
830 void
831 cgraph_varpool_reset_queue (void)
833 cgraph_varpool_last_needed_node = NULL;
834 cgraph_varpool_nodes_queue = NULL;
835 cgraph_varpool_first_unanalyzed_node = NULL;
838 /* Notify finalize_compilation_unit that given node is reachable
839 or needed. */
840 void
841 cgraph_varpool_mark_needed_node (struct cgraph_varpool_node *node)
843 if (!node->needed && node->finalized)
844 cgraph_varpool_enqueue_needed_node (node);
845 node->needed = 1;
848 /* Determine if variable DECL is needed. That is, visible to something
849 either outside this translation unit, something magic in the system
850 configury, or (if not doing unit-at-a-time) to something we haven't
851 seen yet. */
853 bool
854 decide_is_variable_needed (struct cgraph_varpool_node *node, tree decl)
856 /* If the user told us it is used, then it must be so. */
857 if (node->externally_visible
858 || lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
859 return true;
861 /* ??? If the assembler name is set by hand, it is possible to assemble
862 the name later after finalizing the function and the fact is noticed
863 in assemble_name then. This is arguably a bug. */
864 if (DECL_ASSEMBLER_NAME_SET_P (decl)
865 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
866 return true;
868 /* If we decided it was needed before, but at the time we didn't have
869 the definition available, then it's still needed. */
870 if (node->needed)
871 return true;
873 /* Externally visible variables must be output. The exception is
874 COMDAT variables that must be output only when they are needed. */
875 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
876 return true;
878 /* When not reordering top level variables, we have to assume that
879 we are going to keep everything. */
880 if (flag_unit_at_a_time && flag_toplevel_reorder)
881 return false;
883 /* We want to emit COMDAT variables only when absolutely necessary. */
884 if (DECL_COMDAT (decl))
885 return false;
886 return true;
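/* Mark DECL as finalized: the front end has finished providing its
   definition.  Decide whether the variable needs to be output and enqueue
   it if so.  */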
889 void
890 cgraph_varpool_finalize_decl (tree decl)
892 struct cgraph_varpool_node *node = cgraph_varpool_node (decl);
894 /* The first declaration of a variable that comes through this function
895 decides whether it is global (in C, has external linkage)
896 or local (in C, has internal linkage). So do nothing more
897 if this function has already run. */
898 if (node->finalized)
900 if (cgraph_global_info_ready || !flag_unit_at_a_time)
901 cgraph_varpool_assemble_pending_decls ();
902 return;
904 if (node->needed)
905 cgraph_varpool_enqueue_needed_node (node);
906 node->finalized = true;
908 if (decide_is_variable_needed (node, decl))
909 cgraph_varpool_mark_needed_node (node);
910 /* Since we reclaim unreachable nodes at the end of every language
911 level unit, we need to be conservative about possible entry points
912 there. */
913 else if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
914 cgraph_varpool_mark_needed_node (node);
915 if (cgraph_global_info_ready || !flag_unit_at_a_time)
916 cgraph_varpool_assemble_pending_decls ();
919 /* Add a top-level asm statement to the list. */
921 struct cgraph_asm_node *
922 cgraph_add_asm_node (tree asm_str)
924 struct cgraph_asm_node *node;
926 node = GGC_CNEW (struct cgraph_asm_node);
927 node->asm_str = asm_str;
928 node->order = cgraph_order++;
929 node->next = NULL;
930 if (cgraph_asm_nodes == NULL)
931 cgraph_asm_nodes = node;
932 else
933 cgraph_asm_last_node->next = node;
934 cgraph_asm_last_node = node;
935 return node;
938 /* Return true when the DECL can possibly be inlined. */
939 bool
940 cgraph_function_possibly_inlined_p (tree decl)
942 if (!cgraph_global_info_ready)
943 return (DECL_INLINE (decl) && !flag_really_no_inline);
944 return DECL_POSSIBLY_INLINED (decl);
/* Create a clone of edge E in the node N.  The new call is represented by
   CALL_STMT.  When UPDATE_ORIGINAL is true, the cloned count is
   subtracted from the original edge.  */
948 struct cgraph_edge *
949 cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
950 tree call_stmt, gcov_type count_scale, int loop_nest,
951 bool update_original)
953 struct cgraph_edge *new;
955 new = cgraph_create_edge (n, e->callee, call_stmt,
956 e->count * count_scale / REG_BR_PROB_BASE,
957 e->loop_nest + loop_nest);
959 new->inline_failed = e->inline_failed;
960 if (update_original)
962 e->count -= new->count;
963 if (e->count < 0)
964 e->count = 0;
966 return new;
/* Create a node representing a clone of N, to be executed COUNT times.

   When UPDATE_ORIGINAL is true, COUNT is subtracted from the original
   function's profile to reflect the fact that part of the execution is
   now handled by the new node.  */
975 struct cgraph_node *
976 cgraph_clone_node (struct cgraph_node *n, gcov_type count, int loop_nest,
977 bool update_original)
979 struct cgraph_node *new = cgraph_create_node ();
980 struct cgraph_edge *e;
981 gcov_type count_scale;
983 new->decl = n->decl;
984 new->origin = n->origin;
985 if (new->origin)
987 new->next_nested = new->origin->nested;
988 new->origin->nested = new;
990 new->analyzed = n->analyzed;
991 new->local = n->local;
992 new->global = n->global;
993 new->rtl = n->rtl;
994 new->master_clone = n->master_clone;
995 new->count = count;
996 if (n->count)
997 count_scale = new->count * REG_BR_PROB_BASE / n->count;
998 else
999 count_scale = 0;
1000 if (update_original)
1002 n->count -= count;
1003 if (n->count < 0)
1004 n->count = 0;
for (e = n->callees; e; e = e->next_callee)
1008 cgraph_clone_edge (e, new, e->call_stmt, count_scale, loop_nest,
1009 update_original);
1011 new->next_clone = n->next_clone;
1012 new->prev_clone = n;
1013 n->next_clone = new;
1014 if (new->next_clone)
1015 new->next_clone->prev_clone = new;
1017 return new;
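/* A worked example of the scaling above, with made-up numbers: if the
   original node has count 1000 and the clone is created with COUNT 250,
   count_scale is 250 * REG_BR_PROB_BASE / 1000.  A callee edge with count
   400 therefore gets 400 * count_scale / REG_BR_PROB_BASE == 100 in the
   clone; with UPDATE_ORIGINAL the original edge keeps 400 - 100 = 300 and
   the original node keeps 1000 - 250 = 750.  */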
/* Return true if N is a master clone (see cgraph_master_clone).  */
1022 bool
1023 cgraph_is_master_clone (struct cgraph_node *n)
1025 return (n == cgraph_master_clone (n));
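/* Return the master clone node of N, or NULL when the function body is
   not available or may be overwritten.  */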
1028 struct cgraph_node *
1029 cgraph_master_clone (struct cgraph_node *n)
1031 enum availability avail = cgraph_function_body_availability (n);
1033 if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
1034 return NULL;
1036 if (!n->master_clone)
1037 n->master_clone = cgraph_node (n->decl);
1039 return n->master_clone;
/* NODE is no longer a nested function; update the cgraph accordingly.  */
1043 void
1044 cgraph_unnest_node (struct cgraph_node *node)
1046 struct cgraph_node **node2 = &node->origin->nested;
1047 gcc_assert (node->origin);
1049 while (*node2 != node)
1050 node2 = &(*node2)->next_nested;
1051 *node2 = node->next_nested;
1052 node->origin = NULL;
1055 /* Return function availability. See cgraph.h for description of individual
1056 return values. */
1057 enum availability
1058 cgraph_function_body_availability (struct cgraph_node *node)
1060 enum availability avail;
1061 gcc_assert (cgraph_function_flags_ready);
1062 if (!node->analyzed)
1063 avail = AVAIL_NOT_AVAILABLE;
1064 else if (node->local.local)
1065 avail = AVAIL_LOCAL;
1066 else if (node->local.externally_visible)
1067 avail = AVAIL_AVAILABLE;
/* If the function can be overwritten, return OVERWRITABLE.  Take care of
   at least two notable extensions - the COMDAT functions used to share
   template instantiations in C++ (this is symmetric to the code in
   cp_cannot_inline_tree_fn, which probably should be shared so the
   inlinability hooks can be eliminated completely).

   ??? Does the C++ one definition rule allow us to always return
   AVAIL_AVAILABLE here?  That would be a good reason to preserve this
   hook.  Similarly, deal with extern inline functions - this is again
   necessary to get C++ shared functions with keyed templates right, and
   in the C extension documentation we probably should document the
   requirement that both versions of the function (extern inline and
   offline) have the same side-effect characteristics, since good
   optimization is what this is about.  */
1084 else if (!(*targetm.binds_local_p) (node->decl)
1085 && !DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl))
1086 avail = AVAIL_OVERWRITABLE;
1087 else avail = AVAIL_AVAILABLE;
1089 return avail;
1092 /* Return variable availability. See cgraph.h for description of individual
1093 return values. */
1094 enum availability
1095 cgraph_variable_initializer_availability (struct cgraph_varpool_node *node)
1097 gcc_assert (cgraph_function_flags_ready);
1098 if (!node->finalized)
1099 return AVAIL_NOT_AVAILABLE;
1100 if (!TREE_PUBLIC (node->decl))
1101 return AVAIL_AVAILABLE;
1102 /* If the variable can be overwritten, return OVERWRITABLE. Takes
1103 care of at least two notable extensions - the COMDAT variables
1104 used to share template instantiations in C++. */
1105 if (!(*targetm.binds_local_p) (node->decl) && !DECL_COMDAT (node->decl))
1106 return AVAIL_OVERWRITABLE;
1107 return AVAIL_AVAILABLE;
/* Return the cgraph node with unique id UID, or NULL if it has been
   removed.  */
1111 struct cgraph_node *
1112 cgraph_node_by_uid (int uid)
1114 return VEC_index (cgraph_node_p, cgraph_nodes_vec, uid);
1118 /* Add the function FNDECL to the call graph. FNDECL is assumed to be
1119 in low GIMPLE form and ready to be processed by cgraph_finalize_function.
1121 When operating in unit-at-a-time, a new callgraph node is added to
1122 CGRAPH_EXPAND_QUEUE, which is processed after all the original
functions in the call graph.
1125 When not in unit-at-a-time, the new callgraph node is added to
1126 CGRAPH_NODES_QUEUE for cgraph_assemble_pending_functions to
1127 process. */
1129 void
1130 cgraph_add_new_function (tree fndecl)
1132 struct cgraph_node *n = cgraph_node (fndecl);
1133 n->next_needed = cgraph_expand_queue;
1134 cgraph_expand_queue = n;
1137 #include "gt-cgraph.h"