[official-gcc.git] / gcc / cgraph.c
/* Callgraph handling code.
   Copyright (C) 2003, 2004 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file contains basic routines for manipulating the call graph and the
   variable pool.

The callgraph:

    The call-graph is a data structure designed for inter-procedural
    optimization, but it is also used in non-unit-at-a-time compilation to
    allow easier code sharing.

    The call-graph consists of nodes and edges represented via linked lists.
    Each function (external or not) corresponds to a unique node (in
    contrast to tree DECL nodes, where we can have multiple nodes for each
    function).

    The mapping from declarations to call-graph nodes is done using a hash
    table based on DECL_ASSEMBLER_NAME, so it is essential for the assembler
    name not to change once the declaration is inserted into the call-graph.
    The call-graph nodes are created lazily by the cgraph_node function when
    it is called for an unknown declaration.

    When built, there is one edge for each direct call.  It is possible that
    the reference will later be optimized out.  The call-graph is built
    conservatively in order to make conservative data flow analysis possible.

    The callgraph at the moment does not represent indirect calls or calls
    from other compilation units.  The flag NEEDED is set for each node that
    may be accessed in such an invisible way, and it shall be considered an
    entry point to the callgraph.

    Interprocedural information:

      The callgraph is the place to store data needed for interprocedural
      optimization.  All data structures are divided into three components:
      local_info, which is produced while analyzing the function; global_info,
      which is the result of a global walk of the callgraph at the end of
      compilation; and rtl_info, used by the RTL backend to propagate data
      from already compiled functions to their callers.

    Inlining plans:

      The function inlining information is decided in advance and maintained
      in the callgraph as a so-called inline plan.
      For each inlined call, the callee's node is cloned to represent the
      new function copy produced by the inliner.
      Each inlined call gets a unique corresponding clone node of the callee,
      and the data structure is updated while inlining is performed, so
      the clones are eliminated and their callee edges redirected to the
      caller.

      Each edge has an "inline_failed" field.  When the field is NULL,
      the call will be inlined.  When it is non-NULL, it contains a reason
      why inlining wasn't performed.


The varpool data structure:

   The varpool is used to maintain variables in a similar manner as the
   call-graph is used for functions.  Most of the API is symmetric, replacing
   the cgraph function prefix with cgraph_varpool.  */
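
/* Illustrative sketch (not part of the original file): how a front end
   would typically populate the structures described above.  The
   declarations `caller_decl' and `callee_decl' and the CALL_EXPR `call'
   are hypothetical placeholders.

       struct cgraph_node *caller = cgraph_node (caller_decl);
       struct cgraph_node *callee = cgraph_node (callee_decl);

       cgraph_create_edge (caller, callee, call);
       cgraph_mark_needed_node (callee);

   cgraph_node lazily creates (or finds) the node for a FUNCTION_DECL,
   cgraph_create_edge records one direct call, and cgraph_mark_needed_node
   flags a node that is reachable by some external means and is therefore an
   entry point of the callgraph.  */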
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "langhooks.h"
#include "hashtab.h"
#include "toplev.h"
#include "flags.h"
#include "ggc.h"
#include "debug.h"
#include "target.h"
#include "cgraph.h"
#include "varray.h"
#include "output.h"
#include "intl.h"

/* Hash table used to convert declarations into nodes.  */
static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;

/* The linked list of cgraph nodes.  */
struct cgraph_node *cgraph_nodes;

/* Queue of cgraph nodes scheduled to be lowered.  */
struct cgraph_node *cgraph_nodes_queue;

/* Number of nodes in existence.  */
int cgraph_n_nodes;

/* Maximal uid used in cgraph nodes.  */
int cgraph_max_uid;

/* Set when whole unit has been analyzed so we can access global info.  */
bool cgraph_global_info_ready = false;

/* Hash table used to convert declarations into varpool nodes.  */
static GTY((param_is (struct cgraph_varpool_node))) htab_t cgraph_varpool_hash;

/* Queue of cgraph varpool nodes scheduled to be lowered and output.  */
struct cgraph_varpool_node *cgraph_varpool_nodes_queue;

/* Number of varpool nodes in existence.  */
int cgraph_varpool_n_nodes;

/* The linked list of cgraph varpool nodes.  */
static GTY(()) struct cgraph_varpool_node *cgraph_varpool_nodes;

static hashval_t hash_node (const void *);
static int eq_node (const void *, const void *);
/* Returns a hash code for P.  */

static hashval_t
hash_node (const void *p)
{
  const struct cgraph_node *n = p;
  return (hashval_t) DECL_UID (n->decl);
}

/* Returns nonzero if P1 and P2 are equal.  */

static int
eq_node (const void *p1, const void *p2)
{
  const struct cgraph_node *n1 = p1, *n2 = p2;
  return DECL_UID (n1->decl) == DECL_UID (n2->decl);
}
/* Allocate new callgraph node and insert it into basic data structures.  */
static struct cgraph_node *
cgraph_create_node (void)
{
  struct cgraph_node *node;

  node = ggc_alloc_cleared (sizeof (*node));
  node->next = cgraph_nodes;
  node->uid = cgraph_max_uid++;
  if (cgraph_nodes)
    cgraph_nodes->previous = node;
  node->previous = NULL;
  cgraph_nodes = node;
  cgraph_n_nodes++;
  return node;
}
/* Return cgraph node assigned to DECL.  Create new one when needed.  */
struct cgraph_node *
cgraph_node (tree decl)
{
  struct cgraph_node key, *node, **slot;

  if (TREE_CODE (decl) != FUNCTION_DECL)
    abort ();

  if (!cgraph_hash)
    cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);

  key.decl = decl;

  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);

  if (*slot)
    return *slot;

  node = cgraph_create_node ();
  node->decl = decl;
  *slot = node;
  if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
    {
      node->origin = cgraph_node (DECL_CONTEXT (decl));
      node->next_nested = node->origin->nested;
      node->origin->nested = node;
    }
  return node;
}
/* Return the callgraph edge representing CALL_EXPR.  */
struct cgraph_edge *
cgraph_edge (struct cgraph_node *node, tree call_expr)
{
  struct cgraph_edge *e;

  /* This loop may turn out to be a performance problem.  In that case,
     adding hash tables into call nodes with very many edges is probably the
     best solution.  It is not a good idea to add a pointer into the
     CALL_EXPR itself, because we want to make it possible to have multiple
     cgraph nodes representing different clones of the same body before the
     body is actually cloned.  */
  for (e = node->callees; e; e = e->next_callee)
    if (e->call_expr == call_expr)
      break;
  return e;
}
/* Create edge from CALLER to CALLEE in the cgraph.  */

struct cgraph_edge *
cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
                    tree call_expr)
{
  struct cgraph_edge *edge = ggc_alloc (sizeof (struct cgraph_edge));
#ifdef ENABLE_CHECKING
  struct cgraph_edge *e;

  for (e = caller->callees; e; e = e->next_callee)
    if (e->call_expr == call_expr)
      abort ();
#endif

  if (TREE_CODE (call_expr) != CALL_EXPR)
    abort ();

  if (!DECL_SAVED_TREE (callee->decl))
    edge->inline_failed = N_("function body not available");
  else if (callee->local.redefined_extern_inline)
    edge->inline_failed = N_("redefined extern inline functions are not "
                             "considered for inlining");
  else if (callee->local.inlinable)
    edge->inline_failed = N_("function not considered for inlining");
  else
    edge->inline_failed = N_("function not inlinable");

  edge->aux = NULL;

  edge->caller = caller;
  edge->callee = callee;
  edge->call_expr = call_expr;
  edge->next_caller = callee->callers;
  edge->next_callee = caller->callees;
  caller->callees = edge;
  callee->callers = edge;
  return edge;
}
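
/* Illustrative sketch (not part of the original file): how a consumer such
   as the inliner might read the "inline_failed" field initialized above.
   NODE is a hypothetical placeholder for some cgraph node of interest.

       struct cgraph_edge *e;

       for (e = node->callees; e; e = e->next_callee)
         if (e->inline_failed)
           fprintf (stderr, "call to %s not inlined: %s\n",
                    cgraph_node_name (e->callee), e->inline_failed);

   As described at the top of the file, a NULL inline_failed means the call
   is planned to be inlined; a non-NULL value is a human-readable reason why
   it is not.  */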
/* Remove the edge E from the cgraph.  */

void
cgraph_remove_edge (struct cgraph_edge *e)
{
  struct cgraph_edge **edge, **edge2;

  for (edge = &e->callee->callers; *edge && *edge != e;
       edge = &((*edge)->next_caller))
    continue;
  if (!*edge)
    abort ();
  *edge = (*edge)->next_caller;
  for (edge2 = &e->caller->callees; *edge2 && *edge2 != e;
       edge2 = &(*edge2)->next_callee)
    continue;
  if (!*edge2)
    abort ();
  *edge2 = (*edge2)->next_callee;
}
/* Redirect the callee of E to N.  The function does not update the
   underlying call expression.  */

void
cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
{
  struct cgraph_edge **edge;

  for (edge = &e->callee->callers; *edge && *edge != e;
       edge = &((*edge)->next_caller))
    continue;
  if (!*edge)
    abort ();
  *edge = (*edge)->next_caller;
  e->callee = n;
  e->next_caller = n->callers;
  n->callers = e;
}
/* Remove the node from the cgraph.  */

void
cgraph_remove_node (struct cgraph_node *node)
{
  void **slot;
  bool check_dead = 1;

  while (node->callers)
    cgraph_remove_edge (node->callers);
  while (node->callees)
    cgraph_remove_edge (node->callees);
  while (node->nested)
    cgraph_remove_node (node->nested);
  if (node->origin)
    {
      struct cgraph_node **node2 = &node->origin->nested;

      while (*node2 != node)
        node2 = &(*node2)->next_nested;
      *node2 = node->next_nested;
    }
  if (node->previous)
    node->previous->next = node->next;
  else
    cgraph_nodes = node->next;
  if (node->next)
    node->next->previous = node->previous;
  slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
  if (*slot == node)
    {
      if (node->next_clone)
        *slot = node->next_clone;
      else
        {
          htab_clear_slot (cgraph_hash, slot);
          if (!dump_enabled_p (TDI_all))
            {
              DECL_SAVED_TREE (node->decl) = NULL;
              DECL_STRUCT_FUNCTION (node->decl) = NULL;
            }
          check_dead = false;
        }
    }
  else
    {
      struct cgraph_node *n;

      for (n = *slot; n->next_clone != node; n = n->next_clone)
        continue;
      n->next_clone = node->next_clone;
    }

  /* Work out whether we still need a function body (either there is an
     inline clone or there is an out-of-line function whose body has not
     been written out yet).  */
  if (check_dead && flag_unit_at_a_time)
    {
      struct cgraph_node *n;

      for (n = *slot; n; n = n->next_clone)
        if (n->global.inlined_to
            || (!n->global.inlined_to
                && !TREE_ASM_WRITTEN (n->decl) && !DECL_EXTERNAL (n->decl)))
          break;
      if (!n && !dump_enabled_p (TDI_all))
        {
          DECL_SAVED_TREE (node->decl) = NULL;
          DECL_STRUCT_FUNCTION (node->decl) = NULL;
        }
    }
  cgraph_n_nodes--;
  /* Do not free the structure itself so the walk over the chain can
     continue.  */
}
/* Notify finalize_compilation_unit that the given node is reachable.  */

void
cgraph_mark_reachable_node (struct cgraph_node *node)
{
  if (!node->reachable && node->local.finalized)
    {
      notice_global_symbol (node->decl);
      node->reachable = 1;

      node->next_needed = cgraph_nodes_queue;
      cgraph_nodes_queue = node;
    }
}

/* Likewise indicate that a node is needed, i.e. reachable via some
   external means.  */

void
cgraph_mark_needed_node (struct cgraph_node *node)
{
  node->needed = 1;
  cgraph_mark_reachable_node (node);
}
/* Return true when CALLER_DECL calls CALLEE_DECL.  */

bool
cgraph_calls_p (tree caller_decl, tree callee_decl)
{
  struct cgraph_node *caller = cgraph_node (caller_decl);
  struct cgraph_node *callee = cgraph_node (callee_decl);
  struct cgraph_edge *edge;

  for (edge = callee->callers; edge && edge->caller != caller;
       edge = edge->next_caller)
    continue;
  return edge != NULL;
}
/* Return local info for the compiled function.  */

struct cgraph_local_info *
cgraph_local_info (tree decl)
{
  struct cgraph_node *node;
  if (TREE_CODE (decl) != FUNCTION_DECL)
    abort ();
  node = cgraph_node (decl);
  return &node->local;
}

/* Return global info for the compiled function.  */

struct cgraph_global_info *
cgraph_global_info (tree decl)
{
  struct cgraph_node *node;
  if (TREE_CODE (decl) != FUNCTION_DECL || !cgraph_global_info_ready)
    abort ();
  node = cgraph_node (decl);
  return &node->global;
}

/* Return RTL info for the compiled function.  */

struct cgraph_rtl_info *
cgraph_rtl_info (tree decl)
{
  struct cgraph_node *node;
  if (TREE_CODE (decl) != FUNCTION_DECL)
    abort ();
  node = cgraph_node (decl);
  if (decl != current_function_decl
      && !TREE_ASM_WRITTEN (node->decl))
    return NULL;
  return &node->rtl;
}
/* Return name of the node used in debug output.  */
const char *
cgraph_node_name (struct cgraph_node *node)
{
  return lang_hooks.decl_printable_name (node->decl, 2);
}
/* Dump given cgraph node.  */
void
dump_cgraph_node (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *edge;
  fprintf (f, "%s/%i:", cgraph_node_name (node), node->uid);
  if (node->global.inlined_to)
    fprintf (f, " (inline copy in %s/%i)",
             cgraph_node_name (node->global.inlined_to),
             node->global.inlined_to->uid);
  if (node->local.self_insns)
    fprintf (f, " %i insns", node->local.self_insns);
  if (node->global.insns && node->global.insns != node->local.self_insns)
    fprintf (f, " (%i after inlining)", node->global.insns);
  if (node->origin)
    fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
  if (node->needed)
    fprintf (f, " needed");
  else if (node->reachable)
    fprintf (f, " reachable");
  if (DECL_SAVED_TREE (node->decl))
    fprintf (f, " tree");
  if (node->output)
    fprintf (f, " output");

  if (node->local.local)
    fprintf (f, " local");
  if (node->local.disregard_inline_limits)
    fprintf (f, " always_inline");
  else if (node->local.inlinable)
    fprintf (f, " inlinable");
  if (TREE_ASM_WRITTEN (node->decl))
    fprintf (f, " asm_written");

  fprintf (f, "\n called by: ");
  for (edge = node->callers; edge; edge = edge->next_caller)
    {
      fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
               edge->caller->uid);
      if (!edge->inline_failed)
        fprintf (f, "(inlined) ");
    }

  fprintf (f, "\n calls: ");
  for (edge = node->callees; edge; edge = edge->next_callee)
    {
      fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
               edge->callee->uid);
      if (!edge->inline_failed)
        fprintf (f, "(inlined) ");
    }
  fprintf (f, "\n");
}
/* Dump the callgraph.  */

void
dump_cgraph (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "callgraph:\n\n");
  for (node = cgraph_nodes; node; node = node->next)
    dump_cgraph_node (f, node);
}
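
/* Illustrative sketch (not part of the original file): the dump format the
   routines above produce.  For a hypothetical function `foo' with uid 4
   that is an entry point, still has its tree body, is inlinable, is called
   by `main' (uid 1) and calls `bar' (uid 5), the output would look roughly
   like:

       foo/4: 12 insns needed tree inlinable
        called by: main/1
        calls: bar/5

   The exact set of flags printed depends on the node's local and global
   state; "(inlined)" is appended after a caller or callee whose edge has a
   NULL inline_failed field.  */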
/* Returns a hash code for P.  */

static hashval_t
hash_varpool_node (const void *p)
{
  const struct cgraph_varpool_node *n = p;
  return (hashval_t) DECL_UID (n->decl);
}

/* Returns nonzero if P1 and P2 are equal.  */

static int
eq_varpool_node (const void *p1, const void *p2)
{
  const struct cgraph_varpool_node *n1 = p1, *n2 = p2;
  return DECL_UID (n1->decl) == DECL_UID (n2->decl);
}
/* Return cgraph_varpool node assigned to DECL.  Create new one when needed.  */
struct cgraph_varpool_node *
cgraph_varpool_node (tree decl)
{
  struct cgraph_varpool_node key, *node, **slot;

  if (!DECL_P (decl) || TREE_CODE (decl) == FUNCTION_DECL)
    abort ();

  if (!cgraph_varpool_hash)
    cgraph_varpool_hash = htab_create_ggc (10, hash_varpool_node,
                                           eq_varpool_node, NULL);
  key.decl = decl;
  slot = (struct cgraph_varpool_node **)
    htab_find_slot (cgraph_varpool_hash, &key, INSERT);
  if (*slot)
    return *slot;
  node = ggc_alloc_cleared (sizeof (*node));
  node->decl = decl;
  cgraph_varpool_n_nodes++;
  cgraph_varpool_nodes = node;
  *slot = node;
  return node;
}
/* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables.  */
void
change_decl_assembler_name (tree decl, tree name)
{
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      SET_DECL_ASSEMBLER_NAME (decl, name);
      return;
    }
  if (name == DECL_ASSEMBLER_NAME (decl))
    return;

  if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
      && DECL_RTL_SET_P (decl))
    warning ("%D renamed after being referenced in assembly", decl);

  SET_DECL_ASSEMBLER_NAME (decl, name);
}
/* Notify finalize_compilation_unit that the given node is reachable
   or needed.  */
void
cgraph_varpool_mark_needed_node (struct cgraph_varpool_node *node)
{
  if (!node->needed && node->finalized)
    {
      node->next_needed = cgraph_varpool_nodes_queue;
      cgraph_varpool_nodes_queue = node;
      notice_global_symbol (node->decl);
    }
  node->needed = 1;
}
/* Mark the variable DECL as finalized and, when it must be output, enqueue
   it via cgraph_varpool_mark_needed_node.  */
void
cgraph_varpool_finalize_decl (tree decl)
{
  struct cgraph_varpool_node *node = cgraph_varpool_node (decl);

  /* The first declaration of a variable that comes through this function
     decides whether it is global (in C, has external linkage)
     or local (in C, has internal linkage).  So do nothing more
     if this function has already run.  */
  if (node->finalized)
    return;
  if (node->needed)
    {
      node->next_needed = cgraph_varpool_nodes_queue;
      cgraph_varpool_nodes_queue = node;
      notice_global_symbol (decl);
    }
  node->finalized = true;

  if (/* Externally visible variables must be output.  The exceptions are
         COMDAT variables, which must be output only when they are needed.  */
      (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
      /* Variables whose names are output to the assembler file must be
         produced.  It is possible to assemble the name later, after
         finalizing the variable, and the fact is noticed in assemble_name
         then.  */
      || (DECL_ASSEMBLER_NAME_SET_P (decl)
          && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))))
    {
      cgraph_varpool_mark_needed_node (node);
    }
}
/* Output all variable declarations enqueued to be assembled.  Return true
   if anything was actually assembled.  */
bool
cgraph_varpool_assemble_pending_decls (void)
{
  bool changed = false;

  while (cgraph_varpool_nodes_queue)
    {
      tree decl = cgraph_varpool_nodes_queue->decl;
      struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue;

      cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed;
      if (!TREE_ASM_WRITTEN (decl))
        {
          assemble_variable (decl, 0, 1, 0);
          changed = true;
        }
      node->next_needed = NULL;
    }
  return changed;
}
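
/* Illustrative sketch (not part of the original file): the typical varpool
   flow for a global variable, using a hypothetical VAR_DECL `var_decl'.
   The front end finalizes the declaration, and the driver later flushes the
   queue of needed variables.

       cgraph_varpool_finalize_decl (var_decl);
       ...
       while (cgraph_varpool_assemble_pending_decls ())
         ;

   The flush is sketched as a loop because outputting one variable may cause
   further variables to become needed and re-populate the queue; a single
   call suffices when nothing new is enqueued.  */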
/* Return true when the DECL can possibly be inlined.  */
bool
cgraph_function_possibly_inlined_p (tree decl)
{
  if (!cgraph_global_info_ready)
    return (DECL_INLINE (decl) && !flag_really_no_inline);
  return DECL_POSSIBLY_INLINED (decl);
}
/* Create a clone of edge E in the node N, represented by CALL_EXPR.  */
struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n, tree call_expr)
{
  struct cgraph_edge *new = cgraph_create_edge (n, e->callee, call_expr);

  new->inline_failed = e->inline_failed;
  return new;
}
/* Create node representing clone of N.  */
struct cgraph_node *
cgraph_clone_node (struct cgraph_node *n)
{
  struct cgraph_node *new = cgraph_create_node ();
  struct cgraph_edge *e;

  new->decl = n->decl;
  new->origin = n->origin;
  if (new->origin)
    {
      new->next_nested = new->origin->nested;
      new->origin->nested = new;
    }
  new->analyzed = n->analyzed;
  new->local = n->local;
  new->global = n->global;
  new->rtl = n->rtl;

  for (e = n->callees; e; e = e->next_callee)
    cgraph_clone_edge (e, new, e->call_expr);

  new->next_clone = n->next_clone;
  n->next_clone = new;

  return new;
}
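
/* Illustrative sketch (not part of the original file): how an inline plan
   might be recorded using the cloning primitives above.  E stands for a
   hypothetical edge the inliner has decided to inline.

       struct cgraph_node *clone = cgraph_clone_node (e->callee);

       cgraph_redirect_edge_callee (e, clone);
       e->inline_failed = NULL;

   The clone represents the copy of the callee's body that the inliner will
   produce, and clearing inline_failed marks the call as planned to be
   inlined, matching the description at the top of this file.  This is a
   simplified sketch; the actual inliner performs additional bookkeeping.  */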
#include "gt-cgraph.h"