First version committed to git
[zpugcc/jano.git] / toolchain / gcc / gcc / cgraphunit.c
blob75ad3c8135e08d3bfedbe0a3b20f918cd0c57175
1 /* Callgraph based intraprocedural optimizations.
2 Copyright (C) 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "tree-inline.h"
28 #include "langhooks.h"
29 #include "hashtab.h"
30 #include "toplev.h"
31 #include "flags.h"
32 #include "ggc.h"
33 #include "debug.h"
34 #include "target.h"
35 #include "cgraph.h"
36 #include "diagnostic.h"
37 #include "timevar.h"
38 #include "params.h"
39 #include "fibheap.h"
40 #include "c-common.h"
41 #include "intl.h"
42 #include "function.h"
/* Rough size (in estimated insns) of a call sequence; subtracted from the
   callee body size when estimating the size effect of inlining (see
   cgraph_estimate_size_after_inlining).  */
44 #define INSNS_PER_CALL 10
/* Forward declarations of the static helpers defined later in this file.  */
46 static void cgraph_expand_all_functions (void);
47 static void cgraph_mark_functions_to_output (void);
48 static void cgraph_expand_function (struct cgraph_node *);
49 static tree record_call_1 (tree *, int *, void *);
50 static void cgraph_mark_local_functions (void);
51 static void cgraph_optimize_function (struct cgraph_node *);
52 static bool cgraph_default_inline_p (struct cgraph_node *n);
53 static void cgraph_analyze_function (struct cgraph_node *node);
54 static void cgraph_decide_inlining_incrementally (struct cgraph_node *);
56 /* Statistics we collect about the inlining algorithm.  */
57 static int ncalls_inlined;
58 static int nfunctions_inlined;
59 static int initial_insns;
60 static int overall_insns;
62 /* Records tree nodes seen in cgraph_create_edges. Simply using
63 walk_tree_without_duplicates doesn't guarantee each node is visited
64 once because it gets a new htab upon each recursive call from
65 record_calls_1.  */
66 static htab_t visited_nodes;
68 /* Determine if function DECL is needed. That is, visible to something
69 either outside this translation unit, something magic in the system
70 configury, or (if not doing unit-at-a-time) to something we haven't
71 seen yet.  NODE is DECL's callgraph node; returns true when the
   function must eventually be output.  */
73 static bool
74 decide_is_function_needed (struct cgraph_node *node, tree decl)
76 /* If we decided it was needed before, but at the time we didn't have
77 the body of the function available, then it's still needed. We have
78 to go back and re-check its dependencies now. */
79 if (node->needed)
80 return true;
82 /* Externally visible functions must be output. The exception is
83 COMDAT functions that must be output only when they are needed. */
84 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
85 return true;
87 /* Constructors and destructors are reachable from the runtime by
88 some mechanism. */
89 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
90 return true;
92 /* If the user told us it is used, then it must be so. */
93 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
94 return true;
96 /* ??? If the assembler name is set by hand, it is possible to assemble
97 the name later after finalizing the function and the fact is noticed
98 in assemble_name then. This is arguably a bug. */
99 if (DECL_ASSEMBLER_NAME_SET_P (decl)
100 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
101 return true;
/* In unit-at-a-time mode nothing below applies: unneeded functions are
   simply deferred until whole-unit analysis decides their fate.  */
103 if (flag_unit_at_a_time)
104 return false;
106 /* If not doing unit at a time, then we'll only defer this function
107 if it's marked for inlining. Otherwise we want to emit it now. */
109 /* "extern inline" functions are never output locally. */
110 if (DECL_EXTERNAL (decl))
111 return false;
112 /* We want to emit COMDAT functions only when absolutely necessary. */
113 if (DECL_COMDAT (decl))
114 return false;
115 if (!DECL_INLINE (decl)
116 || (!node->local.disregard_inline_limits
117 /* When declared inline, defer even the uninlinable functions.
118 This allows them to be eliminated when unused. */
119 && !DECL_DECLARED_INLINE_P (decl)
120 && (!node->local.inlinable || !cgraph_default_inline_p (node))))
121 return true;
123 return false;
126 /* When not doing unit-at-a-time, output all functions enqueued.
127 Return true when such functions were found.  A no-op (returning
   false) in unit-at-a-time mode, where output is deferred.  */
129 bool
130 cgraph_assemble_pending_functions (void)
132 bool output = false;
134 if (flag_unit_at_a_time)
135 return false;
137 while (cgraph_nodes_queue)
139 struct cgraph_node *n = cgraph_nodes_queue;
/* Advance the queue head before expanding N.  */
141 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
/* Only non-nested, non-external functions are actually emitted here.  */
142 if (!n->origin && !DECL_EXTERNAL (n->decl))
144 cgraph_expand_function (n);
145 output = true;
149 return output;
152 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
153 logic in effect. If NESTED is true, then our caller cannot stand to have
154 the garbage collector run at the moment. We would need to either create
155 a new GC context, or just not compile right now. */
157 void
158 cgraph_finalize_function (tree decl, bool nested)
160 struct cgraph_node *node = cgraph_node (decl);
162 if (node->local.finalized)
164 /* As a GCC extension we allow redefinition of the function. The
165 semantics when both copies of bodies differ is not well defined.
166 We replace the old body with new body so in unit at a time mode
167 we always use new body, while in normal mode we may end up with
168 old body inlined into some functions and new body expanded and
169 inlined in others.
171 ??? It may make more sense to use one body for inlining and other
172 body for expanding the function but this is difficult to do. */
174 /* If node->output is set, then this is a unit-at-a-time compilation
175 and we have already begun whole-unit analysis. This is *not*
176 testing for whether we've already emitted the function. That
177 case can be sort-of legitimately seen with real function
178 redefinition errors. I would argue that the front end should
179 never present us with such a case, but don't enforce that for now. */
180 if (node->output)
181 abort ();
183 /* Reset our datastructures so we can analyze the function again. */
184 memset (&node->local, 0, sizeof (node->local))
185 memset (&node->global, 0, sizeof (node->global));
186 memset (&node->rtl, 0, sizeof (node->rtl));
187 node->analyzed = false;
188 node->local.redefined_extern_inline = true;
/* Drop all outgoing call edges; they referred to the old body.  */
189 while (node->callees)
190 cgraph_remove_edge (node, node->callees->callee);
192 /* We may need to re-queue the node for assembling in case
193 we already processed it and ignored it as not needed. */
194 if (node->reachable && !flag_unit_at_a_time)
196 struct cgraph_node *n;
198 for (n = cgraph_nodes_queue; n; n = n->next_needed)
199 if (n == node)
200 break;
/* Not on the queue any more: clear reachable so it gets re-queued.  */
201 if (!n)
202 node->reachable = 0;
206 notice_global_symbol (decl);
207 node->decl = decl;
208 node->local.finalized = true;
210 /* If not unit at a time, then we need to create the call graph
211 now, so that called functions can be queued and emitted now. */
212 if (!flag_unit_at_a_time)
214 cgraph_analyze_function (node);
215 cgraph_decide_inlining_incrementally (node);
218 if (decide_is_function_needed (node, decl))
219 cgraph_mark_needed_node (node);
221 /* If not unit at a time, go ahead and emit everything we've found
222 to be reachable at this time. */
223 if (!nested)
225 if (!cgraph_assemble_pending_functions ())
226 ggc_collect ();
229 /* If we've not yet emitted decl, tell the debug info about it. */
230 if (!TREE_ASM_WRITTEN (decl))
231 (*debug_hooks->deferred_inline_function) (decl);
233 /* We will never really output the function body, clear the SAVED_INSNS array
234 early then. */
235 if (DECL_EXTERNAL (decl))
236 DECL_SAVED_INSNS (decl) = NULL;
239 /* Walk tree and record all calls. Called via walk_tree.  TP points at
   the current tree node; DATA is the declaration of the function being
   scanned (it is passed through to cgraph_record_call as the caller).
   Returns NULL to continue the walk.  */
240 static tree
241 record_call_1 (tree *tp, int *walk_subtrees, void *data)
243 tree t = *tp;
245 switch (TREE_CODE (t))
247 case VAR_DECL:
248 /* ??? Really, we should mark this decl as *potentially* referenced
249 by this function and re-examine whether the decl is actually used
250 after rtl has been generated. */
251 if (TREE_STATIC (t))
252 cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
253 break;
255 case ADDR_EXPR:
256 if (flag_unit_at_a_time)
258 /* Record dereferences to the functions. This makes the
259 functions reachable unconditionally. */
260 tree decl = TREE_OPERAND (*tp, 0);
261 if (TREE_CODE (decl) == FUNCTION_DECL)
262 cgraph_mark_needed_node (cgraph_node (decl));
264 break;
266 case CALL_EXPR:
268 tree decl = get_callee_fndecl (*tp);
269 if (decl && TREE_CODE (decl) == FUNCTION_DECL)
271 cgraph_record_call (data, decl);
273 /* When we see a function call, we don't want to look at the
274 function reference in the ADDR_EXPR that is hanging from
275 the CALL_EXPR we're examining here, because we would
276 conclude incorrectly that the function's address could be
277 taken by something that is not a function call. So only
278 walk the function parameter list, skip the other subtrees. */
280 walk_tree (&TREE_OPERAND (*tp, 1), record_call_1, data,
281 visited_nodes);
282 *walk_subtrees = 0;
284 break;
287 default:
288 /* Save some cycles by not walking types and declaration as we
289 won't find anything useful there anyway. */
290 if (DECL_P (*tp) || TYPE_P (*tp))
292 *walk_subtrees = 0;
293 break;
/* Language-specific codes are delegated to the frontend hook.  */
296 if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
297 return (*lang_hooks.callgraph.analyze_expr) (tp, walk_subtrees, data);
298 break;
301 return NULL;
304 /* Create cgraph edges for function calls inside BODY from DECL.
   Uses the file-scope VISITED_NODES htab so each tree node is walked
   at most once, even across the recursive walk_tree call made from
   record_call_1 for CALL_EXPR argument lists.  */
306 void
307 cgraph_create_edges (tree decl, tree body)
309 /* The nodes we're interested in are never shared, so walk
310 the tree ignoring duplicates. */
311 visited_nodes = htab_create (37, htab_hash_pointer,
312 htab_eq_pointer, NULL);
313 walk_tree (&body, record_call_1, decl, visited_nodes);
314 htab_delete (visited_nodes);
315 visited_nodes = NULL;
318 /* Analyze the function scheduled to be output: build its call edges,
   estimate its size, decide inlinability, and set the initial
   inline_failed reason on each incoming edge.  */
319 static void
320 cgraph_analyze_function (struct cgraph_node *node)
322 tree decl = node->decl;
323 struct cgraph_edge *e;
325 current_function_decl = decl;
327 /* First kill forward declaration so reverse inlining works properly. */
328 cgraph_create_edges (decl, DECL_SAVED_TREE (decl));
330 node->local.inlinable = tree_inlinable_function_p (decl);
/* Only estimate the size once; a redefinition resets self_insns to 0.  */
331 if (!node->local.self_insns)
332 node->local.self_insns
333 = (*lang_hooks.tree_inlining.estimate_num_insns) (decl);
334 if (node->local.inlinable)
335 node->local.disregard_inline_limits
336 = (*lang_hooks.tree_inlining.disregard_inline_limits) (decl);
/* Initialize the inline-failure reason on every not-yet-inlined edge.  */
337 for (e = node->callers; e; e = e->next_caller)
338 if (e->inline_failed)
340 if (node->local.redefined_extern_inline)
341 e->inline_failed = N_("redefined extern inline functions are not "
342 "considered for inlining");
343 else if (!node->local.inlinable)
344 e->inline_failed = N_("function not inlinable");
345 else
346 e->inline_failed = N_("function not considered for inlining");
348 if (flag_really_no_inline && !node->local.disregard_inline_limits)
349 node->local.inlinable = 0;
350 /* Inlining characteristics are maintained by the cgraph_mark_inline. */
351 node->global.insns = node->local.self_insns;
352 if (!DECL_EXTERNAL (decl))
354 node->global.cloned_times = 1;
355 node->global.will_be_output = true;
358 node->analyzed = true;
359 current_function_decl = NULL;
361 /* Possibly warn about unused parameters. */
362 if (warn_unused_parameter)
363 do_warn_unused_parameter (decl);
366 /* Analyze the whole compilation unit once it is parsed completely.
   Propagates reachability from the initial entry points, analyzes
   every reachable function, then reclaims unreachable ones.  */
368 void
369 cgraph_finalize_compilation_unit (void)
371 struct cgraph_node *node;
373 if (!flag_unit_at_a_time)
375 cgraph_assemble_pending_functions ();
376 return;
379 cgraph_varpool_assemble_pending_decls ();
380 if (!quiet_flag)
381 fprintf (stderr, "\nAnalyzing compilation unit\n");
383 timevar_push (TV_CGRAPH);
385 if (cgraph_dump_file)
386 fprintf (cgraph_dump_file, "Initial entry points:");
387 for (node = cgraph_nodes; node; node = node->next)
388 if (node->needed && DECL_SAVED_TREE (node->decl))
389 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
390 fprintf (cgraph_dump_file, "\n");
393 /* Propagate reachability flag and lower representation of all reachable
394 functions. In the future, lowering will introduce new functions and
395 new entry points on the way (by template instantiation and virtual
396 method table generation for instance). */
397 while (cgraph_nodes_queue)
399 struct cgraph_edge *edge;
400 tree decl = cgraph_nodes_queue->decl;
402 node = cgraph_nodes_queue;
403 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
405 /* ??? It is possible to create extern inline function and later using
406 weak alias attribute to kill its body. See
407 gcc.c-torture/compile/20011119-1.c */
408 if (!DECL_SAVED_TREE (decl))
409 continue;
411 if (node->analyzed || !node->reachable || !DECL_SAVED_TREE (decl))
412 abort ();
414 cgraph_analyze_function (node);
/* Mark everything this function calls as reachable in turn.  */
416 for (edge = node->callees; edge; edge = edge->next_callee)
417 if (!edge->callee->reachable)
418 cgraph_mark_reachable_node (edge->callee);
420 cgraph_varpool_assemble_pending_decls ();
423 /* Collect entry points to the unit. */
425 if (cgraph_dump_file)
427 fprintf (cgraph_dump_file, "Unit entry points:");
428 for (node = cgraph_nodes; node; node = node->next)
429 if (node->needed && DECL_SAVED_TREE (node->decl))
430 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
431 fprintf (cgraph_dump_file, "\n\nInitial ");
432 dump_cgraph (cgraph_dump_file);
435 if (cgraph_dump_file)
436 fprintf (cgraph_dump_file, "\nReclaiming functions:");
438 for (node = cgraph_nodes; node; node = node->next)
440 tree decl = node->decl;
/* A body that was never marked reachable will not be needed.  */
442 if (!node->reachable && DECL_SAVED_TREE (decl))
444 cgraph_remove_node (node);
445 if (cgraph_dump_file)
446 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
448 else
449 node->next_needed = NULL;
451 if (cgraph_dump_file)
453 fprintf (cgraph_dump_file, "\n\nReclaimed ");
454 dump_cgraph (cgraph_dump_file);
456 ggc_collect ();
457 timevar_pop (TV_CGRAPH);
460 /* Figure out what functions we want to assemble: set node->output on
   every node whose body must be emitted, and clear SAVED_INSNS on the
   rest.  */
462 static void
463 cgraph_mark_functions_to_output (void)
465 struct cgraph_node *node;
467 for (node = cgraph_nodes; node; node = node->next)
469 tree decl = node->decl;
470 struct cgraph_edge *e;
472 if (node->output)
473 abort ();
/* E ends up non-NULL iff some call to this node was NOT inlined.  */
475 for (e = node->callers; e; e = e->next_caller)
476 if (e->inline_failed)
477 break;
479 /* We need to output all local functions that are used and not
480 always inlined, as well as those that are reachable from
481 outside the current compilation unit. */
482 if (DECL_SAVED_TREE (decl)
483 && (node->needed
484 || (e && node->reachable))
485 && !TREE_ASM_WRITTEN (decl) && !node->origin
486 && !DECL_EXTERNAL (decl))
487 node->output = 1;
488 else
489 DECL_SAVED_INSNS (decl) = NULL;
493 /* Optimize the function before expansion: apply tree inlining to NODE
   (and, recursively, to its nested functions).  */
495 static void
496 cgraph_optimize_function (struct cgraph_node *node)
498 tree decl = node->decl;
500 timevar_push (TV_INTEGRATION);
501 /* optimize_inline_calls avoids inlining of current_function_decl. */
502 current_function_decl = decl;
503 if (flag_inline_trees)
505 struct cgraph_edge *e;
/* Only invoke the inliner when some callee was actually chosen for
   inlining, -Winline is active, or an always_inline callee exists.  */
507 for (e = node->callees; e; e = e->next_callee)
508 if (!e->inline_failed || warn_inline
509 || (DECL_DECLARED_INLINE_P (e->callee->decl)
510 && lookup_attribute ("always_inline",
511 DECL_ATTRIBUTES (e->callee->decl))))
512 break;
513 if (e)
514 optimize_inline_calls (decl);
516 if (node->nested)
518 for (node = node->nested; node; node = node->next_nested)
519 cgraph_optimize_function (node);
521 timevar_pop (TV_INTEGRATION);
524 /* Expand function specified by NODE: run tree inlining on it and then
   hand it to the language hook for RTL generation and assembly.  */
526 static void
527 cgraph_expand_function (struct cgraph_node *node)
529 tree decl = node->decl;
/* In non-unit-at-a-time mode the front end already announced it.  */
531 if (flag_unit_at_a_time)
532 announce_function (decl);
534 cgraph_optimize_function (node);
536 /* Generate RTL for the body of DECL. Nested functions are expanded
537 via lang_expand_decl_stmt. */
538 (*lang_hooks.callgraph.expand_function) (decl);
539 if (DECL_DEFER_OUTPUT (decl))
540 abort ();
542 current_function_decl = NULL;
545 /* Fill array order with all nodes with output flag set in the reverse
546 topological order.  Returns the number of nodes stored in ORDER.
   The node->aux field is used as traversal state: it holds the next
   caller edge to visit, or &last when the node is finished.  */
548 static int
549 cgraph_postorder (struct cgraph_node **order)
551 struct cgraph_node *node, *node2;
552 int stack_size = 0;
553 int order_pos = 0;
554 struct cgraph_edge *edge, last;
556 struct cgraph_node **stack =
557 xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
559 /* We have to deal with cycles nicely, so use a depth first traversal
560 output algorithm. Ignore the fact that some functions won't need
561 to be output and put them into order as well, so we get dependencies
562 right through inline functions. */
563 for (node = cgraph_nodes; node; node = node->next)
564 node->aux = NULL;
565 for (node = cgraph_nodes; node; node = node->next)
566 if (!node->aux)
568 node2 = node;
569 if (!node->callers)
570 node->aux = &last;
571 else
572 node->aux = node->callers;
573 while (node2)
575 while (node2->aux != &last)
577 edge = node2->aux;
578 if (edge->next_caller)
579 node2->aux = edge->next_caller;
580 else
581 node2->aux = &last;
/* Descend into an unvisited caller, remembering where we came from.  */
582 if (!edge->caller->aux)
584 if (!edge->caller->callers)
585 edge->caller->aux = &last;
586 else
587 edge->caller->aux = edge->caller->callers;
588 stack[stack_size++] = node2;
589 node2 = edge->caller;
590 break;
/* All callers done: emit the node and backtrack.  */
593 if (node2->aux == &last)
595 order[order_pos++] = node2;
596 if (stack_size)
597 node2 = stack[--stack_size];
598 else
599 node2 = NULL;
603 free (stack);
604 return order_pos;
/* The per-node inline-count is stashed in the node's aux pointer while
   the inlining heuristics run; it must be cleared before other users of
   aux (e.g. cgraph_postorder) are invoked.  */
607 #define INLINED_TIMES(node) ((size_t)(node)->aux)
608 #define SET_INLINED_TIMES(node,times) ((node)->aux = (void *)(times))
610 /* Return list of nodes we decided to inline NODE into, set their output
611 flag and compute INLINED_TIMES.
613 We do simple backtracing to get INLINED_TIMES right. This should not be
614 expensive as we limit the amount of inlining. Alternatively we may first
615 discover set of nodes, topologically sort these and propagate
616 INLINED_TIMES */
/* Walk up already-inlined caller edges from NODE, collecting into ARRAY
   every node NODE's body was (transitively) inlined into.  Marks each
   collected node's output flag and records INLINED_TIMES on it.
   Returns the number of nodes stored in ARRAY.  */
618 static int
619 cgraph_inlined_into (struct cgraph_node *node, struct cgraph_node **array)
621 int nfound = 0;
622 struct cgraph_edge **stack;
623 struct cgraph_edge *e, *e1;
624 int sp;
625 int i;
627 /* Fast path: since we traverse in mostly topological order, we will likely
628 find no edges. */
629 for (e = node->callers; e; e = e->next_caller)
630 if (!e->inline_failed)
631 break;
633 if (!e)
634 return 0;
636 /* Allocate stack for back-tracking up callgraph. */
637 stack = xmalloc ((cgraph_n_nodes + 1) * sizeof (struct cgraph_edge));
638 sp = 0;
640 /* Push the first edge on to the stack. */
641 stack[sp++] = e;
643 while (sp)
645 struct cgraph_node *caller;
647 /* Look at the edge on the top of the stack. */
648 e = stack[sp - 1];
649 caller = e->caller;
651 /* Check if the caller destination has been visited yet. */
652 if (!caller->output)
654 array[nfound++] = e->caller;
655 /* Mark that we have visited the destination. */
656 caller->output = true;
657 SET_INLINED_TIMES (caller, 0);
659 SET_INLINED_TIMES (caller, INLINED_TIMES (caller) + 1);
/* Descend further along the caller's own inlined caller edges.  */
661 for (e1 = caller->callers; e1; e1 = e1->next_caller)
662 if (!e1->inline_failed)
663 break;
665 if (e1)
666 stack[sp++] = e1;
667 else
/* No deeper edge: advance to the next sibling edge, popping as needed.  */
669 while (true)
671 for (e1 = e->next_caller; e1; e1 = e1->next_caller)
672 if (!e1->inline_failed)
673 break;
675 if (e1)
677 stack[sp - 1] = e1;
678 break;
680 else
682 sp--;
683 if (!sp)
684 break;
685 e = stack[sp - 1];
691 free (stack);
694 if (cgraph_dump_file)
696 fprintf (cgraph_dump_file, " Found inline predecesors of %s:",
697 cgraph_node_name (node));
698 for (i = 0; i < nfound; i++)
700 fprintf (cgraph_dump_file, " %s", cgraph_node_name (array[i]));
701 if (INLINED_TIMES (array[i]) != 1)
702 fprintf (cgraph_dump_file, " (%i times)",
703 (int)INLINED_TIMES (array[i]));
705 fprintf (cgraph_dump_file, "\n");
708 return nfound;
711 /* Return list of nodes we decided to inline into NODE, set their output
712 flag and compute INLINED_TIMES.
714 This function is identical to cgraph_inlined_into with callers and callees
715 nodes swapped.  Returns the number of nodes stored in ARRAY.  */
717 static int
718 cgraph_inlined_callees (struct cgraph_node *node, struct cgraph_node **array)
720 int nfound = 0;
721 struct cgraph_edge **stack;
722 struct cgraph_edge *e, *e1;
723 int sp;
724 int i;
726 /* Fast path: since we traverse in mostly topological order, we will likely
727 find no edges. */
728 for (e = node->callees; e; e = e->next_callee)
729 if (!e->inline_failed)
730 break;
732 if (!e)
733 return 0;
735 /* Allocate stack for back-tracking up callgraph. */
736 stack = xmalloc ((cgraph_n_nodes + 1) * sizeof (struct cgraph_edge));
737 sp = 0;
739 /* Push the first edge on to the stack. */
740 stack[sp++] = e;
742 while (sp)
744 struct cgraph_node *callee;
746 /* Look at the edge on the top of the stack. */
747 e = stack[sp - 1];
748 callee = e->callee;
750 /* Check if the callee destination has been visited yet. */
751 if (!callee->output)
753 array[nfound++] = e->callee;
754 /* Mark that we have visited the destination. */
755 callee->output = true;
756 SET_INLINED_TIMES (callee, 0);
758 SET_INLINED_TIMES (callee, INLINED_TIMES (callee) + 1);
/* Descend further along the callee's own inlined callee edges.  */
760 for (e1 = callee->callees; e1; e1 = e1->next_callee)
761 if (!e1->inline_failed)
762 break;
763 if (e1)
764 stack[sp++] = e1;
765 else
/* No deeper edge: advance to the next sibling edge, popping as needed.  */
767 while (true)
769 for (e1 = e->next_callee; e1; e1 = e1->next_callee)
770 if (!e1->inline_failed)
771 break;
773 if (e1)
775 stack[sp - 1] = e1;
776 break;
778 else
780 sp--;
781 if (!sp)
782 break;
783 e = stack[sp - 1];
789 free (stack);
791 if (cgraph_dump_file)
793 fprintf (cgraph_dump_file, " Found inline successors of %s:",
794 cgraph_node_name (node));
795 for (i = 0; i < nfound; i++)
797 fprintf (cgraph_dump_file, " %s", cgraph_node_name (array[i]));
798 if (INLINED_TIMES (array[i]) != 1)
799 fprintf (cgraph_dump_file, " (%i times)",
800 (int)INLINED_TIMES (array[i]));
802 fprintf (cgraph_dump_file, "\n");
805 return nfound;
808 /* Perform reachability analysis and reclaim all unreachable nodes.
809 This function also removes unneeded bodies of extern inline functions
810 and thus needs to be done only after inlining decisions have been made.
   Returns true when any node was removed or changed.  Uses node->aux
   as the worklist link; clears all aux fields before returning.  */
811 static bool
812 cgraph_remove_unreachable_nodes (void)
814 struct cgraph_node *first = (void *) 1;
815 struct cgraph_node *node;
816 bool changed = false;
817 int insns = 0;
819 if (cgraph_dump_file)
820 fprintf (cgraph_dump_file, "\nReclaiming functions:");
821 #ifdef ENABLE_CHECKING
822 for (node = cgraph_nodes; node; node = node->next)
823 if (node->aux)
824 abort ();
825 #endif
/* Seed the worklist with the roots: needed nodes that will really be
   output (not analyzed extern bodies).  */
826 for (node = cgraph_nodes; node; node = node->next)
827 if (node->needed && (!DECL_EXTERNAL (node->decl) || !node->analyzed))
829 node->aux = first;
830 first = node;
832 else if (node->aux)
833 abort ();
835 /* Perform reachability analysis. As a special case do not consider
836 extern inline functions not inlined as live because we won't output
837 them at all. */
838 while (first != (void *) 1)
840 struct cgraph_edge *e;
841 node = first;
842 first = first->aux;
844 for (e = node->callees; e; e = e->next_callee)
845 if (!e->callee->aux
846 && node->analyzed
847 && (!e->inline_failed || !e->callee->analyzed
848 || !DECL_EXTERNAL (e->callee->decl)))
850 e->callee->aux = first;
851 first = e->callee;
855 /* Remove unreachable nodes. Extern inline functions need special care;
856 Unreachable extern inline functions shall be removed.
857 Reachable extern inline functions we never inlined shall get their bodies
858 eliminated
859 Reachable extern inline functions we sometimes inlined will be turned into
860 unanalyzed nodes so they look like true extern functions to the rest
861 of code. */
862 for (node = cgraph_nodes; node; node = node->next)
864 if (!node->aux)
866 int local_insns;
867 tree decl = node->decl;
869 if (DECL_SAVED_INSNS (decl))
870 local_insns = node->local.self_insns;
871 else
872 local_insns = 0;
873 if (cgraph_dump_file)
874 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
875 if (!node->analyzed || !DECL_EXTERNAL (node->decl))
876 cgraph_remove_node (node);
877 else
879 struct cgraph_edge *e;
/* Keep the node (callers exist or it is needed) but drop its body.  */
881 for (e = node->callers; e; e = e->next_caller)
882 if (e->caller->aux)
883 break;
884 if (e || node->needed)
886 DECL_SAVED_TREE (node->decl) = NULL_TREE;
887 while (node->callees)
888 cgraph_remove_edge (node, node->callees->callee);
889 node->analyzed = false;
891 else
892 cgraph_remove_node (node);
894 if (!DECL_SAVED_TREE (decl))
895 insns += local_insns;
896 changed = true;
899 for (node = cgraph_nodes; node; node = node->next)
900 node->aux = NULL;
901 if (cgraph_dump_file)
902 fprintf (cgraph_dump_file, "\nReclaimed %i insns", insns);
903 return changed;
907 /* Estimate size of the function after inlining WHAT into TO.
   TIMES is the number of call sites being replaced: each one adds the
   callee's body minus the saved call overhead (INSNS_PER_CALL).  */
909 static int
910 cgraph_estimate_size_after_inlining (int times, struct cgraph_node *to,
911 struct cgraph_node *what)
913 return (what->global.insns - INSNS_PER_CALL) * times + to->global.insns;
916 /* Estimate the growth caused by inlining NODE into all callees.
   Sums the per-caller size change scaled by each caller's clone count;
   if the offline copy of NODE could then be eliminated, its size is
   subtracted.  */
918 static int
919 cgraph_estimate_growth (struct cgraph_node *node)
921 int growth = 0;
922 int calls_saved = 0;
923 int clones_added = 0;
924 struct cgraph_edge *e;
926 for (e = node->callers; e; e = e->next_caller)
927 if (e->inline_failed)
/* NOTE(review): the expression below appears to have lost a `-` between
   the two operands in this extracted copy — upstream computes the
   caller's size delta (new size minus old size); verify against the
   original revision.  */
929 growth += ((cgraph_estimate_size_after_inlining (1, e->caller, node)
931 e->caller->global.insns) *e->caller->global.cloned_times);
932 calls_saved += e->caller->global.cloned_times;
933 clones_added += e->caller->global.cloned_times;
936 /* ??? Wrong for self recursive functions or cases where we decide to not
937 inline for different reasons, but it is not big deal as in that case
938 we will keep the body around, but we will also avoid some inlining. */
939 if (!node->needed && !node->origin && !DECL_EXTERNAL (node->decl))
940 growth -= node->global.insns, clones_added--;
942 if (!calls_saved)
943 calls_saved = 1;
945 return growth;
948 /* Update insn sizes after inlining WHAT into TO that is already inlined into
949 all nodes in INLINED array (of length NINLINED).  INLINED_CALLEES /
   NINLINED_CALLEES describe the nodes already inlined into WHAT, whose
   clone counts must also be updated.  Clears inline_failed on each
   TO->WHAT edge and maintains the global inlining statistics.  */
951 static void
952 cgraph_mark_inline (struct cgraph_node *to, struct cgraph_node *what,
953 struct cgraph_node **inlined, int ninlined,
954 struct cgraph_node **inlined_callees,
955 int ninlined_callees)
957 int i;
958 int times = 0;
959 int clones = 0;
960 struct cgraph_edge *e;
961 bool called = false;
962 int new_insns;
964 what->global.inlined = 1;
/* Mark every TO->WHAT edge inlined; note whether other callers remain.  */
965 for (e = what->callers; e; e = e->next_caller)
967 if (e->caller == to)
969 if (!e->inline_failed)
970 continue;
971 e->inline_failed = NULL;
972 times++;
973 clones += e->caller->global.cloned_times;
975 else if (e->inline_failed)
976 called = true;
978 if (!times)
979 abort ();
980 ncalls_inlined += times;
982 new_insns = cgraph_estimate_size_after_inlining (times, to, what);
983 if (to->global.will_be_output)
984 overall_insns += new_insns - to->global.insns;
985 to->global.insns = new_insns;
/* If no non-inlined calls to WHAT remain and nothing else forces an
   offline copy, the standalone body will not be output.  */
987 if (!called && !what->needed && !what->origin
988 && flag_unit_at_a_time
989 && !DECL_EXTERNAL (what->decl))
991 if (!what->global.will_be_output)
992 abort ();
993 clones--;
994 nfunctions_inlined++;
995 what->global.will_be_output = 0;
996 overall_insns -= what->global.insns;
998 what->global.cloned_times += clones;
/* Propagate the size change to every node TO was itself inlined into.  */
999 for (i = 0; i < ninlined; i++)
1001 new_insns =
1002 cgraph_estimate_size_after_inlining (INLINED_TIMES (inlined[i]) *
1003 times, inlined[i], what);
1004 if (inlined[i]->global.will_be_output)
1005 overall_insns += new_insns - inlined[i]->global.insns;
1006 inlined[i]->global.insns = new_insns;
1008 for (i = 0; i < ninlined_callees; i++)
1010 inlined_callees[i]->global.cloned_times +=
1011 INLINED_TIMES (inlined_callees[i]) * clones;
1015 /* Return false when inlining WHAT into TO is not good idea as it would cause
1016 too large growth of function bodies.  On failure *REASON is set to a
   translatable message naming the limit that was hit.  INLINED /
   NINLINED are the nodes TO is already inlined into, which must be
   checked against the growth limit as well.  */
1018 static bool
1019 cgraph_check_inline_limits (struct cgraph_node *to, struct cgraph_node *what,
1020 struct cgraph_node **inlined, int ninlined,
1021 const char **reason)
1023 int i;
1024 int times = 0;
1025 struct cgraph_edge *e;
1026 int newsize;
1027 int limit;
1029 for (e = to->callees; e; e = e->next_callee)
1030 if (e->callee == what)
1031 times++;
1033 /* When inlining large function body called once into small function,
1034 take the inlined function as base for limiting the growth. */
1035 if (to->local.self_insns > what->local.self_insns)
1036 limit = to->local.self_insns;
1037 else
1038 limit = what->local.self_insns;
1040 limit += limit * PARAM_VALUE (PARAM_LARGE_FUNCTION_GROWTH) / 100;
1042 newsize = cgraph_estimate_size_after_inlining (times, to, what);
1043 if (newsize > PARAM_VALUE (PARAM_LARGE_FUNCTION_INSNS)
1044 && newsize > limit)
1046 *reason = N_("--param large-function-growth limit reached");
1047 return false;
/* Apply the same growth limit to every function TO is inlined into.  */
1049 for (i = 0; i < ninlined; i++)
1051 newsize =
1052 cgraph_estimate_size_after_inlining (INLINED_TIMES (inlined[i]) *
1053 times, inlined[i], what);
1054 if (newsize > PARAM_VALUE (PARAM_LARGE_FUNCTION_INSNS)
1055 && newsize >
1056 inlined[i]->local.self_insns *
1057 (100 + PARAM_VALUE (PARAM_LARGE_FUNCTION_GROWTH)) / 100)
1059 *reason = N_("--param large-function-growth limit reached while inlining the caller");
1060 return false;
1063 return true;
1066 /* Return true when function N is small enough to be inlined: it must
   be marked inline, have a saved body, and fit under the size limit —
   MAX_INLINE_INSNS_SINGLE for explicitly-declared-inline functions,
   the stricter MAX_INLINE_INSNS_AUTO otherwise.  */
1068 static bool
1069 cgraph_default_inline_p (struct cgraph_node *n)
1071 if (!DECL_INLINE (n->decl) || !DECL_SAVED_TREE (n->decl))
1072 return false;
1073 if (DECL_DECLARED_INLINE_P (n->decl))
1074 return n->global.insns < MAX_INLINE_INSNS_SINGLE;
1075 else
1076 return n->global.insns < MAX_INLINE_INSNS_AUTO;
1079 /* Set inline_failed for all callers of given function to REASON.
   Edges already inlined (inline_failed == NULL) are left untouched.  */
1081 static void
1082 cgraph_set_inline_failed (struct cgraph_node *node, const char *reason)
1084 struct cgraph_edge *e;
1086 if (cgraph_dump_file)
1087 fprintf (cgraph_dump_file, "Inlining failed: %s\n", reason);
1088 for (e = node->callers; e; e = e->next_caller)
1089 if (e->inline_failed)
1090 e->inline_failed = reason;
1093 /* We use greedy algorithm for inlining of small functions:
1094 All inline candidates are put into prioritized heap based on estimated
1095 growth of the overall number of instructions and then update the estimates.
1097 INLINED and INLINED_CALEES are just pointers to arrays large enough
1098 to be passed to cgraph_inlined_into and cgraph_inlined_callees. */
1100 static void
1101 cgraph_decide_inlining_of_small_functions (struct cgraph_node **inlined,
1102 struct cgraph_node **inlined_callees)
1104 int i;
1105 struct cgraph_node *node;
1106 fibheap_t heap = fibheap_new ();
1107 struct fibnode **heap_node =
1108 xcalloc (cgraph_max_uid, sizeof (struct fibnode *));
1109 int ninlined, ninlined_callees;
1110 int max_insns = ((HOST_WIDEST_INT) initial_insns
1111 * (100 + PARAM_VALUE (PARAM_INLINE_UNIT_GROWTH)) / 100);
1113 /* Put all inline candidates into the heap. */
1115 for (node = cgraph_nodes; node; node = node->next)
1117 if (!node->local.inlinable || !node->callers
1118 || node->local.disregard_inline_limits)
1119 continue;
1121 if (!cgraph_default_inline_p (node))
1123 cgraph_set_inline_failed (node,
1124 N_("--param max-inline-insns-single limit reached"));
1125 continue;
1127 heap_node[node->uid] =
1128 fibheap_insert (heap, cgraph_estimate_growth (node), node);
1131 if (cgraph_dump_file)
1132 fprintf (cgraph_dump_file, "\nDeciding on smaller functions:\n");
1133 while (overall_insns <= max_insns && (node = fibheap_extract_min (heap)))
1135 struct cgraph_edge *e;
1136 int old_insns = overall_insns;
1138 heap_node[node->uid] = NULL;
1139 if (cgraph_dump_file)
1140 fprintf (cgraph_dump_file,
1141 "\nConsidering %s with %i insns\n"
1142 " Estimated growth is %+i insns.\n",
1143 cgraph_node_name (node), node->global.insns,
1144 cgraph_estimate_growth (node));
1145 if (!cgraph_default_inline_p (node))
1147 cgraph_set_inline_failed (node,
1148 N_("--param max-inline-insns-single limit reached after inlining into the callee"));
1149 continue;
1151 ninlined_callees = cgraph_inlined_callees (node, inlined_callees);
1152 for (e = node->callers; e; e = e->next_caller)
1153 if (e->inline_failed)
1155 /* Marking recursive function inlinine has sane semantic and
1156 thus we should not warn on it. */
1157 if (e->caller == node)
1159 e->inline_failed = "";
1160 continue;
1162 ninlined = cgraph_inlined_into (e->caller, inlined);
1163 if (e->callee->output)
1164 e->inline_failed = "";
1165 if (e->callee->output
1166 || !cgraph_check_inline_limits (e->caller, node, inlined,
1167 ninlined, &e->inline_failed))
1169 for (i = 0; i < ninlined; i++)
1170 inlined[i]->output = 0, inlined[i]->aux = 0;
1171 if (cgraph_dump_file)
1172 fprintf (cgraph_dump_file, " Not inlining into %s.\n",
1173 cgraph_node_name (e->caller));
1174 continue;
1176 cgraph_mark_inline (e->caller, node, inlined, ninlined,
1177 inlined_callees, ninlined_callees);
1178 if (heap_node[e->caller->uid])
1179 fibheap_replace_key (heap, heap_node[e->caller->uid],
1180 cgraph_estimate_growth (e->caller));
1182 /* Size of the functions we updated into has changed, so update
1183 the keys. */
1184 for (i = 0; i < ninlined; i++)
1186 inlined[i]->output = 0, inlined[i]->aux = 0;
1187 if (heap_node[inlined[i]->uid])
1188 fibheap_replace_key (heap, heap_node[inlined[i]->uid],
1189 cgraph_estimate_growth (inlined[i]));
1191 if (cgraph_dump_file)
1192 fprintf (cgraph_dump_file,
1193 " Inlined into %s which now has %i insns.\n",
1194 cgraph_node_name (e->caller),
1195 e->caller->global.insns);
1198 /* Similarly all functions called by the function we just inlined
1199 are now called more times; update keys. */
1201 for (e = node->callees; e; e = e->next_callee)
1202 if (e->inline_failed && heap_node[e->callee->uid])
1203 fibheap_replace_key (heap, heap_node[e->callee->uid],
1204 cgraph_estimate_growth (e->callee));
1206 for (i = 0; i < ninlined_callees; i++)
1208 struct cgraph_edge *e;
1210 for (e = inlined_callees[i]->callees; e; e = e->next_callee)
1211 if (e->inline_failed && heap_node[e->callee->uid])
1212 fibheap_replace_key (heap, heap_node[e->callee->uid],
1213 cgraph_estimate_growth (e->callee));
1215 inlined_callees[i]->output = 0;
1216 inlined_callees[i]->aux = 0;
1218 if (cgraph_dump_file)
1219 fprintf (cgraph_dump_file,
1220 " Inlined %i times for a net change of %+i insns.\n",
1221 node->global.cloned_times, overall_insns - old_insns);
1223 while ((node = fibheap_extract_min (heap)) != NULL)
1224 if (!node->local.disregard_inline_limits)
1225 cgraph_set_inline_failed (node, N_("--param inline-unit-growth limit reached"));
1226 fibheap_delete (heap);
1227 free (heap_node);
1230 /* Decide on the inlining. We do so in the topological order to avoid
1231 expenses on updating datastructures. */
1233 static void
1234 cgraph_decide_inlining (void)
1236 struct cgraph_node *node;
1237 int nnodes;
1238 struct cgraph_node **order =
1239 xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
1240 struct cgraph_node **inlined =
1241 xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
1242 struct cgraph_node **inlined_callees =
1243 xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
1244 int ninlined;
1245 int ninlined_callees;
1246 int old_insns = 0;
1247 int i, y;
1249 for (node = cgraph_nodes; node; node = node->next)
1250 initial_insns += node->local.self_insns;
1251 overall_insns = initial_insns;
1253 nnodes = cgraph_postorder (order);
1255 if (cgraph_dump_file)
1256 fprintf (cgraph_dump_file,
1257 "\nDeciding on inlining. Starting with %i insns.\n",
1258 initial_insns);
1260 for (node = cgraph_nodes; node; node = node->next)
1261 node->aux = 0;
1263 if (cgraph_dump_file)
1264 fprintf (cgraph_dump_file, "\nInlining always_inline functions:\n");
1265 #ifdef ENABLE_CHECKING
1266 for (node = cgraph_nodes; node; node = node->next)
1267 if (node->aux || node->output)
1268 abort ();
1269 #endif
1271 /* In the first pass mark all always_inline edges. Do this with a priority
1272 so none of our later choices will make this impossible. */
1273 for (i = nnodes - 1; i >= 0; i--)
1275 struct cgraph_edge *e;
1277 node = order[i];
1279 for (e = node->callees; e; e = e->next_callee)
1280 if (e->callee->local.disregard_inline_limits)
1281 break;
1282 if (!e)
1283 continue;
1284 if (cgraph_dump_file)
1285 fprintf (cgraph_dump_file,
1286 "\nConsidering %s %i insns (always inline)\n",
1287 cgraph_node_name (e->callee), e->callee->global.insns);
1288 ninlined = cgraph_inlined_into (order[i], inlined);
1289 for (; e; e = e->next_callee)
1291 old_insns = overall_insns;
1292 if (!e->inline_failed || !e->callee->local.inlinable
1293 || !e->callee->local.disregard_inline_limits)
1294 continue;
1295 if (e->callee->output || e->callee == node)
1297 e->inline_failed = N_("recursive inlining");
1298 continue;
1300 ninlined_callees =
1301 cgraph_inlined_callees (e->callee, inlined_callees);
1302 cgraph_mark_inline (node, e->callee, inlined, ninlined,
1303 inlined_callees, ninlined_callees);
1304 for (y = 0; y < ninlined_callees; y++)
1305 inlined_callees[y]->output = 0, inlined_callees[y]->aux = 0;
1306 if (cgraph_dump_file)
1307 fprintf (cgraph_dump_file,
1308 " Inlined into %s which now has %i insns.\n",
1309 cgraph_node_name (node->callees->caller),
1310 node->callees->caller->global.insns);
1312 if (cgraph_dump_file && node->global.cloned_times > 0)
1313 fprintf (cgraph_dump_file,
1314 " Inlined %i times for a net change of %+i insns.\n",
1315 node->global.cloned_times, overall_insns - old_insns);
1316 for (y = 0; y < ninlined; y++)
1317 inlined[y]->output = 0, inlined[y]->aux = 0;
1319 #ifdef ENABLE_CHECKING
1320 for (node = cgraph_nodes; node; node = node->next)
1321 if (node->aux || node->output)
1322 abort ();
1323 #endif
1325 if (!flag_really_no_inline)
1327 cgraph_decide_inlining_of_small_functions (inlined, inlined_callees);
1328 #ifdef ENABLE_CHECKING
1329 for (node = cgraph_nodes; node; node = node->next)
1330 if (node->aux || node->output)
1331 abort ();
1332 #endif
1334 if (cgraph_dump_file)
1335 fprintf (cgraph_dump_file, "\nDeciding on functions called once:\n");
1337 /* And finally decide what functions are called once. */
1339 for (i = nnodes - 1; i >= 0; i--)
1341 node = order[i];
1343 if (node->callers && !node->callers->next_caller && !node->needed
1344 && node->local.inlinable && node->callers->inline_failed
1345 && !DECL_EXTERNAL (node->decl) && !DECL_COMDAT (node->decl))
1347 bool ok = true;
1348 struct cgraph_node *node1;
1350 /* Verify that we won't duplicate the caller. */
1351 for (node1 = node->callers->caller;
1352 node1->callers && !node1->callers->inline_failed
1353 && ok; node1 = node1->callers->caller)
1354 if (node1->callers->next_caller || node1->needed)
1355 ok = false;
1356 if (ok)
1358 const char *dummy_reason;
1359 if (cgraph_dump_file)
1360 fprintf (cgraph_dump_file,
1361 "\nConsidering %s %i insns.\n"
1362 " Called once from %s %i insns.\n",
1363 cgraph_node_name (node), node->global.insns,
1364 cgraph_node_name (node->callers->caller),
1365 node->callers->caller->global.insns);
1366 ninlined = cgraph_inlined_into (node->callers->caller,
1367 inlined);
1368 old_insns = overall_insns;
1370 /* Inlining functions once would never cause inlining warnings. */
1371 if (cgraph_check_inline_limits
1372 (node->callers->caller, node, inlined, ninlined,
1373 &dummy_reason))
1375 ninlined_callees =
1376 cgraph_inlined_callees (node, inlined_callees);
1377 cgraph_mark_inline (node->callers->caller, node, inlined,
1378 ninlined, inlined_callees,
1379 ninlined_callees);
1380 for (y = 0; y < ninlined_callees; y++)
1381 inlined_callees[y]->output = 0, inlined_callees[y]->aux = 0;
1382 if (cgraph_dump_file)
1383 fprintf (cgraph_dump_file,
1384 " Inlined into %s which now has %i insns"
1385 " for a net change of %+i insns.\n",
1386 cgraph_node_name (node->callers->caller),
1387 node->callers->caller->global.insns,
1388 overall_insns - old_insns);
1390 else
1392 if (cgraph_dump_file)
1393 fprintf (cgraph_dump_file,
1394 " Inline limit reached, not inlined.\n");
1396 for (y = 0; y < ninlined; y++)
1397 inlined[y]->output = 0, inlined[y]->aux = 0;
1402 cgraph_remove_unreachable_nodes ();
1404 if (cgraph_dump_file)
1405 fprintf (cgraph_dump_file,
1406 "\nInlined %i calls, eliminated %i functions, "
1407 "%i insns turned to %i insns.\n\n",
1408 ncalls_inlined, nfunctions_inlined, initial_insns,
1409 overall_insns);
1410 free (order);
1411 free (inlined);
1412 free (inlined_callees);
1415 /* Decide on the inlining. We do so in the topological order to avoid
1416 expenses on updating datastructures. */
1418 static void
1419 cgraph_decide_inlining_incrementally (struct cgraph_node *node)
1421 struct cgraph_edge *e;
1422 struct cgraph_node **inlined =
1423 xmalloc (sizeof (struct cgraph_node *) * cgraph_n_nodes);
1424 struct cgraph_node **inlined_callees =
1425 xmalloc (sizeof (struct cgraph_node *) * cgraph_n_nodes);
1426 int ninlined;
1427 int ninlined_callees;
1428 int y;
1430 ninlined = cgraph_inlined_into (node, inlined);
1432 /* First of all look for always inline functions. */
1433 for (e = node->callees; e; e = e->next_callee)
1434 if (e->callee->local.disregard_inline_limits && e->inline_failed
1435 /* ??? It is possible that renaming variable removed the function body
1436 in duplicate_decls. See gcc.c-torture/compile/20011119-2.c */
1437 && DECL_SAVED_TREE (e->callee->decl))
1439 if (e->callee->output || e->callee == node)
1441 e->inline_failed = N_("recursive inlining");
1442 continue;
1444 ninlined_callees = cgraph_inlined_callees (e->callee, inlined_callees);
1445 cgraph_mark_inline (node, e->callee, inlined, ninlined,
1446 inlined_callees, ninlined_callees);
1447 for (y = 0; y < ninlined_callees; y++)
1448 inlined_callees[y]->output = 0, inlined_callees[y]->aux = 0;
1451 if (!flag_really_no_inline)
1453 /* Now do the automatic inlining. */
1454 for (e = node->callees; e; e = e->next_callee)
1455 if (e->callee->local.inlinable && e->inline_failed
1456 && cgraph_default_inline_p (e->callee)
1457 && cgraph_check_inline_limits (node, e->callee, inlined,
1458 ninlined, &e->inline_failed)
1459 && DECL_SAVED_TREE (e->callee->decl))
1461 /* Marking recursive function inlinine has sane semantic and thus
1462 we should not warn on it. */
1463 if (e->callee->output || e->callee == node)
1465 e->inline_failed = "";
1466 continue;
1468 ninlined_callees = cgraph_inlined_callees (e->callee,
1469 inlined_callees);
1470 cgraph_mark_inline (node, e->callee, inlined, ninlined,
1471 inlined_callees, ninlined_callees);
1472 for (y = 0; y < ninlined_callees; y++)
1473 inlined_callees[y]->output = 0, inlined_callees[y]->aux = 0;
1477 /* Clear the flags set by cgraph_inlined_into. */
1478 for (y = 0; y < ninlined; y++)
1479 inlined[y]->output = 0, inlined[y]->aux = 0;
1481 free (inlined);
1482 free (inlined_callees);
1486 /* Return true when CALLER_DECL should be inlined into CALLEE_DECL.
1487 When returned false and reason is non-NULL, set it to the reason
1488 why the call was not inlined. */
1490 bool
1491 cgraph_inline_p (tree caller_decl, tree callee_decl, const char **reason)
1493 struct cgraph_node *caller = cgraph_node (caller_decl);
1494 struct cgraph_node *callee = cgraph_node (callee_decl);
1495 struct cgraph_edge *e;
1497 for (e = caller->callees; e; e = e->next_callee)
1498 if (e->callee == callee)
1500 if (e->inline_failed && reason)
1501 *reason = e->inline_failed;
1502 return !e->inline_failed;
1504 /* We do not record builtins in the callgraph. Perhaps it would make more
1505 sense to do so and then prune out those not overwritten by explicit
1506 function body. */
1507 if (reason)
1508 *reason = "originally indirect function calls never inlined";
1509 return false;
1511 /* Expand all functions that must be output.
1513 Attempt to topologically sort the nodes so function is output when
1514 all called functions are already assembled to allow data to be
1515 propagated across the callgraph. Use a stack to get smaller distance
1516 between a function and it's callees (later we may choose to use a more
1517 sophisticated algorithm for function reordering; we will likely want
1518 to use subsections to make the output functions appear in top-down
1519 order). */
1521 static void
1522 cgraph_expand_all_functions (void)
1524 struct cgraph_node *node;
1525 struct cgraph_node **order =
1526 xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
1527 int order_pos = 0;
1528 int i;
1530 cgraph_mark_functions_to_output ();
1532 order_pos = cgraph_postorder (order);
1534 for (i = order_pos - 1; i >= 0; i--)
1536 node = order[i];
1537 if (node->output)
1539 if (!node->reachable)
1540 abort ();
1541 node->output = 0;
1542 cgraph_expand_function (node);
1545 free (order);
1548 /* Mark all local functions.
1550 A local function is one whose calls can occur only in the
1551 current compilation unit and all it's calls are explicit,
1552 so we can change its calling convention.
1553 We simply mark all static functions whose address is not taken
1554 as local. */
1556 static void
1557 cgraph_mark_local_functions (void)
1559 struct cgraph_node *node;
1561 if (cgraph_dump_file)
1562 fprintf (cgraph_dump_file, "\nMarking local functions:");
1564 /* Figure out functions we want to assemble. */
1565 for (node = cgraph_nodes; node; node = node->next)
1567 node->local.local = (!node->needed
1568 && DECL_SAVED_TREE (node->decl)
1569 && !TREE_PUBLIC (node->decl));
1570 if (cgraph_dump_file && node->local.local)
1571 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1573 if (cgraph_dump_file)
1574 fprintf (cgraph_dump_file, "\n\n");
1577 /* Perform simple optimizations based on callgraph. */
1579 void
1580 cgraph_optimize (void)
1582 if (!flag_unit_at_a_time)
1583 return;
1584 timevar_push (TV_CGRAPHOPT);
1585 if (!quiet_flag)
1586 fprintf (stderr, "Performing intraprocedural optimizations\n");
1588 cgraph_mark_local_functions ();
1589 if (cgraph_dump_file)
1591 fprintf (cgraph_dump_file, "Marked ");
1592 dump_cgraph (cgraph_dump_file);
1595 cgraph_decide_inlining ();
1596 cgraph_global_info_ready = true;
1597 if (cgraph_dump_file)
1599 fprintf (cgraph_dump_file, "Optimized ");
1600 dump_cgraph (cgraph_dump_file);
1602 timevar_pop (TV_CGRAPHOPT);
1604 /* Output everything. */
1605 if (!quiet_flag)
1606 fprintf (stderr, "Assembling functions:\n");
1607 cgraph_expand_all_functions ();
1608 if (cgraph_dump_file)
1610 fprintf (cgraph_dump_file, "\nFinal ");
1611 dump_cgraph (cgraph_dump_file);