/* Utilities for ipa analysis.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "tree.h"
#include "fold-const.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "tree-inline.h"
#include "dumpfile.h"
#include "langhooks.h"
#include "splay-tree.h"
#include "cgraph.h"
#include "ipa-utils.h"
#include "bitmap.h"
#include "ipa-reference.h"
#include "flags.h"
#include "diagnostic.h"
#include "lto-streamer.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-inline.h"

/* Debugging function for postorder and inorder code.  NOTE is a string
   that is printed before the nodes are printed.  ORDER is an array of
   cgraph_nodes that has COUNT useful nodes in it.  */

void
ipa_print_order (FILE *out,
		 const char *note,
		 struct cgraph_node **order,
		 int count)
{
  int i;
  fprintf (out, "\n\n ordered call graph: %s\n", note);

  for (i = count - 1; i >= 0; i--)
    order[i]->dump (out);
  fprintf (out, "\n");
  fflush (out);
}

struct searchc_env {
  struct cgraph_node **stack;	/* DFS stack of nodes being visited.  */
  int stack_size;
  struct cgraph_node **result;	/* Output array for the postorder.  */
  int order_pos;		/* Next free slot in RESULT.  */
  splay_tree nodes_marked_new;	/* Nodes not yet visited by the DFS.  */
  bool reduce;			/* Chain SCC members via next_cycle.  */
  bool allow_overwritable;	/* Also walk interposable nodes.  */
  int count;			/* Next DFS number to hand out.  */
};

/* This is an implementation of Tarjan's strongly connected region
   finder as reprinted in Aho, Hopcroft and Ullman's The Design and
   Analysis of Computer Algorithms (1974), pages 192-193.  This version
   has been customized for cgraph_nodes.  The env parameter is used because
   the function is recursive and there are no nested functions here.  This
   function should only be called from itself or
   ipa_reduced_postorder.  ENV is a stack env and would be
   unnecessary if C had nested functions.  V is the node to start
   searching from.  */

static void
searchc (struct searchc_env* env, struct cgraph_node *v,
	 bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_edge *edge;
  struct ipa_dfs_info *v_info = (struct ipa_dfs_info *) v->aux;

  /* Mark the node as old.  */
  v_info->new_node = false;
  splay_tree_remove (env->nodes_marked_new, v->uid);

  v_info->dfn_number = env->count;
  v_info->low_link = env->count;
  env->count++;
  env->stack[(env->stack_size)++] = v;
  v_info->on_stack = true;

  for (edge = v->callees; edge; edge = edge->next_callee)
    {
      struct ipa_dfs_info *w_info;
      enum availability avail;
      struct cgraph_node *w = edge->callee->ultimate_alias_target (&avail);

      if (!w || (ignore_edge && ignore_edge (edge)))
	continue;

      if (w->aux
	  && (avail > AVAIL_INTERPOSABLE
	      || (env->allow_overwritable && avail == AVAIL_INTERPOSABLE)))
	{
	  w_info = (struct ipa_dfs_info *) w->aux;
	  if (w_info->new_node)
	    {
	      searchc (env, w, ignore_edge);
	      v_info->low_link =
		(v_info->low_link < w_info->low_link) ?
		v_info->low_link : w_info->low_link;
	    }
	  else
	    if ((w_info->dfn_number < v_info->dfn_number)
		&& (w_info->on_stack))
	      v_info->low_link =
		(w_info->dfn_number < v_info->low_link) ?
		w_info->dfn_number : v_info->low_link;
	}
    }

  if (v_info->low_link == v_info->dfn_number)
    {
      struct cgraph_node *last = NULL;
      struct cgraph_node *x;
      struct ipa_dfs_info *x_info;
      do
	{
	  x = env->stack[--(env->stack_size)];
	  x_info = (struct ipa_dfs_info *) x->aux;
	  x_info->on_stack = false;
	  x_info->scc_no = v_info->dfn_number;

	  if (env->reduce)
	    {
	      x_info->next_cycle = last;
	      last = x;
	    }
	  else
	    env->result[env->order_pos++] = x;
	}
      while (v != x);
      if (env->reduce)
	env->result[env->order_pos++] = v;
    }
}

/* Topsort the call graph by caller relation.  Put the result in ORDER.

   The REDUCE flag is true if you want the cycles reduced to single nodes.
   You can use ipa_get_nodes_in_cycle to obtain a vector containing all real
   call graph nodes in a reduced node.

   Set ALLOW_OVERWRITABLE if nodes with such availability should be included.
   IGNORE_EDGE, if non-NULL, is a hook that may make some edges insignificant
   for the topological sort.  */

int
ipa_reduced_postorder (struct cgraph_node **order,
		       bool reduce, bool allow_overwritable,
		       bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_node *node;
  struct searchc_env env;
  splay_tree_node result;
  env.stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  env.stack_size = 0;
  env.result = order;
  env.order_pos = 0;
  env.nodes_marked_new = splay_tree_new (splay_tree_compare_ints, 0, 0);
  env.count = 1;
  env.reduce = reduce;
  env.allow_overwritable = allow_overwritable;

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      enum availability avail = node->get_availability ();

      if (avail > AVAIL_INTERPOSABLE
	  || (allow_overwritable
	      && (avail == AVAIL_INTERPOSABLE)))
	{
	  /* Reuse the info if it is already there.  */
	  struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->aux;
	  if (!info)
	    info = XCNEW (struct ipa_dfs_info);
	  info->new_node = true;
	  info->on_stack = false;
	  info->next_cycle = NULL;
	  node->aux = info;

	  splay_tree_insert (env.nodes_marked_new,
			     (splay_tree_key)node->uid,
			     (splay_tree_value)node);
	}
      else
	node->aux = NULL;
    }
  result = splay_tree_min (env.nodes_marked_new);
  while (result)
    {
      node = (struct cgraph_node *) result->value;
      searchc (&env, node, ignore_edge);
      result = splay_tree_min (env.nodes_marked_new);
    }
  splay_tree_delete (env.nodes_marked_new);
  free (env.stack);

  return env.order_pos;
}
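
/* A minimal usage sketch (illustrative only; "process" is a hypothetical
   per-node callback, not something defined in this file):

     struct cgraph_node **order
       = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
     int order_pos = ipa_reduced_postorder (order, true, false, NULL);
     for (int i = 0; i < order_pos; i++)
       process (order[i]);
     ipa_free_postorder_info ();
     free (order);

   Passes such as ipa-pure-const drive the walk this way.  */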

/* Deallocate all ipa_dfs_info structures pointed to by the aux pointer of call
   graph nodes.  */

void
ipa_free_postorder_info (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      /* Get rid of the aux information.  */
      if (node->aux)
	{
	  free (node->aux);
	  node->aux = NULL;
	}
    }
}

/* Get the set of nodes for the cycle in the reduced call graph starting
   from NODE.  */

vec<cgraph_node *>
ipa_get_nodes_in_cycle (struct cgraph_node *node)
{
  vec<cgraph_node *> v = vNULL;
  struct ipa_dfs_info *node_dfs_info;
  while (node)
    {
      v.safe_push (node);
      node_dfs_info = (struct ipa_dfs_info *) node->aux;
      node = node_dfs_info->next_cycle;
    }
  return v;
}
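
/* A minimal usage sketch (illustrative only): after ipa_reduced_postorder
   has run with REDUCE set, the members of the reduced node NODE can be
   walked with

     unsigned ix;
     cgraph_node *v;
     vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
     FOR_EACH_VEC_ELT (cycle_nodes, ix, v)
       process (v);
     cycle_nodes.release ();

   where "process" again stands in for the per-member work.  */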

/* Return true iff the CS is an edge within a strongly connected component as
   computed by ipa_reduced_postorder.  */

bool
ipa_edge_within_scc (struct cgraph_edge *cs)
{
  struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->aux;
  struct ipa_dfs_info *callee_dfs;
  struct cgraph_node *callee = cs->callee->function_symbol ();

  callee_dfs = (struct ipa_dfs_info *) callee->aux;
  return (caller_dfs
	  && callee_dfs
	  && caller_dfs->scc_no == callee_dfs->scc_no);
}
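
/* Illustrative use: propagation code that iterates over SCCs commonly
   skips edges that stay inside the current component, e.g.

     if (ipa_edge_within_scc (cs))
       continue;
*/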

struct postorder_stack
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;
  int ref;
};

/* Fill array ORDER with all nodes with output flag set in the reverse
   topological order.  Return the number of elements in the array.
   FIXME: While walking, consider aliases, too.  */

int
ipa_reverse_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge;
  int pass;
  struct ipa_ref *ref = NULL;

  struct postorder_stack *stack =
    XCNEWVEC (struct postorder_stack, symtab->cgraph_count);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  for (pass = 0; pass < 2; pass++)
    FOR_EACH_FUNCTION (node)
      if (!node->aux
	  && (pass
	      || (!node->address_taken
		  && !node->global.inlined_to
		  && !node->alias && !node->thunk.thunk_p
		  && !node->only_called_directly_p ())))
	{
	  stack_size = 0;
	  stack[stack_size].node = node;
	  stack[stack_size].edge = node->callers;
	  stack[stack_size].ref = 0;
	  node->aux = (void *)(size_t)1;
	  while (stack_size >= 0)
	    {
	      while (true)
		{
		  node2 = NULL;
		  while (stack[stack_size].edge && !node2)
		    {
		      edge = stack[stack_size].edge;
		      node2 = edge->caller;
		      stack[stack_size].edge = edge->next_caller;
		      /* Break possible cycles involving always-inline
			 functions by ignoring edges from always-inline
			 functions to non-always-inline functions.  */
		      if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->decl)
			  && !DECL_DISREGARD_INLINE_LIMITS
				(edge->callee->function_symbol ()->decl))
			node2 = NULL;
		    }
		  for (; stack[stack_size].node->iterate_referring (
							stack[stack_size].ref,
							ref) && !node2;
		       stack[stack_size].ref++)
		    {
		      if (ref->use == IPA_REF_ALIAS)
			node2 = dyn_cast <cgraph_node *> (ref->referring);
		    }
		  if (!node2)
		    break;
		  if (!node2->aux)
		    {
		      stack[++stack_size].node = node2;
		      stack[stack_size].edge = node2->callers;
		      stack[stack_size].ref = 0;
		      node2->aux = (void *)(size_t)1;
		    }
		}
	      order[order_pos++] = stack[stack_size--].node;
	    }
	}
  free (stack);
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  return order_pos;
}
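
/* A minimal usage sketch (illustrative only; "process" is a hypothetical
   per-node callback):

     cgraph_node **order = XCNEWVEC (cgraph_node *, symtab->cgraph_count);
     int n = ipa_reverse_postorder (order);
     for (int i = 0; i < n; i++)
       process (order[i]);
     free (order);
*/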

/* Given a memory reference T, return the variable at the bottom of the
   access.  Unlike get_base_address, this will recurse through
   INDIRECT_REFS.  */

tree
get_base_var (tree t)
{
  while (!SSA_VAR_P (t)
	 && (!CONSTANT_CLASS_P (t))
	 && TREE_CODE (t) != LABEL_DECL
	 && TREE_CODE (t) != FUNCTION_DECL
	 && TREE_CODE (t) != CONST_DECL
	 && TREE_CODE (t) != CONSTRUCTOR)
    {
      t = TREE_OPERAND (t, 0);
    }
  return t;
}
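
/* For example, given the reference a.b[i].c this strips the COMPONENT_REFs
   and the ARRAY_REF and returns the VAR_DECL for "a"; given *p it strips
   the MEM_REF and returns whatever "p" is represented by, e.g. an SSA
   name.  */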

/* SRC and DST are going to be merged.  Take SRC's profile and merge it into
   DST so it is not going to be lost.  Possibly destroy SRC's body on the way
   unless PRESERVE_BODY is set.  */

void
ipa_merge_profiles (struct cgraph_node *dst,
		    struct cgraph_node *src,
		    bool preserve_body)
{
  tree oldsrcdecl = src->decl;
  struct function *srccfun, *dstcfun;
  bool match = true;

  if (!src->definition
      || !dst->definition)
    return;
  if (src->frequency < dst->frequency)
    src->frequency = dst->frequency;

  /* Time profiles are merged.  */
  if (dst->tp_first_run > src->tp_first_run && src->tp_first_run)
    dst->tp_first_run = src->tp_first_run;

  if (src->profile_id && !dst->profile_id)
    dst->profile_id = src->profile_id;

  if (!dst->count)
    return;
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "Merging profiles of %s/%i to %s/%i\n",
	       xstrdup_for_dump (src->name ()), src->order,
	       xstrdup_for_dump (dst->name ()), dst->order);
    }
  dst->count += src->count;

  /* This is ugly.  We need to get both function bodies into memory.
     If declaration is merged, we need to duplicate it to be able
     to load body that is being replaced.  This makes symbol table
     temporarily inconsistent.  */
  if (src->decl == dst->decl)
    {
      struct lto_in_decl_state temp;
      struct lto_in_decl_state *state;

      /* We are going to move the decl, we want to remove its file decl data
	 and link these with the new decl.  */
      temp.fn_decl = src->decl;
      lto_in_decl_state **slot
	= src->lto_file_data->function_decl_states->find_slot (&temp,
							       NO_INSERT);
      state = *slot;
      src->lto_file_data->function_decl_states->clear_slot (slot);
      gcc_assert (state);

      /* Duplicate the decl and be sure it does not link into body of DST.  */
      src->decl = copy_node (src->decl);
      DECL_STRUCT_FUNCTION (src->decl) = NULL;
      DECL_ARGUMENTS (src->decl) = NULL;
      DECL_INITIAL (src->decl) = NULL;
      DECL_RESULT (src->decl) = NULL;

      /* Associate the decl state with new declaration, so LTO streamer
	 can look it up.  */
      state->fn_decl = src->decl;
      slot
	= src->lto_file_data->function_decl_states->find_slot (state, INSERT);
      gcc_assert (!*slot);
      *slot = state;
    }
  src->get_untransformed_body ();
  dst->get_untransformed_body ();
  srccfun = DECL_STRUCT_FUNCTION (src->decl);
  dstcfun = DECL_STRUCT_FUNCTION (dst->decl);
  if (n_basic_blocks_for_fn (srccfun)
      != n_basic_blocks_for_fn (dstcfun))
    {
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Giving up; number of basic blocks mismatch.\n");
      match = false;
    }
  else if (last_basic_block_for_fn (srccfun)
	   != last_basic_block_for_fn (dstcfun))
    {
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Giving up; last block mismatch.\n");
      match = false;
    }
  else
    {
      basic_block srcbb, dstbb;

      FOR_ALL_BB_FN (srcbb, srccfun)
	{
	  unsigned int i;

	  dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
	  if (dstbb == NULL)
	    {
	      if (symtab->dump_file)
		fprintf (symtab->dump_file,
			 "No matching block for bb %i.\n",
			 srcbb->index);
	      match = false;
	      break;
	    }
	  if (EDGE_COUNT (srcbb->succs) != EDGE_COUNT (dstbb->succs))
	    {
	      if (symtab->dump_file)
		fprintf (symtab->dump_file,
			 "Edge count mismatch for bb %i.\n",
			 srcbb->index);
	      match = false;
	      break;
	    }
	  for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
	    {
	      edge srce = EDGE_SUCC (srcbb, i);
	      edge dste = EDGE_SUCC (dstbb, i);
	      if (srce->dest->index != dste->dest->index)
		{
		  if (symtab->dump_file)
		    fprintf (symtab->dump_file,
			     "Succ edge mismatch for bb %i.\n",
			     srce->dest->index);
		  match = false;
		  break;
		}
	    }
	}
    }
  if (match)
    {
      struct cgraph_edge *e, *e2;
      basic_block srcbb, dstbb;

      /* TODO: merge also statement histograms.  */
      FOR_ALL_BB_FN (srcbb, srccfun)
	{
	  unsigned int i;

	  dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
	  dstbb->count += srcbb->count;
	  for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
	    {
	      edge srce = EDGE_SUCC (srcbb, i);
	      edge dste = EDGE_SUCC (dstbb, i);
	      dste->count += srce->count;
	    }
	}
      push_cfun (dstcfun);
      counts_to_freqs ();
      compute_function_frequency ();
      pop_cfun ();
      for (e = dst->callees; e; e = e->next_callee)
	{
	  if (e->speculative)
	    continue;
	  e->count = gimple_bb (e->call_stmt)->count;
	  e->frequency = compute_call_stmt_bb_frequency
			   (dst->decl,
			    gimple_bb (e->call_stmt));
	}
      for (e = dst->indirect_calls, e2 = src->indirect_calls; e;
	   e2 = (e2 ? e2->next_callee : NULL), e = e->next_callee)
	{
	  gcov_type count = gimple_bb (e->call_stmt)->count;
	  int freq = compute_call_stmt_bb_frequency
		       (dst->decl,
			gimple_bb (e->call_stmt));
	  /* When the call is speculative, we need to re-distribute the
	     probabilities the same way as they were before.  This is not
	     really correct because in the other copy the speculation may
	     differ; but it is probably not worth the effort.  */
	  if (e->speculative)
	    {
	      cgraph_edge *direct, *indirect;
	      cgraph_edge *direct2 = NULL, *indirect2 = NULL;
	      ipa_ref *ref;

	      e->speculative_call_info (direct, indirect, ref);
	      gcc_assert (e == indirect);
	      if (e2 && e2->speculative)
		e2->speculative_call_info (direct2, indirect2, ref);
	      if (indirect->count || direct->count)
		{
		  /* We should have mismatched earlier if there is no
		     matching indirect edge.  */
		  if (!e2)
		    {
		      if (dump_file)
			fprintf (dump_file,
				 "Mismatch in merging indirect edges\n");
		    }
		  else if (!e2->speculative)
		    indirect->count += e2->count;
		  else if (e2->speculative)
		    {
		      if (DECL_ASSEMBLER_NAME (direct2->callee->decl)
			  != DECL_ASSEMBLER_NAME (direct->callee->decl))
			{
			  if (direct2->count >= direct->count)
			    {
			      direct->redirect_callee (direct2->callee);
			      indirect->count += indirect2->count
						 + direct->count;
			      direct->count = direct2->count;
			    }
			  else
			    indirect->count += indirect2->count
					       + direct2->count;
			}
		      else
			{
			  direct->count += direct2->count;
			  indirect->count += indirect2->count;
			}
		    }
		  int prob = RDIV (direct->count * REG_BR_PROB_BASE,
				   direct->count + indirect->count);
		  direct->frequency = RDIV (freq * prob, REG_BR_PROB_BASE);
		  indirect->frequency = RDIV (freq * (REG_BR_PROB_BASE - prob),
					      REG_BR_PROB_BASE);
		}
	      else
		/* At the moment we should have only profile feedback based
		   speculations when merging.  */
		gcc_unreachable ();
	    }
	  else if (e2 && e2->speculative)
	    {
	      cgraph_edge *direct, *indirect;
	      ipa_ref *ref;

	      e2->speculative_call_info (direct, indirect, ref);
	      e->count = count;
	      e->frequency = freq;
	      int prob = RDIV (direct->count * REG_BR_PROB_BASE, e->count);
	      e->make_speculative (direct->callee, direct->count,
				   RDIV (freq * prob, REG_BR_PROB_BASE));
	    }
	  else
	    {
	      e->count = count;
	      e->frequency = freq;
	    }
	}
      if (!preserve_body)
	src->release_body ();
      inline_update_overall_summary (dst);
    }
  /* TODO: if there is no match, we can scale up.  */
  src->decl = oldsrcdecl;
}

/* Return true if a call to DEST is known to be a self-recursive call
   within FUNC.  */

bool
recursive_call_p (tree func, tree dest)
{
  struct cgraph_node *dest_node = cgraph_node::get_create (dest);
  struct cgraph_node *cnode = cgraph_node::get_create (func);

  return dest_node->semantically_equivalent_p (cnode);
}
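
/* Illustrative use, with FNDECL standing for the callee's FUNCTION_DECL:

     if (recursive_call_p (current_function_decl, fndecl))
       ...

   detects a self-recursive call in the function currently being
   compiled.  */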