gcc/ipa-utils.c
/* Utilities for ipa analysis.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "tree.h"
#include "fold-const.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "tree-inline.h"
#include "dumpfile.h"
#include "langhooks.h"
#include "splay-tree.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "ipa-utils.h"
#include "bitmap.h"
#include "ipa-reference.h"
#include "flags.h"
#include "diagnostic.h"
#include "lto-streamer.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-inline.h"
/* Debugging function for postorder and inorder code.  NOTE is a string
   that is printed before the nodes are printed.  ORDER is an array of
   cgraph_nodes that has COUNT useful nodes in it.  */

void
ipa_print_order (FILE* out,
                 const char * note,
                 struct cgraph_node** order,
                 int count)
{
  int i;
  fprintf (out, "\n\n ordered call graph: %s\n", note);

  for (i = count - 1; i >= 0; i--)
    order[i]->dump (out);
  fprintf (out, "\n");
  fflush (out);
}
struct searchc_env {
  struct cgraph_node **stack;
  int stack_size;
  struct cgraph_node **result;
  int order_pos;
  splay_tree nodes_marked_new;
  bool reduce;
  bool allow_overwritable;
  int count;
};
/* This is an implementation of Tarjan's strongly connected region
   finder as reprinted in Aho, Hopcroft and Ullman's The Design and
   Analysis of Computer Algorithms (1974), pages 192-193.  This version
   has been customized for cgraph_nodes.  The ENV parameter carries the
   state that a nested function would otherwise capture; it would be
   unnecessary if C had nested functions.  This function should only be
   called from itself or from ipa_reduced_postorder.  V is the node to
   start searching from.  */
static void
searchc (struct searchc_env* env, struct cgraph_node *v,
         bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_edge *edge;
  struct ipa_dfs_info *v_info = (struct ipa_dfs_info *) v->aux;

  /* Mark node as old.  */
  v_info->new_node = false;
  splay_tree_remove (env->nodes_marked_new, v->uid);

  v_info->dfn_number = env->count;
  v_info->low_link = env->count;
  env->count++;
  env->stack[(env->stack_size)++] = v;
  v_info->on_stack = true;

  for (edge = v->callees; edge; edge = edge->next_callee)
    {
      struct ipa_dfs_info * w_info;
      enum availability avail;
      struct cgraph_node *w = edge->callee->ultimate_alias_target (&avail);

      if (!w || (ignore_edge && ignore_edge (edge)))
        continue;

      if (w->aux
          && (avail > AVAIL_INTERPOSABLE
              || (env->allow_overwritable && avail == AVAIL_INTERPOSABLE)))
        {
          w_info = (struct ipa_dfs_info *) w->aux;
          if (w_info->new_node)
            {
              searchc (env, w, ignore_edge);
              v_info->low_link =
                (v_info->low_link < w_info->low_link) ?
                v_info->low_link : w_info->low_link;
            }
          else
            if ((w_info->dfn_number < v_info->dfn_number)
                && (w_info->on_stack))
              v_info->low_link =
                (w_info->dfn_number < v_info->low_link) ?
                w_info->dfn_number : v_info->low_link;
        }
    }

  if (v_info->low_link == v_info->dfn_number)
    {
      struct cgraph_node *last = NULL;
      struct cgraph_node *x;
      struct ipa_dfs_info *x_info;
      do {
        x = env->stack[--(env->stack_size)];
        x_info = (struct ipa_dfs_info *) x->aux;
        x_info->on_stack = false;
        x_info->scc_no = v_info->dfn_number;

        if (env->reduce)
          {
            x_info->next_cycle = last;
            last = x;
          }
        else
          env->result[env->order_pos++] = x;
      }
      while (v != x);
      if (env->reduce)
        env->result[env->order_pos++] = v;
    }
}
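
/* Worked example (editor's sketch, not from the original source): for a
   call cycle A -> B -> C -> A entered at A, searchc assigns dfn_number
   and low_link values 1, 2 and 3 to A, B and C.  The back edge C -> A
   finds A still on the stack with a smaller dfn_number, so C's low_link
   drops to 1 and propagates to B and then A as the recursion unwinds.
   Only A ends with low_link == dfn_number, so only A pops the stack,
   emitting {C, B, A} as a single SCC with scc_no 1.  */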
/* Topsort the call graph by caller relation.  Put the result in ORDER.

   The REDUCE flag is true if you want the cycles reduced to single nodes.
   You can use ipa_get_nodes_in_cycle to obtain a vector containing all real
   call graph nodes in a reduced node.

   Set ALLOW_OVERWRITABLE if nodes with such availability should be included.
   IGNORE_EDGE, if non-NULL, is a hook that may make some edges insignificant
   for the topological sort.  */

int
ipa_reduced_postorder (struct cgraph_node **order,
                       bool reduce, bool allow_overwritable,
                       bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_node *node;
  struct searchc_env env;
  splay_tree_node result;
  env.stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  env.stack_size = 0;
  env.result = order;
  env.order_pos = 0;
  env.nodes_marked_new = splay_tree_new (splay_tree_compare_ints, 0, 0);
  env.count = 1;
  env.reduce = reduce;
  env.allow_overwritable = allow_overwritable;

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      enum availability avail = node->get_availability ();

      if (avail > AVAIL_INTERPOSABLE
          || (allow_overwritable
              && (avail == AVAIL_INTERPOSABLE)))
        {
          /* Reuse the info if it is already there.  */
          struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->aux;
          if (!info)
            info = XCNEW (struct ipa_dfs_info);
          info->new_node = true;
          info->on_stack = false;
          info->next_cycle = NULL;
          node->aux = info;

          splay_tree_insert (env.nodes_marked_new,
                             (splay_tree_key)node->uid,
                             (splay_tree_value)node);
        }
      else
        node->aux = NULL;
    }
  result = splay_tree_min (env.nodes_marked_new);
  while (result)
    {
      node = (struct cgraph_node *)result->value;
      searchc (&env, node, ignore_edge);
      result = splay_tree_min (env.nodes_marked_new);
    }
  splay_tree_delete (env.nodes_marked_new);
  free (env.stack);

  return env.order_pos;
}
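
/* Usage sketch (editor's addition, not from the original source): a
   typical IPA propagation driver.  Only functions defined in this file
   and the symtab API already included above are used; process_scc is a
   hypothetical name for whatever per-SCC work the pass does.

     struct cgraph_node **order
       = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
     int n = ipa_reduced_postorder (order, true, false, NULL);
     for (int i = 0; i < n; i++)
       process_scc (order[i]);
     ipa_free_postorder_info ();
     free (order);

   With REDUCE true, each order[i] is a single representative node per
   strongly connected component.  */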
/* Deallocate all ipa_dfs_info structures pointed to by the aux pointer of call
   graph nodes.  */

void
ipa_free_postorder_info (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      /* Get rid of the aux information.  */
      if (node->aux)
        {
          free (node->aux);
          node->aux = NULL;
        }
    }
}
/* Get the set of nodes for the cycle in the reduced call graph starting
   from NODE.  */

vec<cgraph_node *>
ipa_get_nodes_in_cycle (struct cgraph_node *node)
{
  vec<cgraph_node *> v = vNULL;
  struct ipa_dfs_info *node_dfs_info;
  while (node)
    {
      v.safe_push (node);
      node_dfs_info = (struct ipa_dfs_info *) node->aux;
      node = node_dfs_info->next_cycle;
    }
  return v;
}
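
/* Illustrative sketch (editor's addition, not from the original source):
   the returned vector is built by chasing the next_cycle links that
   searchc threaded through each member's ipa_dfs_info.  The caller owns
   the vector and must release it; visit is a hypothetical callback.

     int i;
     cgraph_node *member;
     vec<cgraph_node *> cycle = ipa_get_nodes_in_cycle (rep);
     FOR_EACH_VEC_ELT (cycle, i, member)
       visit (member);
     cycle.release ();  */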
/* Return true iff the CS is an edge within a strongly connected component as
   computed by ipa_reduced_postorder.  */

bool
ipa_edge_within_scc (struct cgraph_edge *cs)
{
  struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->aux;
  struct ipa_dfs_info *callee_dfs;
  struct cgraph_node *callee = cs->callee->function_symbol ();

  callee_dfs = (struct ipa_dfs_info *) callee->aux;
  return (caller_dfs
          && callee_dfs
          && caller_dfs->scc_no == callee_dfs->scc_no);
}
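
/* Illustrative sketch (editor's addition, not from the original source):
   propagation code can use this predicate to distinguish edges that stay
   inside the current SCC from edges leaving it; propagate_across is a
   hypothetical helper.

     for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
       if (!ipa_edge_within_scc (cs))
         propagate_across (cs);

   The test is only meaningful while the ipa_dfs_info created by
   ipa_reduced_postorder is still attached to the aux pointers, i.e.
   before ipa_free_postorder_info runs.  */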
struct postorder_stack
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;
  int ref;
};

/* Fill array order with all nodes with output flag set in the reverse
   topological order.  Return the number of elements in the array.
   FIXME: While walking, consider aliases, too.  */

int
ipa_reverse_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge;
  int pass;
  struct ipa_ref *ref = NULL;

  struct postorder_stack *stack =
    XCNEWVEC (struct postorder_stack, symtab->cgraph_count);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  for (pass = 0; pass < 2; pass++)
    FOR_EACH_FUNCTION (node)
      if (!node->aux
          && (pass
              || (!node->address_taken
                  && !node->global.inlined_to
                  && !node->alias && !node->thunk.thunk_p
                  && !node->only_called_directly_p ())))
        {
          stack_size = 0;
          stack[stack_size].node = node;
          stack[stack_size].edge = node->callers;
          stack[stack_size].ref = 0;
          node->aux = (void *)(size_t)1;
          while (stack_size >= 0)
            {
              while (true)
                {
                  node2 = NULL;
                  while (stack[stack_size].edge && !node2)
                    {
                      edge = stack[stack_size].edge;
                      node2 = edge->caller;
                      stack[stack_size].edge = edge->next_caller;
                      /* Break possible cycles involving always-inline
                         functions by ignoring edges from always-inline
                         functions to non-always-inline functions.  */
                      if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->decl)
                          && !DECL_DISREGARD_INLINE_LIMITS
                          (edge->callee->function_symbol ()->decl))
                        node2 = NULL;
                    }
                  for (; stack[stack_size].node->iterate_referring (
                                                       stack[stack_size].ref,
                                                       ref) && !node2;
                       stack[stack_size].ref++)
                    {
                      if (ref->use == IPA_REF_ALIAS)
                        node2 = dyn_cast <cgraph_node *> (ref->referring);
                    }
                  if (!node2)
                    break;
                  if (!node2->aux)
                    {
                      stack[++stack_size].node = node2;
                      stack[stack_size].edge = node2->callers;
                      stack[stack_size].ref = 0;
                      node2->aux = (void *)(size_t)1;
                    }
                }
              order[order_pos++] = stack[stack_size--].node;
            }
        }
  free (stack);
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  return order_pos;
}
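
/* Usage sketch (editor's addition, not from the original source): callers
   allocate ORDER for the whole symbol table and consume the result in
   whichever direction suits them; output is a hypothetical callback.

     cgraph_node **order
       = XCNEWVEC (cgraph_node *, symtab->cgraph_count);
     int n = ipa_reverse_postorder (order);
     for (int i = 0; i < n; i++)
       output (order[i]);
     free (order);

   Unlike ipa_reduced_postorder, this walk leaves no per-node data behind;
   all aux pointers are cleared before returning.  */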
/* Given a memory reference T, will return the variable at the bottom
   of the access.  Unlike get_base_address, this will recurse through
   INDIRECT_REFS.  */

tree
get_base_var (tree t)
{
  while (!SSA_VAR_P (t)
         && (!CONSTANT_CLASS_P (t))
         && TREE_CODE (t) != LABEL_DECL
         && TREE_CODE (t) != FUNCTION_DECL
         && TREE_CODE (t) != CONST_DECL
         && TREE_CODE (t) != CONSTRUCTOR)
    {
      t = TREE_OPERAND (t, 0);
    }
  return t;
}
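
/* Illustrative example (editor's addition, not from the original source):
   for a reference such as (*p).f[i], i.e. an ARRAY_REF of a COMPONENT_REF
   of an INDIRECT_REF, the loop above keeps taking TREE_OPERAND (t, 0)
   through all three levels and returns the variable P at the bottom,
   whereas get_base_address would stop at the dereference.  */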
/* SRC and DST are going to be merged.  Take SRC's profile and merge it into
   DST so it is not going to be lost.  Possibly destroy SRC's body on the way
   unless PRESERVE_BODY is set.  */

void
ipa_merge_profiles (struct cgraph_node *dst,
                    struct cgraph_node *src,
                    bool preserve_body)
{
  tree oldsrcdecl = src->decl;
  struct function *srccfun, *dstcfun;
  bool match = true;

  if (!src->definition
      || !dst->definition)
    return;
  if (src->frequency < dst->frequency)
    src->frequency = dst->frequency;

  /* Time profiles are merged.  */
  if (dst->tp_first_run > src->tp_first_run && src->tp_first_run)
    dst->tp_first_run = src->tp_first_run;

  if (src->profile_id && !dst->profile_id)
    dst->profile_id = src->profile_id;

  if (!dst->count)
    return;
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "Merging profiles of %s/%i to %s/%i\n",
               xstrdup_for_dump (src->name ()), src->order,
               xstrdup_for_dump (dst->name ()), dst->order);
    }
  dst->count += src->count;

  /* This is ugly.  We need to get both function bodies into memory.
     If declaration is merged, we need to duplicate it to be able
     to load body that is being replaced.  This makes symbol table
     temporarily inconsistent.  */
  if (src->decl == dst->decl)
    {
      struct lto_in_decl_state temp;
      struct lto_in_decl_state *state;

      /* We are going to move the decl; we want to remove its file decl data
         and link these with the new decl.  */
      temp.fn_decl = src->decl;
      lto_in_decl_state **slot
        = src->lto_file_data->function_decl_states->find_slot (&temp,
                                                               NO_INSERT);
      state = *slot;
      src->lto_file_data->function_decl_states->clear_slot (slot);
      gcc_assert (state);

      /* Duplicate the decl and be sure it does not link into body of DST.  */
      src->decl = copy_node (src->decl);
      DECL_STRUCT_FUNCTION (src->decl) = NULL;
      DECL_ARGUMENTS (src->decl) = NULL;
      DECL_INITIAL (src->decl) = NULL;
      DECL_RESULT (src->decl) = NULL;

      /* Associate the decl state with new declaration, so LTO streamer
         can look it up.  */
      state->fn_decl = src->decl;
      slot
        = src->lto_file_data->function_decl_states->find_slot (state, INSERT);
      gcc_assert (!*slot);
      *slot = state;
    }
  src->get_untransformed_body ();
  dst->get_untransformed_body ();
  srccfun = DECL_STRUCT_FUNCTION (src->decl);
  dstcfun = DECL_STRUCT_FUNCTION (dst->decl);
  if (n_basic_blocks_for_fn (srccfun)
      != n_basic_blocks_for_fn (dstcfun))
    {
      if (symtab->dump_file)
        fprintf (symtab->dump_file,
                 "Giving up; number of basic blocks mismatch.\n");
      match = false;
    }
  else if (last_basic_block_for_fn (srccfun)
           != last_basic_block_for_fn (dstcfun))
    {
      if (symtab->dump_file)
        fprintf (symtab->dump_file,
                 "Giving up; last block mismatch.\n");
      match = false;
    }
  else
    {
      basic_block srcbb, dstbb;

      FOR_ALL_BB_FN (srcbb, srccfun)
        {
          unsigned int i;

          dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
          if (dstbb == NULL)
            {
              if (symtab->dump_file)
                fprintf (symtab->dump_file,
                         "No matching block for bb %i.\n",
                         srcbb->index);
              match = false;
              break;
            }
          if (EDGE_COUNT (srcbb->succs) != EDGE_COUNT (dstbb->succs))
            {
              if (symtab->dump_file)
                fprintf (symtab->dump_file,
                         "Edge count mismatch for bb %i.\n",
                         srcbb->index);
              match = false;
              break;
            }
          for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
            {
              edge srce = EDGE_SUCC (srcbb, i);
              edge dste = EDGE_SUCC (dstbb, i);
              if (srce->dest->index != dste->dest->index)
                {
                  if (symtab->dump_file)
                    fprintf (symtab->dump_file,
                             "Succ edge mismatch for bb %i.\n",
                             srce->dest->index);
                  match = false;
                  break;
                }
            }
        }
    }
  if (match)
    {
      struct cgraph_edge *e, *e2;
      basic_block srcbb, dstbb;

      /* TODO: merge also statement histograms.  */
      FOR_ALL_BB_FN (srcbb, srccfun)
        {
          unsigned int i;

          dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
          dstbb->count += srcbb->count;
          for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
            {
              edge srce = EDGE_SUCC (srcbb, i);
              edge dste = EDGE_SUCC (dstbb, i);
              dste->count += srce->count;
            }
        }
      push_cfun (dstcfun);
      counts_to_freqs ();
      compute_function_frequency ();
      pop_cfun ();
      for (e = dst->callees; e; e = e->next_callee)
        {
          if (e->speculative)
            continue;
          e->count = gimple_bb (e->call_stmt)->count;
          e->frequency = compute_call_stmt_bb_frequency
                             (dst->decl,
                              gimple_bb (e->call_stmt));
        }
      for (e = dst->indirect_calls, e2 = src->indirect_calls; e;
           e2 = (e2 ? e2->next_callee : NULL), e = e->next_callee)
        {
          gcov_type count = gimple_bb (e->call_stmt)->count;
          int freq = compute_call_stmt_bb_frequency
                         (dst->decl,
                          gimple_bb (e->call_stmt));
          /* When the call is speculative, we need to re-distribute
             probabilities the same way as they were.  This is not really
             correct because in the other copy the speculation may differ;
             but probably it is not really worth the effort.  */
          if (e->speculative)
            {
              cgraph_edge *direct, *indirect;
              cgraph_edge *direct2 = NULL, *indirect2 = NULL;
              ipa_ref *ref;

              e->speculative_call_info (direct, indirect, ref);
              gcc_assert (e == indirect);
              if (e2 && e2->speculative)
                e2->speculative_call_info (direct2, indirect2, ref);
              if (indirect->count || direct->count)
                {
                  /* We should mismatch earlier if there is no matching
                     indirect edge.  */
                  if (!e2)
                    {
                      if (dump_file)
                        fprintf (dump_file,
                                 "Mismatch in merging indirect edges\n");
                    }
                  else if (!e2->speculative)
                    indirect->count += e2->count;
                  else if (e2->speculative)
                    {
                      if (DECL_ASSEMBLER_NAME (direct2->callee->decl)
                          != DECL_ASSEMBLER_NAME (direct->callee->decl))
                        {
                          if (direct2->count >= direct->count)
                            {
                              direct->redirect_callee (direct2->callee);
                              indirect->count += indirect2->count
                                                 + direct->count;
                              direct->count = direct2->count;
                            }
                          else
                            indirect->count += indirect2->count
                                               + direct2->count;
                        }
                      else
                        {
                          direct->count += direct2->count;
                          indirect->count += indirect2->count;
                        }
                    }
                  int prob = RDIV (direct->count * REG_BR_PROB_BASE,
                                   direct->count + indirect->count);
                  direct->frequency = RDIV (freq * prob, REG_BR_PROB_BASE);
                  indirect->frequency = RDIV (freq * (REG_BR_PROB_BASE - prob),
                                              REG_BR_PROB_BASE);
                }
              else
                /* At the moment we should have only profile feedback based
                   speculations when merging.  */
                gcc_unreachable ();
            }
          else if (e2 && e2->speculative)
            {
              cgraph_edge *direct, *indirect;
              ipa_ref *ref;

              e2->speculative_call_info (direct, indirect, ref);
              e->count = count;
              e->frequency = freq;
              int prob = RDIV (direct->count * REG_BR_PROB_BASE, e->count);
              e->make_speculative (direct->callee, direct->count,
                                   RDIV (freq * prob, REG_BR_PROB_BASE));
            }
          else
            {
              e->count = count;
              e->frequency = freq;
            }
        }
      if (!preserve_body)
        src->release_body ();
      inline_update_overall_summary (dst);
    }
  /* TODO: if there is no match, we can scale up.  */
  src->decl = oldsrcdecl;
}
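
/* Usage sketch (editor's addition, not from the original source): this is
   the shape of the call the LTO symbol merging code makes when two
   definitions of the same function meet, with PREVAILING and DUPLICATE as
   hypothetical names for the kept and discarded nodes:

     if (prevailing->definition && duplicate->definition)
       ipa_merge_profiles (prevailing, duplicate, false);

   Note that when the CFGs fail to match, only the update to dst->count
   done before the comparison takes effect; per-BB counts and call edge
   counts are left untouched.  */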
/* Return true if a call to DEST is known to be a self-recursive call
   within FUNC.  */

bool
recursive_call_p (tree func, tree dest)
{
  struct cgraph_node *dest_node = cgraph_node::get_create (dest);
  struct cgraph_node *cnode = cgraph_node::get_create (func);

  return dest_node->semantically_equivalent_p (cnode);
}
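
/* Illustrative sketch (editor's addition, not from the original source):
   a statement scan might use this to flag self-recursion, including
   recursion through aliases, since the test is semantic equivalence
   rather than pointer identity; note_self_recursion is hypothetical.

     tree callee = gimple_call_fndecl (stmt);
     if (callee && recursive_call_p (current_function_decl, callee))
       note_self_recursion (stmt);  */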