/* Utilities for ipa analysis.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "hard-reg-set.h"
#include "alias.h"
#include "options.h"
#include "fold-const.h"
#include "internal-fn.h"
#include "tree-inline.h"
#include "dumpfile.h"
#include "langhooks.h"
#include "splay-tree.h"
#include "cgraph.h"
#include "ipa-utils.h"
#include "ipa-reference.h"
#include "flags.h"
#include "diagnostic.h"
#include "lto-streamer.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-inline.h"
/* Debugging function for postorder and inorder code.  NOTE is a string
   that is printed before the nodes are printed.  ORDER is an array of
   cgraph_nodes that has COUNT useful nodes in it.  */

void
ipa_print_order (FILE* out,
		 const char * note,
		 struct cgraph_node** order,
		 int count)
{
  int i;
  fprintf (out, "\n\n ordered call graph: %s\n", note);

  for (i = count - 1; i >= 0; i--)
    order[i]->dump (out);
  fprintf (out, "\n");
  fflush (out);
}
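
/* Editorial usage sketch (not part of the original file): given an ORDER
   array filled by one of the postorder routines below, dumping it from a
   pass is a one-liner, e.g. to the pass dump file:

     ipa_print_order (dump_file, "reduced postorder", order, n);

   Here ORDER and N stand for the caller's own array and element count.  */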
struct searchc_env {
  struct cgraph_node **stack;
  int stack_size;
  struct cgraph_node **result;
  int order_pos;
  splay_tree nodes_marked_new;
  bool reduce;
  bool allow_overwritable;
  int count;
};
/* This is an implementation of Tarjan's strongly connected region
   finder as reprinted in Aho, Hopcroft and Ullman's The Design and
   Analysis of Computer Algorithms (1974), pages 192-193.  This version
   has been customized for cgraph_nodes.  The ENV parameter carries the
   traversal state, which would be unnecessary if C had nested
   functions, because the function is recursive.  This function should
   only be called from itself or from ipa_reduced_postorder.  V is the
   node to start searching from.  */
static void
searchc (struct searchc_env* env, struct cgraph_node *v,
	 bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_edge *edge;
  struct ipa_dfs_info *v_info = (struct ipa_dfs_info *) v->aux;

  /* mark node as old */
  v_info->new_node = false;
  splay_tree_remove (env->nodes_marked_new, v->uid);

  v_info->dfn_number = env->count;
  v_info->low_link = env->count;
  env->count++;
  env->stack[(env->stack_size)++] = v;
  v_info->on_stack = true;

  for (edge = v->callees; edge; edge = edge->next_callee)
    {
      struct ipa_dfs_info * w_info;
      enum availability avail;
      struct cgraph_node *w = edge->callee->ultimate_alias_target (&avail);

      if (!w || (ignore_edge && ignore_edge (edge)))
	continue;

      if (w->aux
	  && (avail > AVAIL_INTERPOSABLE
	      || (env->allow_overwritable && avail == AVAIL_INTERPOSABLE)))
	{
	  w_info = (struct ipa_dfs_info *) w->aux;
	  if (w_info->new_node)
	    {
	      searchc (env, w, ignore_edge);
	      v_info->low_link =
		(v_info->low_link < w_info->low_link) ?
		v_info->low_link : w_info->low_link;
	    }
	  else
	    if ((w_info->dfn_number < v_info->dfn_number)
		&& (w_info->on_stack))
	      v_info->low_link =
		(w_info->dfn_number < v_info->low_link) ?
		w_info->dfn_number : v_info->low_link;
	}
    }

  if (v_info->low_link == v_info->dfn_number)
    {
      struct cgraph_node *last = NULL;
      struct cgraph_node *x;
      struct ipa_dfs_info *x_info;
      do {
	x = env->stack[--(env->stack_size)];
	x_info = (struct ipa_dfs_info *) x->aux;
	x_info->on_stack = false;
	x_info->scc_no = v_info->dfn_number;

	if (env->reduce)
	  {
	    x_info->next_cycle = last;
	    last = x;
	  }
	else
	  env->result[env->order_pos++] = x;
      }
      while (v != x);
      if (env->reduce)
	env->result[env->order_pos++] = v;
    }
}
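
/* Editorial worked example (a sketch, not in the original source):
   consider a call graph A -> B, B -> C, C -> B.  The cycle {B, C} forms
   one SCC.  With ENV->reduce set, searchc stores only the SCC root in
   the result array and chains the remaining members through
   ipa_dfs_info::next_cycle, so the reduced postorder comes out with the
   {B, C} representative before A: callees first, each cycle collapsed
   to a single entry.  */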
/* Topsort the call graph by caller relation.  Put the result in ORDER.

   The REDUCE flag is true if you want the cycles reduced to single nodes.
   You can use ipa_get_nodes_in_cycle to obtain a vector containing all real
   call graph nodes in a reduced node.

   Set ALLOW_OVERWRITABLE if nodes with such availability should be included.
   IGNORE_EDGE, if non-NULL, is a hook that may make some edges insignificant
   for the topological sort.  */
int
ipa_reduced_postorder (struct cgraph_node **order,
		       bool reduce, bool allow_overwritable,
		       bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_node *node;
  struct searchc_env env;
  splay_tree_node result;
  env.stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  env.stack_size = 0;
  env.result = order;
  env.order_pos = 0;
  env.nodes_marked_new = splay_tree_new (splay_tree_compare_ints, 0, 0);
  env.count = 1;
  env.reduce = reduce;
  env.allow_overwritable = allow_overwritable;

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      enum availability avail = node->get_availability ();

      if (avail > AVAIL_INTERPOSABLE
	  || (allow_overwritable
	      && (avail == AVAIL_INTERPOSABLE)))
	{
	  /* Reuse the info if it is already there.  */
	  struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->aux;
	  if (!info)
	    info = XCNEW (struct ipa_dfs_info);
	  info->new_node = true;
	  info->on_stack = false;
	  info->next_cycle = NULL;
	  node->aux = info;

	  splay_tree_insert (env.nodes_marked_new,
			     (splay_tree_key)node->uid,
			     (splay_tree_value)node);
	}
      else
	node->aux = NULL;
    }
  result = splay_tree_min (env.nodes_marked_new);
  while (result)
    {
      node = (struct cgraph_node *)result->value;
      searchc (&env, node, ignore_edge);
      result = splay_tree_min (env.nodes_marked_new);
    }
  splay_tree_delete (env.nodes_marked_new);
  free (env.stack);

  return env.order_pos;
}
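
/* Editorial usage sketch (not part of the original file): the caller
   pattern typically seen in IPA propagation passes.  All names below are
   the GCC-internal API from this revision.

     struct cgraph_node **order
       = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
     int n = ipa_reduced_postorder (order, true, false, NULL);
     for (int i = 0; i < n; i++)
       {
	 // order[i] is an SCC representative; callees come before
	 // callers, so bottom-up propagation walks i from 0 upwards.
       }
     ipa_free_postorder_info ();
     free (order);
   */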
/* Deallocate all ipa_dfs_info structures pointed to by the aux pointer of call
   graph nodes.  */

void
ipa_free_postorder_info (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      /* Get rid of the aux information.  */
      if (node->aux)
	{
	  free (node->aux);
	  node->aux = NULL;
	}
    }
}
/* Get the set of nodes for the cycle in the reduced call graph starting
   from NODE.  */

vec<cgraph_node *>
ipa_get_nodes_in_cycle (struct cgraph_node *node)
{
  vec<cgraph_node *> v = vNULL;
  struct ipa_dfs_info *node_dfs_info;
  while (node)
    {
      v.safe_push (node);
      node_dfs_info = (struct ipa_dfs_info *) node->aux;
      node = node_dfs_info->next_cycle;
    }
  return v;
}
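
/* Editorial usage sketch (not in the original file): visiting every
   member of a reduced SCC, e.g. when a property must be propagated over
   a whole cycle.  ORDER[i] stands for an entry of a reduced postorder.

     vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (order[i]);
     unsigned ix;
     cgraph_node *member;
     FOR_EACH_VEC_ELT (cycle_nodes, ix, member)
       {
	 // ... process MEMBER ...
       }
     cycle_nodes.release ();
   */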
/* Return true iff the CS is an edge within a strongly connected component as
   computed by ipa_reduced_postorder.  */

bool
ipa_edge_within_scc (struct cgraph_edge *cs)
{
  struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->aux;
  struct ipa_dfs_info *callee_dfs;
  struct cgraph_node *callee = cs->callee->function_symbol ();

  callee_dfs = (struct ipa_dfs_info *) callee->aux;
  return (caller_dfs
	  && callee_dfs
	  && caller_dfs->scc_no == callee_dfs->scc_no);
}
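
/* Editorial usage sketch (not in the original file): a pass that must
   not propagate along edges staying inside a cycle can filter with this
   predicate, assuming ipa_reduced_postorder has filled in the aux data.

     for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
       if (!ipa_edge_within_scc (cs))
	 {
	   // ... CS leaves the SCC, safe to propagate along it ...
	 }
   */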
struct postorder_stack
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;
  int ref;
};

/* Fill array order with all nodes with output flag set in the reverse
   topological order.  Return the number of elements in the array.
   FIXME: While walking, consider aliases, too.  */
int
ipa_reverse_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge;
  int pass;
  struct ipa_ref *ref = NULL;

  struct postorder_stack *stack =
    XCNEWVEC (struct postorder_stack, symtab->cgraph_count);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  for (pass = 0; pass < 2; pass++)
    FOR_EACH_FUNCTION (node)
      if (!node->aux
	  && (pass
	      || (!node->address_taken
		  && !node->global.inlined_to
		  && !node->alias && !node->thunk.thunk_p
		  && !node->only_called_directly_p ())))
	{
	  stack_size = 0;
	  stack[stack_size].node = node;
	  stack[stack_size].edge = node->callers;
	  stack[stack_size].ref = 0;
	  node->aux = (void *)(size_t)1;
	  while (stack_size >= 0)
	    {
	      while (true)
		{
		  node2 = NULL;
		  while (stack[stack_size].edge && !node2)
		    {
		      edge = stack[stack_size].edge;
		      node2 = edge->caller;
		      stack[stack_size].edge = edge->next_caller;
		      /* Break possible cycles involving always-inline
			 functions by ignoring edges from always-inline
			 functions to non-always-inline functions.  */
		      if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->decl)
			  && !DECL_DISREGARD_INLINE_LIMITS
			      (edge->callee->function_symbol ()->decl))
			node2 = NULL;
		    }
		  for (; stack[stack_size].node->iterate_referring (
						       stack[stack_size].ref,
						       ref) && !node2;
		       stack[stack_size].ref++)
		    {
		      if (ref->use == IPA_REF_ALIAS)
			node2 = dyn_cast <cgraph_node *> (ref->referring);
		    }
		  if (!node2)
		    break;
		  if (!node2->aux)
		    {
		      stack[++stack_size].node = node2;
		      stack[stack_size].edge = node2->callers;
		      stack[stack_size].ref = 0;
		      node2->aux = (void *)(size_t)1;
		    }
		}
	      order[order_pos++] = stack[stack_size--].node;
	    }
	}
  free (stack);
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  return order_pos;
}
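
/* Editorial usage sketch (not in the original file): because the DFS
   walks caller edges, a function is emitted only after the functions
   calling it (cycles aside), so iterating the array forward visits
   callers before their callees.

     struct cgraph_node **order
       = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
     int n = ipa_reverse_postorder (order);
     for (int i = 0; i < n; i++)
       {
	 // order[i]'s callers (non-always-inline ones) already appeared.
       }
     free (order);
   */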
/* Given a memory reference T, will return the variable at the bottom
   of the access.  Unlike get_base_address, this will recurse through
   INDIRECT_REFS.  */

tree
get_base_var (tree t)
{
  while (!SSA_VAR_P (t)
	 && (!CONSTANT_CLASS_P (t))
	 && TREE_CODE (t) != LABEL_DECL
	 && TREE_CODE (t) != FUNCTION_DECL
	 && TREE_CODE (t) != CONST_DECL
	 && TREE_CODE (t) != CONSTRUCTOR)
    {
      t = TREE_OPERAND (t, 0);
    }
  return t;
}
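
/* Editorial worked example (not in the original file): for a reference
   such as a.b.c, represented as nested COMPONENT_REFs whose operand 0 is
   the accessed object, the loop strips one level per iteration:

     COMPONENT_REF (COMPONENT_REF (VAR_DECL a, FIELD_DECL b), FIELD_DECL c)
       -> COMPONENT_REF (VAR_DECL a, FIELD_DECL b)
       -> VAR_DECL a

   stopping at the VAR_DECL, which SSA_VAR_P accepts.  */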
/* SRC and DST are going to be merged.  Take SRC's profile and merge it into
   DST so it is not going to be lost.  Possibly destroy SRC's body on the way
   unless PRESERVE_BODY is set.  */
void
ipa_merge_profiles (struct cgraph_node *dst,
		    struct cgraph_node *src,
		    bool preserve_body)
{
  tree oldsrcdecl = src->decl;
  struct function *srccfun, *dstcfun;
  bool match = true;

  if (!src->definition
      || !dst->definition)
    return;
  if (src->frequency < dst->frequency)
    src->frequency = dst->frequency;

  /* Time profiles are merged.  */
  if (dst->tp_first_run > src->tp_first_run && src->tp_first_run)
    dst->tp_first_run = src->tp_first_run;

  if (src->profile_id && !dst->profile_id)
    dst->profile_id = src->profile_id;

  if (!dst->count)
    return;
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "Merging profiles of %s/%i to %s/%i\n",
	       xstrdup_for_dump (src->name ()), src->order,
	       xstrdup_for_dump (dst->name ()), dst->order);
    }
  dst->count += src->count;
  /* This is ugly.  We need to get both function bodies into memory.
     If declaration is merged, we need to duplicate it to be able
     to load body that is being replaced.  This makes symbol table
     temporarily inconsistent.  */
  if (src->decl == dst->decl)
    {
      struct lto_in_decl_state temp;
      struct lto_in_decl_state *state;

      /* We are going to move the decl; we want to remove its file decl
	 data and link these with the new decl.  */
      temp.fn_decl = src->decl;
      lto_in_decl_state **slot
	= src->lto_file_data->function_decl_states->find_slot (&temp,
							       NO_INSERT);
      state = *slot;
      src->lto_file_data->function_decl_states->clear_slot (slot);
      gcc_assert (state);

      /* Duplicate the decl and be sure it does not link into body of DST.  */
      src->decl = copy_node (src->decl);
      DECL_STRUCT_FUNCTION (src->decl) = NULL;
      DECL_ARGUMENTS (src->decl) = NULL;
      DECL_INITIAL (src->decl) = NULL;
      DECL_RESULT (src->decl) = NULL;

      /* Associate the decl state with new declaration, so LTO streamer
	 can look it up.  */
      state->fn_decl = src->decl;
      slot
	= src->lto_file_data->function_decl_states->find_slot (state, INSERT);
      gcc_assert (!*slot);
      *slot = state;
    }
  src->get_untransformed_body ();
  dst->get_untransformed_body ();
  srccfun = DECL_STRUCT_FUNCTION (src->decl);
  dstcfun = DECL_STRUCT_FUNCTION (dst->decl);
  if (n_basic_blocks_for_fn (srccfun)
      != n_basic_blocks_for_fn (dstcfun))
    {
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Giving up; number of basic blocks mismatch.\n");
      match = false;
    }
  else if (last_basic_block_for_fn (srccfun)
	   != last_basic_block_for_fn (dstcfun))
    {
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Giving up; last block mismatch.\n");
      match = false;
    }
  else
    {
      basic_block srcbb, dstbb;

      FOR_ALL_BB_FN (srcbb, srccfun)
	{
	  unsigned int i;

	  dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
	  if (dstbb == NULL)
	    {
	      if (symtab->dump_file)
		fprintf (symtab->dump_file,
			 "No matching block for bb %i.\n",
			 srcbb->index);
	      match = false;
	      break;
	    }
	  if (EDGE_COUNT (srcbb->succs) != EDGE_COUNT (dstbb->succs))
	    {
	      if (symtab->dump_file)
		fprintf (symtab->dump_file,
			 "Edge count mismatch for bb %i.\n",
			 srcbb->index);
	      match = false;
	      break;
	    }
	  for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
	    {
	      edge srce = EDGE_SUCC (srcbb, i);
	      edge dste = EDGE_SUCC (dstbb, i);
	      if (srce->dest->index != dste->dest->index)
		{
		  if (symtab->dump_file)
		    fprintf (symtab->dump_file,
			     "Succ edge mismatch for bb %i.\n",
			     srce->dest->index);
		  match = false;
		  break;
		}
	    }
	}
    }
  if (match)
    {
      struct cgraph_edge *e, *e2;
      basic_block srcbb, dstbb;

      /* TODO: merge also statement histograms.  */
      FOR_ALL_BB_FN (srcbb, srccfun)
	{
	  unsigned int i;

	  dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
	  dstbb->count += srcbb->count;
	  for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
	    {
	      edge srce = EDGE_SUCC (srcbb, i);
	      edge dste = EDGE_SUCC (dstbb, i);
	      dste->count += srce->count;
	    }
	}
      push_cfun (dstcfun);
      counts_to_freqs ();
      compute_function_frequency ();
      pop_cfun ();
      for (e = dst->callees; e; e = e->next_callee)
	{
	  if (e->speculative)
	    continue;
	  e->count = gimple_bb (e->call_stmt)->count;
	  e->frequency = compute_call_stmt_bb_frequency
			     (dst->decl,
			      gimple_bb (e->call_stmt));
	}
      for (e = dst->indirect_calls, e2 = src->indirect_calls; e;
	   e2 = (e2 ? e2->next_callee : NULL), e = e->next_callee)
	{
	  gcov_type count = gimple_bb (e->call_stmt)->count;
	  int freq = compute_call_stmt_bb_frequency
			 (dst->decl,
			  gimple_bb (e->call_stmt));
	  /* When the call is speculative, we need to re-distribute
	     probabilities the same way as they were.  This is not really
	     correct because in the other copy the speculation may differ;
	     but probably it is not really worth the effort.  */
	  if (e->speculative)
	    {
	      cgraph_edge *direct, *indirect;
	      cgraph_edge *direct2 = NULL, *indirect2 = NULL;
	      ipa_ref *ref;

	      e->speculative_call_info (direct, indirect, ref);
	      gcc_assert (e == indirect);
	      if (e2 && e2->speculative)
		e2->speculative_call_info (direct2, indirect2, ref);
	      if (indirect->count || direct->count)
		{
		  /* We should mismatch earlier if there is no matching
		     indirect edge.  */
		  if (!e2)
		    {
		      if (dump_file)
			fprintf (dump_file,
				 "Mismatch in merging indirect edges\n");
		    }
		  else if (!e2->speculative)
		    indirect->count += e2->count;
		  else if (e2->speculative)
		    {
		      if (DECL_ASSEMBLER_NAME (direct2->callee->decl)
			  != DECL_ASSEMBLER_NAME (direct->callee->decl))
			{
			  if (direct2->count >= direct->count)
			    {
			      direct->redirect_callee (direct2->callee);
			      indirect->count += indirect2->count
						 + direct->count;
			      direct->count = direct2->count;
			    }
			  else
			    indirect->count += indirect2->count
					       + direct2->count;
			}
		      else
			{
			  direct->count += direct2->count;
			  indirect->count += indirect2->count;
			}
		    }
		  int prob = RDIV (direct->count * REG_BR_PROB_BASE,
				   direct->count + indirect->count);
		  direct->frequency = RDIV (freq * prob, REG_BR_PROB_BASE);
		  indirect->frequency = RDIV (freq * (REG_BR_PROB_BASE - prob),
					      REG_BR_PROB_BASE);
		}
	      else
		/* At the moment we should have only profile feedback based
		   speculations when merging.  */
		gcc_unreachable ();
	    }
	  else if (e2 && e2->speculative)
	    {
	      cgraph_edge *direct, *indirect;
	      ipa_ref *ref;

	      e2->speculative_call_info (direct, indirect, ref);
	      e->count = count;
	      e->frequency = freq;
	      int prob = RDIV (direct->count * REG_BR_PROB_BASE, e->count);
	      e->make_speculative (direct->callee, direct->count,
				   RDIV (freq * prob, REG_BR_PROB_BASE));
	    }
	  else
	    {
	      e->count = count;
	      e->frequency = freq;
	    }
	}
      if (!preserve_body)
	src->release_body ();
      inline_update_overall_summary (dst);
    }
  /* TODO: if there is no match, we can scale up.  */
  src->decl = oldsrcdecl;
}
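
/* Editorial usage sketch (not in the original file): a pass that folds
   two equivalent function bodies merges their profiles with a call along
   the lines of

     ipa_merge_profiles (kept_node, removed_node, false);

   where KEPT_NODE and REMOVED_NODE are illustrative names, not
   identifiers from this revision; passing PRESERVE_BODY as false lets
   the source body be released once its counts are folded in.  */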
/* Return true if a call to DEST is known to be a self-recursive call
   within FUNC.  */

bool
recursive_call_p (tree func, tree dest)
{
  struct cgraph_node *dest_node = cgraph_node::get_create (dest);
  struct cgraph_node *cnode = cgraph_node::get_create (func);

  return dest_node->semantically_equivalent_p (cnode);
}
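
/* Editorial usage sketch (not in the original file): testing whether a
   gimple call statement recurses into the current function.  STMT is a
   hypothetical name for the statement being inspected.

     tree callee = gimple_call_fndecl (stmt);
     if (callee && recursive_call_p (current_function_decl, callee))
       {
	 // ... the call (possibly through an alias) is self-recursive ...
       }
   */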