1 /* Interprocedural constant propagation
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
6 <mjambor@suse.cz>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 /* Interprocedural constant propagation (IPA-CP).
26 The goal of this transformation is to
28 1) discover functions which are always invoked with some arguments with the
29 same known constant values and modify the functions so that the
30 subsequent optimizations can take advantage of the knowledge, and
32 2) partial specialization - create specialized versions of functions
33 transformed in this way if some parameters are known constants only in
34        certain contexts but the estimated tradeoff between the speedup and the
35        size cost is deemed good.
37 The algorithm also propagates types and attempts to perform type based
38 devirtualization. Types are propagated much like constants.
40 The algorithm basically consists of three stages. In the first, functions
41 are analyzed one at a time and jump functions are constructed for all known
42 call-sites. In the second phase, the pass propagates information from the
43 jump functions across the call to reveal what values are available at what
44 call sites, performs estimations of effects of known values on functions and
45 their callees, and finally decides what specialized extra versions should be
46 created. In the third, the special versions materialize and appropriate
47 calls are redirected.
49 The algorithm used is to a certain extent based on "Interprocedural Constant
50 Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
51 Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
52 Cooper, Mary W. Hall, and Ken Kennedy.
55 First stage - intraprocedural analysis
56 =======================================
58 This phase computes jump_function and modification flags.
60    A jump function for a call-site represents the values passed as actual
61    arguments of a given call-site. In principle, there are three types of
62 values:
64 Pass through - the caller's formal parameter is passed as an actual
65 argument, plus an operation on it can be performed.
66 Constant - a constant is passed as an actual argument.
67 Unknown - neither of the above.
69 All jump function types are described in detail in ipa-prop.h, together with
70 the data structures that represent them and methods of accessing them.
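   As an illustration (a hypothetical example, not taken from any particular
   testcase), consider a call-site in a function bar with formal parameter c:

     void bar (int c)
     {
       foo (c + 1, 7, *global_ptr);
     }

   The first actual argument would be described by a pass-through jump
   function with a PLUS_EXPR operation and operand 1, the second by a
   constant jump function holding 7, and the third by an unknown jump
   function because nothing useful is known about it.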
72 ipcp_generate_summary() is the main function of the first stage.
74 Second stage - interprocedural analysis
75 ========================================
77 This stage is itself divided into two phases. In the first, we propagate
78 known values over the call graph, in the second, we make cloning decisions.
79    It uses a different algorithm than the one in the original Callahan paper.
81 First, we traverse the functions topologically from callers to callees and,
82 for each strongly connected component (SCC), we propagate constants
83 according to previously computed jump functions. We also record what known
84 values depend on other known values and estimate local effects. Finally, we
85    propagate cumulative information about these effects from dependent values
86 to those on which they depend.
88 Second, we again traverse the call graph in the same topological order and
89 make clones for functions which we know are called with the same values in
90 all contexts and decide about extra specialized clones of functions just for
91 some contexts - these decisions are based on both local estimates and
92 cumulative estimates propagated from callees.
94 ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
95    second stage.
97 Third phase - materialization of clones, call statement updates.
98    =================================================================
100 This stage is currently performed by call graph code (mainly in cgraphunit.c
101    and tree-inline.c) according to instructions inserted into the call graph by
102 the second stage. */
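/* As a sketch of the overall effect (a hypothetical example with made-up
   function names, not taken from any testcase), if every call in the unit to

     static int mult (int x, int factor) { return x * factor; }

   passes 8 as FACTOR, the pass can propagate the constant into mult itself;
   if only some contexts pass 8, it can instead create a clone such as
   mult.constprop.0 in which FACTOR is replaced by 8 (and possibly removed
   from the parameter list) and redirect the qualifying call-sites to it, so
   that later optimizations can fold the multiplication into a shift.  */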
104 #include "config.h"
105 #include "system.h"
106 #include "coretypes.h"
107 #include "tree.h"
108 #include "target.h"
109 #include "gimple.h"
110 #include "cgraph.h"
111 #include "ipa-prop.h"
112 #include "tree-flow.h"
113 #include "tree-pass.h"
114 #include "flags.h"
115 #include "timevar.h"
116 #include "diagnostic.h"
117 #include "tree-pretty-print.h"
118 #include "tree-dump.h"
119 #include "tree-inline.h"
120 #include "fibheap.h"
121 #include "params.h"
122 #include "ipa-inline.h"
123 #include "ipa-utils.h"
125 struct ipcp_value;
127 /* Describes a particular source for an IPA-CP value. */
129 struct ipcp_value_source
131 /* The incoming edge that brought the value. */
132 struct cgraph_edge *cs;
133   /* If the jump function that resulted in this value was a pass-through or an
134 ancestor, this is the ipcp_value of the caller from which the described
135 value has been derived. Otherwise it is NULL. */
136 struct ipcp_value *val;
137 /* Next pointer in a linked list of sources of a value. */
138 struct ipcp_value_source *next;
139   /* If the jump function that resulted in this value was a pass-through or an
140 ancestor, this is the index of the parameter of the caller the jump
141 function references. */
142 int index;
145 /* Describes one particular value stored in struct ipcp_lattice. */
147 struct ipcp_value
149 /* The actual value for the given parameter. This is either an IPA invariant
150 or a TREE_BINFO describing a type that can be used for
151 devirtualization. */
152 tree value;
153 /* The list of sources from which this value originates. */
154 struct ipcp_value_source *sources;
155 /* Next pointers in a linked list of all values in a lattice. */
156 struct ipcp_value *next;
157 /* Next pointers in a linked list of values in a strongly connected component
158 of values. */
159 struct ipcp_value *scc_next;
160 /* Next pointers in a linked list of SCCs of values sorted topologically
161 according their sources. */
162 struct ipcp_value *topo_next;
163 /* A specialized node created for this value, NULL if none has been (so far)
164 created. */
165 struct cgraph_node *spec_node;
166 /* Depth first search number and low link for topological sorting of
167 values. */
168 int dfs, low_link;
169 /* Time benefit and size cost that specializing the function for this value
170 would bring about in this function alone. */
171 int local_time_benefit, local_size_cost;
172 /* Time benefit and size cost that specializing the function for this value
173      can bring about in its callees (transitively).  */
174 int prop_time_benefit, prop_size_cost;
175   /* True if this value is currently on the topo-sort stack.  */
176 bool on_stack;
179 /* Allocation pools for values and their sources in ipa-cp. */
181 alloc_pool ipcp_values_pool;
182 alloc_pool ipcp_sources_pool;
184 /* Lattice describing potential values of a formal parameter of a function and
185 some of their other properties. TOP is represented by a lattice with zero
186 values and with contains_variable and bottom flags cleared. BOTTOM is
187 represented by a lattice with the bottom flag set. In that case, values and
188 contains_variable flag should be disregarded. */
190 struct ipcp_lattice
192 /* The list of known values and types in this lattice. Note that values are
193 not deallocated if a lattice is set to bottom because there may be value
194 sources referencing them. */
195 struct ipcp_value *values;
196 /* Number of known values and types in this lattice. */
197 int values_count;
198 /* The lattice contains a variable component (in addition to values). */
199 bool contains_variable;
200 /* The value of the lattice is bottom (i.e. variable and unusable for any
201 propagation). */
202 bool bottom;
203 /* There is a virtual call based on this parameter. */
204 bool virt_call;
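/* For illustration (a hypothetical scenario, not from a specific testcase):
   a parameter that is passed 4 from one call-site and 7 from another ends up
   with a lattice holding two ipcp_values (4 and 7); a further call-site
   passing something that is not an interprocedural invariant additionally
   sets contains_variable; and once values_count reaches
   PARAM_IPA_CP_VALUE_LIST_SIZE the lattice is dropped to BOTTOM.  */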
207 /* Maximal count found in program. */
209 static gcov_type max_count;
211 /* Original overall size of the program. */
213 static long overall_size, max_new_size;
215 /* Head of the linked list of topologically sorted values. */
217 static struct ipcp_value *values_topo;
219 /* Return the lattice corresponding to the Ith formal parameter of the function
220 described by INFO. */
221 static inline struct ipcp_lattice *
222 ipa_get_lattice (struct ipa_node_params *info, int i)
224   gcc_assert (i >= 0 && i < ipa_get_param_count (info));
225 gcc_checking_assert (!info->ipcp_orig_node);
226 gcc_checking_assert (info->lattices);
227 return &(info->lattices[i]);
230 /* Return whether LAT is a lattice with a single constant and without an
231 undefined value. */
233 static inline bool
234 ipa_lat_is_single_const (struct ipcp_lattice *lat)
236 if (lat->bottom
237 || lat->contains_variable
238 || lat->values_count != 1)
239 return false;
240 else
241 return true;
244 /* Return true iff CS is an edge within a strongly connected component as
245 computed by ipa_reduced_postorder. */
247 static inline bool
248 edge_within_scc (struct cgraph_edge *cs)
250 struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->aux;
251 struct ipa_dfs_info *callee_dfs;
252 struct cgraph_node *callee = cgraph_function_node (cs->callee, NULL);
254 callee_dfs = (struct ipa_dfs_info *) callee->aux;
255 return (caller_dfs
256 && callee_dfs
257 && caller_dfs->scc_no == callee_dfs->scc_no);
260 /* Print V which is extracted from a value in a lattice to F. */
262 static void
263 print_ipcp_constant_value (FILE * f, tree v)
265 if (TREE_CODE (v) == TREE_BINFO)
267 fprintf (f, "BINFO ");
268 print_generic_expr (f, BINFO_TYPE (v), 0);
270 else if (TREE_CODE (v) == ADDR_EXPR
271 && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
273 fprintf (f, "& ");
274 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)), 0);
276 else
277 print_generic_expr (f, v, 0);
280 /* Print all ipcp_lattices of all functions to F. */
282 static void
283 print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
285 struct cgraph_node *node;
286 int i, count;
288 fprintf (f, "\nLattices:\n");
289 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
291 struct ipa_node_params *info;
293 info = IPA_NODE_REF (node);
294 fprintf (f, " Node: %s/%i:\n", cgraph_node_name (node), node->uid);
295 count = ipa_get_param_count (info);
296 for (i = 0; i < count; i++)
298 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
299 struct ipcp_value *val;
300 bool prev = false;
302 fprintf (f, " param [%d]: ", i);
303 if (lat->bottom)
305 fprintf (f, "BOTTOM\n");
306 continue;
309 if (!lat->values_count && !lat->contains_variable)
311 fprintf (f, "TOP\n");
312 continue;
315 if (lat->contains_variable)
317 fprintf (f, "VARIABLE");
318 prev = true;
319 if (dump_benefits)
320 fprintf (f, "\n");
323 for (val = lat->values; val; val = val->next)
325 if (dump_benefits && prev)
326 fprintf (f, " ");
327 else if (!dump_benefits && prev)
328 fprintf (f, ", ");
329 else
330 prev = true;
332 print_ipcp_constant_value (f, val->value);
334 if (dump_sources)
336 struct ipcp_value_source *s;
338 fprintf (f, " [from:");
339 for (s = val->sources; s; s = s->next)
340                 fprintf (f, " %i(%i)", s->cs->caller->uid, s->cs->frequency);
341 fprintf (f, "]");
344 if (dump_benefits)
345 fprintf (f, " [loc_time: %i, loc_size: %i, "
346 "prop_time: %i, prop_size: %i]\n",
347 val->local_time_benefit, val->local_size_cost,
348 val->prop_time_benefit, val->prop_size_cost);
350 if (!dump_benefits)
351 fprintf (f, "\n");
356 /* Determine whether it is at all technically possible to create clones of NODE
357 and store this information in the ipa_node_params structure associated
358 with NODE. */
360 static void
361 determine_versionability (struct cgraph_node *node)
363 struct cgraph_edge *edge;
364 const char *reason = NULL;
366 /* There are a number of generic reasons functions cannot be versioned. We
367 also cannot remove parameters if there are type attributes such as fnspec
368 present. */
369 if (node->alias || node->thunk.thunk_p)
370 reason = "alias or thunk";
371 else if (!inline_summary (node)->versionable)
372 reason = "inliner claims it is so";
373 else if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
374 reason = "there are type attributes";
375 else if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
376 reason = "insufficient body availability";
377 else
378 /* Removing arguments doesn't work if the function takes varargs
379        or uses __builtin_apply_args.
380 FIXME: handle this together with can_change_signature flag. */
381 for (edge = node->callees; edge; edge = edge->next_callee)
383 tree t = edge->callee->decl;
384 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
385 && (DECL_FUNCTION_CODE (t) == BUILT_IN_APPLY_ARGS
386 || DECL_FUNCTION_CODE (t) == BUILT_IN_VA_START))
388 reason = "prohibitive builtins called";
389 break;
393 if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
394 fprintf (dump_file, "Function %s/%i is not versionable, reason: %s.\n",
395 cgraph_node_name (node), node->uid, reason);
397 IPA_NODE_REF (node)->node_versionable = (reason == NULL);
400 /* Return true if it is at all technically possible to create clones of a
401 NODE. */
403 static bool
404 ipcp_versionable_function_p (struct cgraph_node *node)
406 return IPA_NODE_REF (node)->node_versionable;
409 /* Structure holding accumulated information about callers of a node. */
411 struct caller_statistics
413 gcov_type count_sum;
414 int n_calls, n_hot_calls, freq_sum;
417 /* Initialize fields of STAT to zeroes. */
419 static inline void
420 init_caller_stats (struct caller_statistics *stats)
422 stats->count_sum = 0;
423 stats->n_calls = 0;
424 stats->n_hot_calls = 0;
425 stats->freq_sum = 0;
428 /* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
429 non-thunk incoming edges to NODE. */
431 static bool
432 gather_caller_stats (struct cgraph_node *node, void *data)
434 struct caller_statistics *stats = (struct caller_statistics *) data;
435 struct cgraph_edge *cs;
437 for (cs = node->callers; cs; cs = cs->next_caller)
438 if (cs->caller->thunk.thunk_p)
439 cgraph_for_node_and_aliases (cs->caller, gather_caller_stats,
440 stats, false);
441 else
443 stats->count_sum += cs->count;
444 stats->freq_sum += cs->frequency;
445 stats->n_calls++;
446 if (cgraph_maybe_hot_edge_p (cs))
447 stats->n_hot_calls ++;
449 return false;
453 /* Return true if this NODE is a viable candidate for cloning.  */
455 static bool
456 ipcp_cloning_candidate_p (struct cgraph_node *node)
458 struct caller_statistics stats;
460 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
462 if (!flag_ipa_cp_clone)
464 if (dump_file)
465 fprintf (dump_file, "Not considering %s for cloning; "
466 "-fipa-cp-clone disabled.\n",
467 cgraph_node_name (node));
468 return false;
471 if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
473 if (dump_file)
474 fprintf (dump_file, "Not considering %s for cloning; "
475 "optimizing it for size.\n",
476 cgraph_node_name (node));
477 return false;
480 init_caller_stats (&stats);
481 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
483 if (inline_summary (node)->self_size < stats.n_calls)
485 if (dump_file)
486 fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
487 cgraph_node_name (node));
488 return true;
491   /* When a profile is available and the function is hot, propagate into it even
492      if the calls seem cold; constant propagation can improve the function's speed
493 significantly. */
494 if (max_count)
496 if (stats.count_sum > node->count * 90 / 100)
498 if (dump_file)
499 fprintf (dump_file, "Considering %s for cloning; "
500 "usually called directly.\n",
501 cgraph_node_name (node));
502 return true;
505 if (!stats.n_hot_calls)
507 if (dump_file)
508 fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
509 cgraph_node_name (node));
510 return false;
512 if (dump_file)
513 fprintf (dump_file, "Considering %s for cloning.\n",
514 cgraph_node_name (node));
515 return true;
518 /* Arrays representing a topological ordering of call graph nodes and a stack
519    of nodes used during constant propagation.  */
521 struct topo_info
523 struct cgraph_node **order;
524 struct cgraph_node **stack;
525 int nnodes, stack_top;
528 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
530 static void
531 build_toporder_info (struct topo_info *topo)
533 topo->order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
534 topo->stack = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
535 topo->stack_top = 0;
536 topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
539 /* Free information about strongly connected components and the arrays in
540 TOPO. */
542 static void
543 free_toporder_info (struct topo_info *topo)
545 ipa_free_postorder_info ();
546 free (topo->order);
547 free (topo->stack);
550 /* Add NODE to the stack in TOPO, unless it is already there. */
552 static inline void
553 push_node_to_stack (struct topo_info *topo, struct cgraph_node *node)
555 struct ipa_node_params *info = IPA_NODE_REF (node);
556 if (info->node_enqueued)
557 return;
558 info->node_enqueued = 1;
559 topo->stack[topo->stack_top++] = node;
562 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
563 is empty. */
565 static struct cgraph_node *
566 pop_node_from_stack (struct topo_info *topo)
568 if (topo->stack_top)
570 struct cgraph_node *node;
571 topo->stack_top--;
572 node = topo->stack[topo->stack_top];
573 IPA_NODE_REF (node)->node_enqueued = 0;
574 return node;
576 else
577 return NULL;
580 /* Set lattice LAT to bottom and return true if it previously was not set as
581 such. */
583 static inline bool
584 set_lattice_to_bottom (struct ipcp_lattice *lat)
586 bool ret = !lat->bottom;
587 lat->bottom = true;
588 return ret;
591 /* Mark lattice as containing an unknown value and return true if it previously
592 was not marked as such. */
594 static inline bool
595 set_lattice_contains_variable (struct ipcp_lattice *lat)
597 bool ret = !lat->contains_variable;
598 lat->contains_variable = true;
599 return ret;
602 /* Initialize ipcp_lattices. */
604 static void
605 initialize_node_lattices (struct cgraph_node *node)
607 struct ipa_node_params *info = IPA_NODE_REF (node);
608 struct cgraph_edge *ie;
609 bool disable = false, variable = false;
610 int i;
612 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
613 if (ipa_is_called_with_var_arguments (info))
614 disable = true;
615 else if (!node->local.local)
617 /* When cloning is allowed, we can assume that externally visible
618          functions are not called.  We will compensate for this by cloning
619 later. */
620 if (ipcp_versionable_function_p (node)
621 && ipcp_cloning_candidate_p (node))
622 variable = true;
623 else
624 disable = true;
627 if (disable || variable)
629 for (i = 0; i < ipa_get_param_count (info) ; i++)
631 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
632 if (disable)
633 set_lattice_to_bottom (lat);
634 else
635 set_lattice_contains_variable (lat);
637 if (dump_file && (dump_flags & TDF_DETAILS)
638       && !node->alias && !node->thunk.thunk_p)
639 fprintf (dump_file, "Marking all lattices of %s/%i as %s\n",
640 cgraph_node_name (node), node->uid,
641 disable ? "BOTTOM" : "VARIABLE");
644 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
645 if (ie->indirect_info->polymorphic)
647 gcc_checking_assert (ie->indirect_info->param_index >= 0);
648 ipa_get_lattice (info, ie->indirect_info->param_index)->virt_call = 1;
652 /* Return the result of a (possibly arithmetic) pass through jump function
653 JFUNC on the constant value INPUT. Return NULL_TREE if that cannot be
654    determined or cannot itself be considered an interprocedural invariant.  */
656 static tree
657 ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
659 tree restype, res;
661 gcc_checking_assert (is_gimple_ip_invariant (input));
662 if (jfunc->value.pass_through.operation == NOP_EXPR)
663 return input;
665 if (TREE_CODE_CLASS (jfunc->value.pass_through.operation)
666 == tcc_comparison)
667 restype = boolean_type_node;
668 else
669 restype = TREE_TYPE (input);
670 res = fold_binary (jfunc->value.pass_through.operation, restype,
671 input, jfunc->value.pass_through.operand);
673 if (res && !is_gimple_ip_invariant (res))
674 return NULL_TREE;
676 return res;
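/* For example (a hypothetical case, not tied to any testcase), for an
   arithmetic pass-through jump function describing an argument computed as
   param + 4 (operation PLUS_EXPR, operand 4), an INPUT of 3 makes fold_binary
   above return the interprocedural invariant 7; for a comparison operation
   the result type is boolean_type_node rather than the type of INPUT.  */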
679 /* Return the result of an ancestor jump function JFUNC on the constant value
680 INPUT. Return NULL_TREE if that cannot be determined. */
682 static tree
683 ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
685 if (TREE_CODE (input) == ADDR_EXPR)
687 tree t = TREE_OPERAND (input, 0);
688 t = build_ref_for_offset (EXPR_LOCATION (t), t,
689 jfunc->value.ancestor.offset,
690 jfunc->value.ancestor.type, NULL, false);
691 return build_fold_addr_expr (t);
693 else
694 return NULL_TREE;
697 /* Determine whether JFUNC evaluates to a known value (that is either a
698 constant or a binfo) and if so, return it. Otherwise return NULL. INFO
699 describes the caller node so that pass-through jump functions can be
700 evaluated. */
702 static tree
703 ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
705 if (jfunc->type == IPA_JF_CONST)
706 return jfunc->value.constant;
707 else if (jfunc->type == IPA_JF_KNOWN_TYPE)
708 return jfunc->value.base_binfo;
709 else if (jfunc->type == IPA_JF_PASS_THROUGH
710 || jfunc->type == IPA_JF_ANCESTOR)
712 tree input;
713 int idx;
715 if (jfunc->type == IPA_JF_PASS_THROUGH)
716 idx = jfunc->value.pass_through.formal_id;
717 else
718 idx = jfunc->value.ancestor.formal_id;
720 if (info->ipcp_orig_node)
721 input = VEC_index (tree, info->known_vals, idx);
722 else
724 struct ipcp_lattice *lat;
726 if (!info->lattices)
728 gcc_checking_assert (!flag_ipa_cp);
729 return NULL_TREE;
731 lat = ipa_get_lattice (info, idx);
732 if (!ipa_lat_is_single_const (lat))
733 return NULL_TREE;
734 input = lat->values->value;
737 if (!input)
738 return NULL_TREE;
740 if (jfunc->type == IPA_JF_PASS_THROUGH)
742 if (jfunc->value.pass_through.operation == NOP_EXPR)
743 return input;
744 else if (TREE_CODE (input) == TREE_BINFO)
745 return NULL_TREE;
746 else
747 return ipa_get_jf_pass_through_result (jfunc, input);
749 else
751 if (TREE_CODE (input) == TREE_BINFO)
752 return get_binfo_at_offset (input, jfunc->value.ancestor.offset,
753 jfunc->value.ancestor.type);
754 else
755 return ipa_get_jf_ancestor_result (jfunc, input);
758 else
759 return NULL_TREE;
762 /* Determine whether JFUNC evaluates to a constant and if so, return it.
763 Otherwise return NULL. INFO describes the caller node so that pass-through
764 jump functions can be evaluated. */
766 tree
767 ipa_cst_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
769 tree res = ipa_value_from_jfunc (info, jfunc);
771 if (res && TREE_CODE (res) == TREE_BINFO)
772 return NULL_TREE;
773 else
774 return res;
778 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
779 bottom, not containing a variable component and without any known value at
780 the same time. */
782 DEBUG_FUNCTION void
783 ipcp_verify_propagated_values (void)
785 struct cgraph_node *node;
787 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
789 struct ipa_node_params *info = IPA_NODE_REF (node);
790 int i, count = ipa_get_param_count (info);
792 for (i = 0; i < count; i++)
794 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
796 if (!lat->bottom
797 && !lat->contains_variable
798 && lat->values_count == 0)
800 if (dump_file)
802 fprintf (dump_file, "\nIPA lattices after constant "
803 "propagation:\n");
804 print_all_lattices (dump_file, true, false);
807 gcc_unreachable ();
813 /* Return true iff X and Y should be considered equal values by IPA-CP. */
815 static bool
816 values_equal_for_ipcp_p (tree x, tree y)
818 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
820 if (x == y)
821 return true;
823 if (TREE_CODE (x) == TREE_BINFO || TREE_CODE (y) == TREE_BINFO)
824 return false;
826 if (TREE_CODE (x) == ADDR_EXPR
827 && TREE_CODE (y) == ADDR_EXPR
828 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
829 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
830 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
831 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
832 else
833 return operand_equal_p (x, y, 0);
836 /* Add a new value source to VAL, marking that a value comes from edge CS and
837 (if the underlying jump function is a pass-through or an ancestor one) from
838 a caller value SRC_VAL of a caller parameter described by SRC_INDEX. */
840 static void
841 add_value_source (struct ipcp_value *val, struct cgraph_edge *cs,
842 struct ipcp_value *src_val, int src_idx)
844 struct ipcp_value_source *src;
846 src = (struct ipcp_value_source *) pool_alloc (ipcp_sources_pool);
847 src->cs = cs;
848 src->val = src_val;
849 src->index = src_idx;
851 src->next = val->sources;
852 val->sources = src;
856 /* Try to add NEWVAL to LAT, potentially creating a new struct ipcp_value for
857 it. CS, SRC_VAL and SRC_INDEX are meant for add_value_source and have the
858 same meaning. */
860 static bool
861 add_value_to_lattice (struct ipcp_lattice *lat, tree newval,
862 struct cgraph_edge *cs, struct ipcp_value *src_val,
863 int src_idx)
865 struct ipcp_value *val;
867 if (lat->bottom)
868 return false;
871 for (val = lat->values; val; val = val->next)
872 if (values_equal_for_ipcp_p (val->value, newval))
874 if (edge_within_scc (cs))
876 struct ipcp_value_source *s;
877 for (s = val->sources; s ; s = s->next)
878 if (s->cs == cs)
879 break;
880 if (s)
881 return false;
884 add_value_source (val, cs, src_val, src_idx);
885 return false;
888 if (lat->values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
890 /* We can only free sources, not the values themselves, because sources
891      of other values in this SCC might point to them.  */
892 for (val = lat->values; val; val = val->next)
894 while (val->sources)
896 struct ipcp_value_source *src = val->sources;
897 val->sources = src->next;
898 pool_free (ipcp_sources_pool, src);
902 lat->values = NULL;
903 return set_lattice_to_bottom (lat);
906 lat->values_count++;
907 val = (struct ipcp_value *) pool_alloc (ipcp_values_pool);
908 memset (val, 0, sizeof (*val));
910 add_value_source (val, cs, src_val, src_idx);
911 val->value = newval;
912 val->next = lat->values;
913 lat->values = val;
914 return true;
917 /* Propagate values through a pass-through jump function JFUNC associated with
918 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
919 is the index of the source parameter. */
921 static bool
922 propagate_vals_accross_pass_through (struct cgraph_edge *cs,
923 struct ipa_jump_func *jfunc,
924 struct ipcp_lattice *src_lat,
925 struct ipcp_lattice *dest_lat,
926 int src_idx)
928 struct ipcp_value *src_val;
929 bool ret = false;
931 if (jfunc->value.pass_through.operation == NOP_EXPR)
932 for (src_val = src_lat->values; src_val; src_val = src_val->next)
933 ret |= add_value_to_lattice (dest_lat, src_val->value, cs,
934 src_val, src_idx);
935   /* Do not create new values when propagating within an SCC because if there
936      are arithmetic functions with circular dependencies, there is an infinite
937      number of them and we would just make the lattices bottom.  */
938 else if (edge_within_scc (cs))
939 ret = set_lattice_contains_variable (dest_lat);
940 else
941 for (src_val = src_lat->values; src_val; src_val = src_val->next)
943 tree cstval = src_val->value;
945 if (TREE_CODE (cstval) == TREE_BINFO)
947 ret |= set_lattice_contains_variable (dest_lat);
948 continue;
950 cstval = ipa_get_jf_pass_through_result (jfunc, cstval);
952 if (cstval)
953 ret |= add_value_to_lattice (dest_lat, cstval, cs, src_val, src_idx);
954 else
955 ret |= set_lattice_contains_variable (dest_lat);
958 return ret;
961 /* Propagate values through an ancestor jump function JFUNC associated with
962 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
963 is the index of the source parameter. */
965 static bool
966 propagate_vals_accross_ancestor (struct cgraph_edge *cs,
967 struct ipa_jump_func *jfunc,
968 struct ipcp_lattice *src_lat,
969 struct ipcp_lattice *dest_lat,
970 int src_idx)
972 struct ipcp_value *src_val;
973 bool ret = false;
975 if (edge_within_scc (cs))
976 return set_lattice_contains_variable (dest_lat);
978 for (src_val = src_lat->values; src_val; src_val = src_val->next)
980 tree t = src_val->value;
982 if (TREE_CODE (t) == TREE_BINFO)
983 t = get_binfo_at_offset (t, jfunc->value.ancestor.offset,
984 jfunc->value.ancestor.type);
985 else
986 t = ipa_get_jf_ancestor_result (jfunc, t);
988 if (t)
989 ret |= add_value_to_lattice (dest_lat, t, cs, src_val, src_idx);
990 else
991 ret |= set_lattice_contains_variable (dest_lat);
994 return ret;
997 /* Propagate values across jump function JFUNC that is associated with edge CS
998 and put the values into DEST_LAT. */
1000 static bool
1001 propagate_accross_jump_function (struct cgraph_edge *cs,
1002 struct ipa_jump_func *jfunc,
1003 struct ipcp_lattice *dest_lat)
1005 if (dest_lat->bottom)
1006 return false;
1008 if (jfunc->type == IPA_JF_CONST
1009 || jfunc->type == IPA_JF_KNOWN_TYPE)
1011 tree val;
1013 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1014 val = jfunc->value.base_binfo;
1015 else
1016 val = jfunc->value.constant;
1017 return add_value_to_lattice (dest_lat, val, cs, NULL, 0);
1019 else if (jfunc->type == IPA_JF_PASS_THROUGH
1020 || jfunc->type == IPA_JF_ANCESTOR)
1022 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1023 struct ipcp_lattice *src_lat;
1024 int src_idx;
1025 bool ret;
1027 if (jfunc->type == IPA_JF_PASS_THROUGH)
1028 src_idx = jfunc->value.pass_through.formal_id;
1029 else
1030 src_idx = jfunc->value.ancestor.formal_id;
1032 src_lat = ipa_get_lattice (caller_info, src_idx);
1033 if (src_lat->bottom)
1034 return set_lattice_contains_variable (dest_lat);
1036 /* If we would need to clone the caller and cannot, do not propagate. */
1037 if (!ipcp_versionable_function_p (cs->caller)
1038 && (src_lat->contains_variable
1039 || (src_lat->values_count > 1)))
1040 return set_lattice_contains_variable (dest_lat);
1042 if (jfunc->type == IPA_JF_PASS_THROUGH)
1043 ret = propagate_vals_accross_pass_through (cs, jfunc, src_lat,
1044 dest_lat, src_idx);
1045 else
1046 ret = propagate_vals_accross_ancestor (cs, jfunc, src_lat, dest_lat,
1047 src_idx);
1049 if (src_lat->contains_variable)
1050 ret |= set_lattice_contains_variable (dest_lat);
1052 return ret;
1055 /* TODO: We currently do not handle member method pointers in IPA-CP (we only
1056    use them for indirect inlining); we should propagate them too.  */
1057 return set_lattice_contains_variable (dest_lat);
1060 /* Propagate constants from the caller to the callee of CS.  */
1063 static bool
1064 propagate_constants_accross_call (struct cgraph_edge *cs)
1066 struct ipa_node_params *callee_info;
1067 enum availability availability;
1068 struct cgraph_node *callee, *alias_or_thunk;
1069 struct ipa_edge_args *args;
1070 bool ret = false;
1071 int i, count;
1073 callee = cgraph_function_node (cs->callee, &availability);
1074 if (!callee->analyzed)
1075 return false;
1076 gcc_checking_assert (cgraph_function_with_gimple_body_p (callee));
1077 callee_info = IPA_NODE_REF (callee);
1078 if (ipa_is_called_with_var_arguments (callee_info))
1079 return false;
1081 args = IPA_EDGE_REF (cs);
1082 count = ipa_get_cs_argument_count (args);
1084 /* If this call goes through a thunk we must not propagate to the first (0th)
1085 parameter. However, we might need to uncover a thunk from below a series
1086 of aliases first. */
1087 alias_or_thunk = cs->callee;
1088 while (alias_or_thunk->alias)
1089 alias_or_thunk = cgraph_alias_aliased_node (alias_or_thunk);
1090 if (alias_or_thunk->thunk.thunk_p)
1092 ret |= set_lattice_contains_variable (ipa_get_lattice (callee_info, 0));
1093 i = 1;
1095 else
1096 i = 0;
1098 for (; i < count; i++)
1100 struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
1101 struct ipcp_lattice *dest_lat = ipa_get_lattice (callee_info, i);
1103 if (availability == AVAIL_OVERWRITABLE)
1104 ret |= set_lattice_contains_variable (dest_lat);
1105 else
1106 ret |= propagate_accross_jump_function (cs, jump_func, dest_lat);
1108 return ret;
1111 /* If an indirect edge IE can be turned into a direct one based on KNOWN_VALS
1112 (which can contain both constants and binfos) or KNOWN_BINFOS (which can be
1113    NULL), return the destination.  If a simple thunk delta must be applied too,
1114 store it to DELTA. */
1116 static tree
1117 get_indirect_edge_target (struct cgraph_edge *ie, tree *delta,
1118 VEC (tree, heap) *known_vals,
1119 VEC (tree, heap) *known_binfos)
1121 int param_index = ie->indirect_info->param_index;
1122 HOST_WIDE_INT token, anc_offset;
1123 tree otr_type;
1124 tree t;
1126 if (param_index == -1)
1127 return NULL_TREE;
1129 if (!ie->indirect_info->polymorphic)
1131 tree t = VEC_index (tree, known_vals, param_index);
1132 if (t &&
1133 TREE_CODE (t) == ADDR_EXPR
1134 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
1136 *delta = NULL_TREE;
1137 return TREE_OPERAND (t, 0);
1139 else
1140 return NULL_TREE;
1143 token = ie->indirect_info->otr_token;
1144 anc_offset = ie->indirect_info->anc_offset;
1145 otr_type = ie->indirect_info->otr_type;
1147 t = VEC_index (tree, known_vals, param_index);
1148 if (!t && known_binfos)
1149 t = VEC_index (tree, known_binfos, param_index);
1150 if (!t)
1151 return NULL_TREE;
1153 if (TREE_CODE (t) != TREE_BINFO)
1155 tree binfo;
1156 binfo = gimple_extract_devirt_binfo_from_cst (t);
1157 if (!binfo)
1158 return NULL_TREE;
1159 binfo = get_binfo_at_offset (binfo, anc_offset, otr_type);
1160 if (!binfo)
1161 return NULL_TREE;
1162 return gimple_get_virt_method_for_binfo (token, binfo, delta);
1164 else
1166 tree binfo;
1168 binfo = get_binfo_at_offset (t, anc_offset, otr_type);
1169 if (!binfo)
1170 return NULL_TREE;
1171 return gimple_get_virt_method_for_binfo (token, binfo, delta);
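/* For instance (a hypothetical example), for a polymorphic call a->foo ()
   whose controlling parameter has a known BINFO of a derived class D,
   get_binfo_at_offset above walks to the sub-object matching OTR_TYPE at
   ANC_OFFSET and gimple_get_virt_method_for_binfo then looks up the
   FUNCTION_DECL stored in virtual table slot TOKEN, which is what allows the
   indirect edge to be turned into a direct call.  */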
1175 /* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
1176 and KNOWN_BINFOS. */
1178 static int
1179 devirtualization_time_bonus (struct cgraph_node *node,
1180 VEC (tree, heap) *known_csts,
1181 VEC (tree, heap) *known_binfos)
1183 struct cgraph_edge *ie;
1184 int res = 0;
1186 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1188 struct cgraph_node *callee;
1189 struct inline_summary *isummary;
1190 tree delta, target;
1192 target = get_indirect_edge_target (ie, &delta, known_csts, known_binfos);
1193 if (!target)
1194 continue;
1196 /* Only bare minimum benefit for clearly un-inlineable targets. */
1197 res += 1;
1198 callee = cgraph_get_node (target);
1199 if (!callee || !callee->analyzed)
1200 continue;
1201 isummary = inline_summary (callee);
1202 if (!isummary->inlinable)
1203 continue;
1205 /* FIXME: The values below need re-considering and perhaps also
1206      integrating into the cost metrics, at least in some very basic way.  */
1207 if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
1208 res += 31;
1209 else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
1210 res += 15;
1211 else if (isummary->size <= MAX_INLINE_INSNS_AUTO
1212 || DECL_DECLARED_INLINE_P (callee->decl))
1213 res += 7;
1216 return res;
1219 /* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
1220    and SIZE_COST, the sum of frequencies of incoming edges to the potential
1221    new clone in FREQ_SUM and the sum of their profile counts in COUNT_SUM.  */
1223 static bool
1224 good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
1225 int freq_sum, gcov_type count_sum, int size_cost)
1227 if (time_benefit == 0
1228 || !flag_ipa_cp_clone
1229 || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
1230 return false;
1232 gcc_checking_assert (size_cost >= 0);
1234 /* FIXME: These decisions need tuning. */
1235 if (max_count)
1237 int evaluation, factor = (count_sum * 1000) / max_count;
1239 evaluation = (time_benefit * factor) / size_cost;
1241 if (dump_file && (dump_flags & TDF_DETAILS))
1242 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1243 "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
1244 ") -> evaluation: %i, threshold: %i\n",
1245 time_benefit, size_cost, (HOST_WIDE_INT) count_sum,
1246 evaluation, 500);
1248 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
1250 else
1252 int evaluation = (time_benefit * freq_sum) / size_cost;
1254 if (dump_file && (dump_flags & TDF_DETAILS))
1255 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1256 "size: %i, freq_sum: %i) -> evaluation: %i, threshold: %i\n",
1257 time_benefit, size_cost, freq_sum, evaluation,
1258 CGRAPH_FREQ_BASE /2);
1260 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
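/* A worked example of the heuristic above (made-up numbers, and assuming the
   default value of 500 for PARAM_IPA_CP_EVAL_THRESHOLD): without profile
   feedback, a time_benefit of 10 with a freq_sum of 2 * CGRAPH_FREQ_BASE
   (i.e. 2000) and a size_cost of 40 gives evaluation = 10 * 2000 / 40 = 500,
   which just reaches the threshold, whereas the same benefit coming from
   edges with a cumulative frequency of only CGRAPH_FREQ_BASE would evaluate
   to 250 and be rejected.  */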
1265 /* Allocate KNOWN_CSTS and KNOWN_BINFOS and populate them with values of
1266 parameters that are known independent of the context. INFO describes the
1267 function. If REMOVABLE_PARAMS_COST is non-NULL, the movement cost of all
1268 removable parameters will be stored in it. */
1270 static bool
1271 gather_context_independent_values (struct ipa_node_params *info,
1272 VEC (tree, heap) **known_csts,
1273 VEC (tree, heap) **known_binfos,
1274 int *removable_params_cost)
1276 int i, count = ipa_get_param_count (info);
1277 bool ret = false;
1279 *known_csts = NULL;
1280 *known_binfos = NULL;
1281 VEC_safe_grow_cleared (tree, heap, *known_csts, count);
1282 VEC_safe_grow_cleared (tree, heap, *known_binfos, count);
1284 if (removable_params_cost)
1285 *removable_params_cost = 0;
1287 for (i = 0; i < count ; i++)
1289 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
1291 if (ipa_lat_is_single_const (lat))
1293 struct ipcp_value *val = lat->values;
1294 if (TREE_CODE (val->value) != TREE_BINFO)
1296 VEC_replace (tree, *known_csts, i, val->value);
1297 if (removable_params_cost)
1298 *removable_params_cost
1299 += estimate_move_cost (TREE_TYPE (val->value));
1300 ret = true;
1302 else if (lat->virt_call)
1304 VEC_replace (tree, *known_binfos, i, val->value);
1305 ret = true;
1307 else if (removable_params_cost
1308 && !ipa_is_param_used (info, i))
1309 *removable_params_cost
1310 += estimate_move_cost (TREE_TYPE (ipa_get_param (info, i)));
1312 else if (removable_params_cost
1313 && !ipa_is_param_used (info, i))
1314 *removable_params_cost
1315 += estimate_move_cost (TREE_TYPE (ipa_get_param (info, i)));
1318 return ret;
1321 /* Iterate over known values of parameters of NODE and estimate the local
1322 effects in terms of time and size they have. */
1324 static void
1325 estimate_local_effects (struct cgraph_node *node)
1327 struct ipa_node_params *info = IPA_NODE_REF (node);
1328 int i, count = ipa_get_param_count (info);
1329 VEC (tree, heap) *known_csts, *known_binfos;
1330 bool always_const;
1331 int base_time = inline_summary (node)->time;
1332 int removable_params_cost;
1334 if (!count || !ipcp_versionable_function_p (node))
1335 return;
1337 if (dump_file && (dump_flags & TDF_DETAILS))
1338 fprintf (dump_file, "\nEstimating effects for %s/%i, base_time: %i.\n",
1339 cgraph_node_name (node), node->uid, base_time);
1341 always_const = gather_context_independent_values (info, &known_csts,
1342 &known_binfos,
1343 &removable_params_cost);
1344 if (always_const)
1346 struct caller_statistics stats;
1347 int time, size;
1349 init_caller_stats (&stats);
1350 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
1351 estimate_ipcp_clone_size_and_time (node, known_csts, &size, &time);
1352 time -= devirtualization_time_bonus (node, known_csts, known_binfos);
1353 time -= removable_params_cost;
1354 size -= stats.n_calls * removable_params_cost;
1356 if (dump_file)
1357 fprintf (dump_file, " - context independent values, size: %i, "
1358 "time_benefit: %i\n", size, base_time - time);
1360 if (size <= 0
1361 || cgraph_will_be_removed_from_program_if_no_direct_calls (node))
1363 info->clone_for_all_contexts = true;
1364 base_time = time;
1366 if (dump_file)
1367 fprintf (dump_file, " Decided to specialize for all "
1368 "known contexts, code not going to grow.\n");
1370 else if (good_cloning_opportunity_p (node, base_time - time,
1371 stats.freq_sum, stats.count_sum,
1372 size))
1374 if (size + overall_size <= max_new_size)
1376 info->clone_for_all_contexts = true;
1377 base_time = time;
1378 overall_size += size;
1380 if (dump_file)
1381 fprintf (dump_file, " Decided to specialize for all "
1382 "known contexts, growth deemed beneficial.\n");
1384 else if (dump_file && (dump_flags & TDF_DETAILS))
1385 fprintf (dump_file, " Not cloning for all contexts because "
1386 "max_new_size would be reached with %li.\n",
1387 size + overall_size);
1391 for (i = 0; i < count ; i++)
1393 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
1394 struct ipcp_value *val;
1395 int emc;
1397 if (lat->bottom
1398 || !lat->values
1399 || VEC_index (tree, known_csts, i)
1400 || VEC_index (tree, known_binfos, i))
1401 continue;
1403 for (val = lat->values; val; val = val->next)
1405 int time, size, time_benefit;
1407 if (TREE_CODE (val->value) != TREE_BINFO)
1409 VEC_replace (tree, known_csts, i, val->value);
1410 VEC_replace (tree, known_binfos, i, NULL_TREE);
1411 emc = estimate_move_cost (TREE_TYPE (val->value));
1413 else if (lat->virt_call)
1415 VEC_replace (tree, known_csts, i, NULL_TREE);
1416 VEC_replace (tree, known_binfos, i, val->value);
1417 emc = 0;
1419 else
1420 continue;
1422 estimate_ipcp_clone_size_and_time (node, known_csts, &size, &time);
1423 time_benefit = base_time - time
1424 + devirtualization_time_bonus (node, known_csts, known_binfos)
1425 + removable_params_cost + emc;
1427 if (dump_file && (dump_flags & TDF_DETAILS))
1429 fprintf (dump_file, " - estimates for value ");
1430 print_ipcp_constant_value (dump_file, val->value);
1431 fprintf (dump_file, " for parameter ");
1432 print_generic_expr (dump_file, ipa_get_param (info, i), 0);
1433 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
1434 time_benefit, size);
1437 val->local_time_benefit = time_benefit;
1438 val->local_size_cost = size;
1442 VEC_free (tree, heap, known_csts);
1443 VEC_free (tree, heap, known_binfos);
1447 /* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
1448 topological sort of values. */
1450 static void
1451 add_val_to_toposort (struct ipcp_value *cur_val)
1453 static int dfs_counter = 0;
1454 static struct ipcp_value *stack;
1455 struct ipcp_value_source *src;
1457 if (cur_val->dfs)
1458 return;
1460 dfs_counter++;
1461 cur_val->dfs = dfs_counter;
1462 cur_val->low_link = dfs_counter;
1464 cur_val->topo_next = stack;
1465 stack = cur_val;
1466 cur_val->on_stack = true;
1468 for (src = cur_val->sources; src; src = src->next)
1469 if (src->val)
1471 if (src->val->dfs == 0)
1473 add_val_to_toposort (src->val);
1474 if (src->val->low_link < cur_val->low_link)
1475 cur_val->low_link = src->val->low_link;
1477 else if (src->val->on_stack
1478 && src->val->dfs < cur_val->low_link)
1479 cur_val->low_link = src->val->dfs;
1482 if (cur_val->dfs == cur_val->low_link)
1484 struct ipcp_value *v, *scc_list = NULL;
1488 v = stack;
1489 stack = v->topo_next;
1490 v->on_stack = false;
1492 v->scc_next = scc_list;
1493 scc_list = v;
1495 while (v != cur_val);
1497 cur_val->topo_next = values_topo;
1498 values_topo = cur_val;
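/* The function above is essentially Tarjan's strongly connected component
   algorithm applied to the graph of value dependencies.  For illustration (a
   hypothetical case), if f simply forwards its parameter to g and g forwards
   the same value back to f, the two ipcp_values are sources of each other,
   form a single SCC chained via scc_next and are added to values_topo
   together, so propagate_effects later sums their local benefits and costs
   as a unit.  */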
1502 /* Add all values in lattices associated with NODE to the topological sort if
1503 they are not there yet. */
1505 static void
1506 add_all_node_vals_to_toposort (struct cgraph_node *node)
1508 struct ipa_node_params *info = IPA_NODE_REF (node);
1509 int i, count = ipa_get_param_count (info);
1511 for (i = 0; i < count ; i++)
1513 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
1514 struct ipcp_value *val;
1516 if (lat->bottom || !lat->values)
1517 continue;
1518 for (val = lat->values; val; val = val->next)
1519 add_val_to_toposort (val);
1523 /* One pass of constant propagation along the call graph edges, from callers
1524    to callees (requires topological ordering in TOPO), iterating over strongly
1525    connected components.  */
1527 static void
1528 propagate_constants_topo (struct topo_info *topo)
1530 int i;
1532 for (i = topo->nnodes - 1; i >= 0; i--)
1534 struct cgraph_node *v, *node = topo->order[i];
1535 struct ipa_dfs_info *node_dfs_info;
1537 if (!cgraph_function_with_gimple_body_p (node))
1538 continue;
1540 node_dfs_info = (struct ipa_dfs_info *) node->aux;
1541 /* First, iteratively propagate within the strongly connected component
1542 until all lattices stabilize. */
1543 v = node_dfs_info->next_cycle;
1544 while (v)
1546 push_node_to_stack (topo, v);
1547 v = ((struct ipa_dfs_info *) v->aux)->next_cycle;
1550 v = node;
1551 while (v)
1553 struct cgraph_edge *cs;
1555 for (cs = v->callees; cs; cs = cs->next_callee)
1556 if (edge_within_scc (cs)
1557 && propagate_constants_accross_call (cs))
1558 push_node_to_stack (topo, cs->callee);
1559 v = pop_node_from_stack (topo);
1562       /* Afterwards, propagate along edges leading out of the SCC, calculate
1563          the local effects of the discovered constants and add all valid values
1564          to their topological sort.  */
1565 v = node;
1566 while (v)
1568 struct cgraph_edge *cs;
1570 estimate_local_effects (v);
1571 add_all_node_vals_to_toposort (v);
1572 for (cs = v->callees; cs; cs = cs->next_callee)
1573 if (!edge_within_scc (cs))
1574 propagate_constants_accross_call (cs);
1576 v = ((struct ipa_dfs_info *) v->aux)->next_cycle;
1581 /* Propagate the estimated effects of individual values along the topological
1582    order from the dependent values to those they depend on.  */
1584 static void
1585 propagate_effects (void)
1587 struct ipcp_value *base;
1589 for (base = values_topo; base; base = base->topo_next)
1591 struct ipcp_value_source *src;
1592 struct ipcp_value *val;
1593 int time = 0, size = 0;
1595 for (val = base; val; val = val->scc_next)
1597 time += val->local_time_benefit + val->prop_time_benefit;
1598 size += val->local_size_cost + val->prop_size_cost;
1601 for (val = base; val; val = val->scc_next)
1602 for (src = val->sources; src; src = src->next)
1603 if (src->val
1604 && cgraph_maybe_hot_edge_p (src->cs))
1606 src->val->prop_time_benefit += time;
1607 src->val->prop_size_cost += size;
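/* For instance (made-up numbers), if an SCC of values has a combined local
   time benefit of 20 and a combined size cost of 8, every still-hot edge
   that feeds a value of this SCC from a caller value adds 20 and 8 to that
   caller value's prop_time_benefit and prop_size_cost, so the caller's own
   cloning decision sees the transitive effect.  */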
1613 /* Propagate constants, binfos and their effects from the summaries
1614 interprocedurally. */
1616 static void
1617 ipcp_propagate_stage (struct topo_info *topo)
1619 struct cgraph_node *node;
1621 if (dump_file)
1622 fprintf (dump_file, "\n Propagating constants:\n\n");
1624 if (in_lto_p)
1625 ipa_update_after_lto_read ();
1628 FOR_EACH_DEFINED_FUNCTION (node)
1630 struct ipa_node_params *info = IPA_NODE_REF (node);
1632 determine_versionability (node);
1633 if (cgraph_function_with_gimple_body_p (node))
1635 info->lattices = XCNEWVEC (struct ipcp_lattice,
1636 ipa_get_param_count (info));
1637 initialize_node_lattices (node);
1639 if (node->count > max_count)
1640 max_count = node->count;
1641 overall_size += inline_summary (node)->self_size;
1644 max_new_size = overall_size;
1645 if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
1646 max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
1647 max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;
1649 if (dump_file)
1650 fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
1651 overall_size, max_new_size);
1653 propagate_constants_topo (topo);
1654 #ifdef ENABLE_CHECKING
1655 ipcp_verify_propagated_values ();
1656 #endif
1657 propagate_effects ();
1659 if (dump_file)
1661 fprintf (dump_file, "\nIPA lattices after all propagation:\n");
1662 print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
1666 /* Discover newly direct outgoing edges from NODE which is a new clone with
1667 known KNOWN_VALS and make them direct. */
1669 static void
1670 ipcp_discover_new_direct_edges (struct cgraph_node *node,
1671 VEC (tree, heap) *known_vals)
1673 struct cgraph_edge *ie, *next_ie;
1675 for (ie = node->indirect_calls; ie; ie = next_ie)
1677 tree delta, target;
1679 next_ie = ie->next_callee;
1680 target = get_indirect_edge_target (ie, &delta, known_vals, NULL);
1681 if (target)
1682 ipa_make_edge_direct_to_target (ie, target, delta);
1686 /* Vector of pointers forming linked lists of clones of an original cgraph
1687 edge. */
1689 static VEC (cgraph_edge_p, heap) *next_edge_clone;
1691 static inline void
1692 grow_next_edge_clone_vector (void)
1694 if (VEC_length (cgraph_edge_p, next_edge_clone)
1695 <= (unsigned) cgraph_edge_max_uid)
1696 VEC_safe_grow_cleared (cgraph_edge_p, heap, next_edge_clone,
1697 cgraph_edge_max_uid + 1);
1700 /* Edge duplication hook to grow the appropriate linked list in
1701 next_edge_clone. */
1703 static void
1704 ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
1705 __attribute__((unused)) void *data)
1707 grow_next_edge_clone_vector ();
1708 VEC_replace (cgraph_edge_p, next_edge_clone, dst->uid,
1709 VEC_index (cgraph_edge_p, next_edge_clone, src->uid));
1710 VEC_replace (cgraph_edge_p, next_edge_clone, src->uid, dst);
1713 /* Get the next clone in the linked list of clones of an edge. */
1715 static inline struct cgraph_edge *
1716 get_next_cgraph_edge_clone (struct cgraph_edge *cs)
1718 return VEC_index (cgraph_edge_p, next_edge_clone, cs->uid);
1721 /* Return true if edge CS does bring about the value described by SRC. */
1723 static bool
1724 cgraph_edge_brings_value_p (struct cgraph_edge *cs,
1725 struct ipcp_value_source *src)
1727 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1729 if (IPA_NODE_REF (cs->callee)->ipcp_orig_node
1730 || caller_info->node_dead)
1731 return false;
1732 if (!src->val)
1733 return true;
1735 if (caller_info->ipcp_orig_node)
1737 tree t = VEC_index (tree, caller_info->known_vals, src->index);
1738 return (t != NULL_TREE
1739 && values_equal_for_ipcp_p (src->val->value, t));
1741 else
1743 struct ipcp_lattice *lat = ipa_get_lattice (caller_info, src->index);
1744 if (ipa_lat_is_single_const (lat)
1745 && values_equal_for_ipcp_p (src->val->value, lat->values->value))
1746 return true;
1747 else
1748 return false;
1752 /* Given VAL, iterate over all its sources and if they still hold, add their
1753    edge frequency, their counts and their number into *FREQ_SUM, *COUNT_SUM
1754    and *CALLER_COUNT respectively.  Return true iff any of them is hot.  */
1756 static bool
1757 get_info_about_necessary_edges (struct ipcp_value *val, int *freq_sum,
1758 gcov_type *count_sum, int *caller_count)
1760 struct ipcp_value_source *src;
1761 int freq = 0, count = 0;
1762 gcov_type cnt = 0;
1763 bool hot = false;
1765 for (src = val->sources; src; src = src->next)
1767 struct cgraph_edge *cs = src->cs;
1768 while (cs)
1770 if (cgraph_edge_brings_value_p (cs, src))
1772 count++;
1773 freq += cs->frequency;
1774 cnt += cs->count;
1775 hot |= cgraph_maybe_hot_edge_p (cs);
1777 cs = get_next_cgraph_edge_clone (cs);
1781 *freq_sum = freq;
1782 *count_sum = cnt;
1783 *caller_count = count;
1784 return hot;
1787 /* Return a vector of incoming edges that do bring value VAL. It is assumed
1788 their number is known and equal to CALLER_COUNT. */
1790 static VEC (cgraph_edge_p,heap) *
1791 gather_edges_for_value (struct ipcp_value *val, int caller_count)
1793 struct ipcp_value_source *src;
1794 VEC (cgraph_edge_p,heap) *ret;
1796 ret = VEC_alloc (cgraph_edge_p, heap, caller_count);
1797 for (src = val->sources; src; src = src->next)
1799 struct cgraph_edge *cs = src->cs;
1800 while (cs)
1802 if (cgraph_edge_brings_value_p (cs, src))
1803 VEC_quick_push (cgraph_edge_p, ret, cs);
1804 cs = get_next_cgraph_edge_clone (cs);
1808 return ret;
1811 /* Construct a replacement map for a known VALUE of a formal parameter PARM.
1812 Return it or NULL if for some reason it cannot be created. */
1814 static struct ipa_replace_map *
1815 get_replacement_map (tree value, tree parm)
1817 tree req_type = TREE_TYPE (parm);
1818 struct ipa_replace_map *replace_map;
1820 if (!useless_type_conversion_p (req_type, TREE_TYPE (value)))
1822 if (fold_convertible_p (req_type, value))
1823 value = fold_build1 (NOP_EXPR, req_type, value);
1824 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (value)))
1825 value = fold_build1 (VIEW_CONVERT_EXPR, req_type, value);
1826 else
1828 if (dump_file)
1830 fprintf (dump_file, " const ");
1831 print_generic_expr (dump_file, value, 0);
1832 fprintf (dump_file, " can't be converted to param ");
1833 print_generic_expr (dump_file, parm, 0);
1834 fprintf (dump_file, "\n");
1836 return NULL;
1840 replace_map = ggc_alloc_ipa_replace_map ();
1841 if (dump_file)
1843 fprintf (dump_file, " replacing param ");
1844 print_generic_expr (dump_file, parm, 0);
1845 fprintf (dump_file, " with const ");
1846 print_generic_expr (dump_file, value, 0);
1847 fprintf (dump_file, "\n");
1849 replace_map->old_tree = parm;
1850 replace_map->new_tree = value;
1851 replace_map->replace_p = true;
1852 replace_map->ref_p = false;
1854 return replace_map;
1857 /* Dump new profiling counts */
1859 static void
1860 dump_profile_updates (struct cgraph_node *orig_node,
1861 struct cgraph_node *new_node)
1863 struct cgraph_edge *cs;
1865 fprintf (dump_file, " setting count of the specialized node to "
1866 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) new_node->count);
1867 for (cs = new_node->callees; cs ; cs = cs->next_callee)
1868 fprintf (dump_file, " edge to %s has count "
1869 HOST_WIDE_INT_PRINT_DEC "\n",
1870 cgraph_node_name (cs->callee), (HOST_WIDE_INT) cs->count);
1872 fprintf (dump_file, " setting count of the original node to "
1873 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) orig_node->count);
1874 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
1875 fprintf (dump_file, " edge to %s is left with "
1876 HOST_WIDE_INT_PRINT_DEC "\n",
1877 cgraph_node_name (cs->callee), (HOST_WIDE_INT) cs->count);
1880 /* After a specialized NEW_NODE version of ORIG_NODE has been created, update
1881 their profile information to reflect this. */
1883 static void
1884 update_profiling_info (struct cgraph_node *orig_node,
1885 struct cgraph_node *new_node)
1887 struct cgraph_edge *cs;
1888 struct caller_statistics stats;
1889 gcov_type new_sum, orig_sum;
1890 gcov_type remainder, orig_node_count = orig_node->count;
1892 if (orig_node_count == 0)
1893 return;
1895 init_caller_stats (&stats);
1896 cgraph_for_node_and_aliases (orig_node, gather_caller_stats, &stats, false);
1897 orig_sum = stats.count_sum;
1898 init_caller_stats (&stats);
1899 cgraph_for_node_and_aliases (new_node, gather_caller_stats, &stats, false);
1900 new_sum = stats.count_sum;
1902 if (orig_node_count < orig_sum + new_sum)
1904 if (dump_file)
1905 fprintf (dump_file, " Problem: node %s/%i has too low count "
1906 HOST_WIDE_INT_PRINT_DEC " while the sum of incoming "
1907 "counts is " HOST_WIDE_INT_PRINT_DEC "\n",
1908 cgraph_node_name (orig_node), orig_node->uid,
1909 (HOST_WIDE_INT) orig_node_count,
1910 (HOST_WIDE_INT) (orig_sum + new_sum));
1912 orig_node_count = (orig_sum + new_sum) * 12 / 10;
1913 if (dump_file)
1914 fprintf (dump_file, " proceeding by pretending it was "
1915 HOST_WIDE_INT_PRINT_DEC "\n",
1916 (HOST_WIDE_INT) orig_node_count);
1919 new_node->count = new_sum;
1920 remainder = orig_node_count - new_sum;
1921 orig_node->count = remainder;
1923 for (cs = new_node->callees; cs ; cs = cs->next_callee)
1924 if (cs->frequency)
1925 cs->count = cs->count * (new_sum * REG_BR_PROB_BASE
1926 / orig_node_count) / REG_BR_PROB_BASE;
1927 else
1928 cs->count = 0;
1930 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
1931 cs->count = cs->count * (remainder * REG_BR_PROB_BASE
1932 / orig_node_count) / REG_BR_PROB_BASE;
1934 if (dump_file)
1935 dump_profile_updates (orig_node, new_node);
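/* A small worked example of the scaling above (with made-up counts): if
   ORIG_NODE had a count of 1000 and the callers redirected to NEW_NODE
   account for a count sum of 600, NEW_NODE gets count 600, ORIG_NODE keeps
   the remainder 400, and every outgoing edge of either node has its count
   scaled by 600/1000 or 400/1000 respectively (going through
   REG_BR_PROB_BASE so the arithmetic stays in integers).  */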

/* Update the respective profiles of specialized NEW_NODE and the original
   ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
   have been redirected to the specialized version.  */

static void
update_specialized_profile (struct cgraph_node *new_node,
                            struct cgraph_node *orig_node,
                            gcov_type redirected_sum)
{
  struct cgraph_edge *cs;
  gcov_type new_node_count, orig_node_count = orig_node->count;

  if (dump_file)
    fprintf (dump_file, " the sum of counts of redirected edges is "
             HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) redirected_sum);
  if (orig_node_count == 0)
    return;

  gcc_assert (orig_node_count >= redirected_sum);

  new_node_count = new_node->count;
  new_node->count += redirected_sum;
  orig_node->count -= redirected_sum;
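
  /* Redirected callers make the clone's own outgoing edges proportionally
     hotter (by redirected_sum / new_node_count) and the original node's edges
     correspondingly colder (by redirected_sum / orig_node_count), with the
     decrement clamped so that no count drops below zero.  */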
  for (cs = new_node->callees; cs ; cs = cs->next_callee)
    if (cs->frequency)
      cs->count += cs->count * redirected_sum / new_node_count;
    else
      cs->count = 0;

  for (cs = orig_node->callees; cs ; cs = cs->next_callee)
    {
      gcov_type dec = cs->count * (redirected_sum * REG_BR_PROB_BASE
                                   / orig_node_count) / REG_BR_PROB_BASE;
      if (dec < cs->count)
        cs->count -= dec;
      else
        cs->count = 0;
    }

  if (dump_file)
    dump_profile_updates (orig_node, new_node);
}

/* Create a specialized version of NODE with known constants and types of
   parameters in KNOWN_VALS and redirect all edges in CALLERS to it.  */

static struct cgraph_node *
create_specialized_node (struct cgraph_node *node,
                         VEC (tree, heap) *known_vals,
                         VEC (cgraph_edge_p,heap) *callers)
{
  struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
  VEC (ipa_replace_map_p,gc)* replace_trees = NULL;
  struct cgraph_node *new_node;
  int i, count = ipa_get_param_count (info);
  bitmap args_to_skip;

  gcc_assert (!info->ipcp_orig_node);

  if (node->local.can_change_signature)
    {
      args_to_skip = BITMAP_GGC_ALLOC ();
      for (i = 0; i < count; i++)
        {
          tree t = VEC_index (tree, known_vals, i);

          if ((t && TREE_CODE (t) != TREE_BINFO)
              || !ipa_is_param_used (info, i))
            bitmap_set_bit (args_to_skip, i);
        }
    }
  else
    args_to_skip = NULL;
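
  /* Build the replacement list: each parameter for which a real constant
     (rather than a BINFO describing a known type) is available gets an
     ipa_replace_map so that the cloned body uses the constant directly.  */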
  for (i = 0; i < count ; i++)
    {
      tree t = VEC_index (tree, known_vals, i);
      if (t && TREE_CODE (t) != TREE_BINFO)
        {
          struct ipa_replace_map *replace_map;

          replace_map = get_replacement_map (t, ipa_get_param (info, i));
          if (replace_map)
            VEC_safe_push (ipa_replace_map_p, gc, replace_trees, replace_map);
        }
    }

  new_node = cgraph_create_virtual_clone (node, callers, replace_trees,
                                          args_to_skip, "constprop");
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, " the new node is %s/%i.\n",
             cgraph_node_name (new_node), new_node->uid);
  gcc_checking_assert (ipa_node_params_vector
                       && (VEC_length (ipa_node_params_t,
                                       ipa_node_params_vector)
                           > (unsigned) cgraph_max_uid));
  update_profiling_info (node, new_node);
  new_info = IPA_NODE_REF (new_node);
  new_info->ipcp_orig_node = node;
  new_info->known_vals = known_vals;

  ipcp_discover_new_direct_edges (new_node, known_vals);

  VEC_free (cgraph_edge_p, heap, callers);
  return new_node;
}

/* Given a NODE and a subset of its CALLERS, try to populate blank slots in
   KNOWN_VALS with constants and types that are also known for all of the
   CALLERS.  */

static void
find_more_values_for_callers_subset (struct cgraph_node *node,
                                     VEC (tree, heap) *known_vals,
                                     VEC (cgraph_edge_p,heap) *callers)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);

  for (i = 0; i < count ; i++)
    {
      struct cgraph_edge *cs;
      tree newval = NULL_TREE;
      int j;

      if (ipa_get_lattice (info, i)->bottom
          || VEC_index (tree, known_vals, i))
        continue;
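
      /* A value qualifies only if every caller in the subset passes the same
         constant for this parameter; the first unknown or disagreeing value
         disqualifies it.  */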
      FOR_EACH_VEC_ELT (cgraph_edge_p, callers, j, cs)
        {
          struct ipa_jump_func *jump_func;
          tree t;

          jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
          t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func);
          if (!t
              || (newval
                  && !values_equal_for_ipcp_p (t, newval)))
            {
              newval = NULL_TREE;
              break;
            }
          else
            newval = t;
        }

      if (newval)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " adding an extra known value ");
              print_ipcp_constant_value (dump_file, newval);
              fprintf (dump_file, " for parameter ");
              print_generic_expr (dump_file, ipa_get_param (info, i), 0);
              fprintf (dump_file, "\n");
            }

          VEC_replace (tree, known_vals, i, newval);
        }
    }
}

/* Given an original NODE and a VAL for which we have already created a
   specialized clone, check whether there are incoming edges that still lead
   into the old node but now also bring the requested value and conform to all
   other criteria such that they can be redirected to the specialized node.
   This function can therefore redirect the final edge in a SCC.  */

static void
perhaps_add_new_callers (struct cgraph_node *node, struct ipcp_value *val)
{
  struct ipa_node_params *dest_info = IPA_NODE_REF (val->spec_node);
  struct ipcp_value_source *src;
  int count = ipa_get_param_count (dest_info);
  gcov_type redirected_sum = 0;
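
  /* Walk all clones of the edges that brought VAL and redirect those that
     still target NODE itself, are sufficiently available, and agree with
     every value the specialized node was created for; their counts are
     accumulated so the profiles can be adjusted afterwards.  */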
  for (src = val->sources; src; src = src->next)
    {
      struct cgraph_edge *cs = src->cs;
      while (cs)
        {
          enum availability availability;
          bool insufficient = false;

          if (cgraph_function_node (cs->callee, &availability) == node
              && availability > AVAIL_OVERWRITABLE
              && cgraph_edge_brings_value_p (cs, src))
            {
              struct ipa_node_params *caller_info;
              struct ipa_edge_args *args;
              int i;

              caller_info = IPA_NODE_REF (cs->caller);
              args = IPA_EDGE_REF (cs);
              for (i = 0; i < count; i++)
                {
                  struct ipa_jump_func *jump_func;
                  tree val, t;

                  val = VEC_index (tree, dest_info->known_vals, i);
                  if (!val)
                    continue;

                  jump_func = ipa_get_ith_jump_func (args, i);
                  t = ipa_value_from_jfunc (caller_info, jump_func);
                  if (!t || !values_equal_for_ipcp_p (val, t))
                    {
                      insufficient = true;
                      break;
                    }
                }

              if (!insufficient)
                {
                  if (dump_file)
                    fprintf (dump_file, " - adding an extra caller %s/%i"
                             " of %s/%i\n",
                             cgraph_node_name (cs->caller), cs->caller->uid,
                             cgraph_node_name (val->spec_node),
                             val->spec_node->uid);

                  cgraph_redirect_edge_callee (cs, val->spec_node);
                  redirected_sum += cs->count;
                }
            }
          cs = get_next_cgraph_edge_clone (cs);
        }
    }

  if (redirected_sum)
    update_specialized_profile (val->spec_node, node, redirected_sum);
}

/* Copy KNOWN_BINFOS to KNOWN_VALS.  */

static void
move_binfos_to_values (VEC (tree, heap) *known_vals,
                       VEC (tree, heap) *known_binfos)
{
  tree t;
  int i;

  for (i = 0; VEC_iterate (tree, known_binfos, i, t); i++)
    if (t)
      VEC_replace (tree, known_vals, i, t);
}

/* Decide whether and what specialized clones of NODE should be created.  */

static bool
decide_whether_version_node (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);
  VEC (tree, heap) *known_csts, *known_binfos;
  bool ret = false;

  if (count == 0)
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEvaluating opportunities for %s/%i.\n",
             cgraph_node_name (node), node->uid);

  gather_context_independent_values (info, &known_csts, &known_binfos,
                                     NULL);

  for (i = 0; i < count ; i++)
    {
      struct ipcp_lattice *lat = ipa_get_lattice (info, i);
      struct ipcp_value *val;

      if (lat->bottom
          || VEC_index (tree, known_csts, i)
          || VEC_index (tree, known_binfos, i))
        continue;

      for (val = lat->values; val; val = val->next)
        {
          int freq_sum, caller_count;
          gcov_type count_sum;
          VEC (cgraph_edge_p, heap) *callers;
          VEC (tree, heap) *kv;

          if (val->spec_node)
            {
              perhaps_add_new_callers (node, val);
              continue;
            }
          else if (val->local_size_cost + overall_size > max_new_size)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, " Ignoring candidate value because "
                         "max_new_size would be reached with %li.\n",
                         val->local_size_cost + overall_size);
              continue;
            }
          else if (!get_info_about_necessary_edges (val, &freq_sum, &count_sum,
                                                    &caller_count))
            continue;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " - considering value ");
              print_ipcp_constant_value (dump_file, val->value);
              fprintf (dump_file, " for parameter ");
              print_generic_expr (dump_file, ipa_get_param (info, i), 0);
              fprintf (dump_file, " (caller_count: %i)\n", caller_count);
            }
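
          /* A clone is only worthwhile if it pays off either from the local
             effects of knowing this value alone, or once the time benefits
             and size costs propagated from callees are added in as well.  */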
          if (!good_cloning_opportunity_p (node, val->local_time_benefit,
                                           freq_sum, count_sum,
                                           val->local_size_cost)
              && !good_cloning_opportunity_p (node,
                                              val->local_time_benefit
                                              + val->prop_time_benefit,
                                              freq_sum, count_sum,
                                              val->local_size_cost
                                              + val->prop_size_cost))
            continue;

          if (dump_file)
            fprintf (dump_file, " Creating a specialized node of %s/%i.\n",
                     cgraph_node_name (node), node->uid);

          callers = gather_edges_for_value (val, caller_count);
          kv = VEC_copy (tree, heap, known_csts);
          move_binfos_to_values (kv, known_binfos);
          VEC_replace (tree, kv, i, val->value);
          find_more_values_for_callers_subset (node, kv, callers);
          val->spec_node = create_specialized_node (node, kv, callers);
          overall_size += val->local_size_cost;
          info = IPA_NODE_REF (node);

          /* TODO: If for some lattice there is only one other known value
             left, make a special node for it too.  */
          ret = true;

          VEC_replace (tree, kv, i, val->value);
        }
    }

  if (info->clone_for_all_contexts)
    {
      VEC (cgraph_edge_p, heap) *callers;

      if (dump_file)
        fprintf (dump_file, " - Creating a specialized node of %s/%i "
                 "for all known contexts.\n", cgraph_node_name (node),
                 node->uid);

      callers = collect_callers_of_node (node);
      move_binfos_to_values (known_csts, known_binfos);
      create_specialized_node (node, known_csts, callers);
      info = IPA_NODE_REF (node);
      info->clone_for_all_contexts = false;
      ret = true;
    }
  else
    VEC_free (tree, heap, known_csts);

  VEC_free (tree, heap, known_binfos);
  return ret;
}

/* Transitively mark all callees of NODE within the same SCC as not dead.  */

static void
spread_undeadness (struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    if (edge_within_scc (cs))
      {
        struct cgraph_node *callee;
        struct ipa_node_params *info;

        callee = cgraph_function_node (cs->callee, NULL);
        info = IPA_NODE_REF (callee);

        if (info->node_dead)
          {
            info->node_dead = 0;
            spread_undeadness (callee);
          }
      }
}

/* Return true if NODE has a caller from outside of its SCC that is not
   dead.  Worker callback for cgraph_for_node_and_aliases.  */

static bool
has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
                                      void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (cs->caller->thunk.thunk_p
        && cgraph_for_node_and_aliases (cs->caller,
                                        has_undead_caller_from_outside_scc_p,
                                        NULL, true))
      return true;
    else if (!edge_within_scc (cs)
             && !IPA_NODE_REF (cs->caller)->node_dead)
      return true;
  return false;
}

/* Identify nodes within the same SCC as NODE which are no longer needed
   because of new clones and will be removed as unreachable.  */

static void
identify_dead_nodes (struct cgraph_node *node)
{
  struct cgraph_node *v;
  for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
    if (cgraph_will_be_removed_from_program_if_no_direct_calls (v)
        && !cgraph_for_node_and_aliases (v,
                                         has_undead_caller_from_outside_scc_p,
                                         NULL, true))
      IPA_NODE_REF (v)->node_dead = 1;
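
  /* The marking above is provisional: any node that is still called from a
     not-dead member of the SCC is resurrected by spreading undeadness
     transitively from every node that survived the first pass.  */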
  for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
    if (!IPA_NODE_REF (v)->node_dead)
      spread_undeadness (v);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
        if (IPA_NODE_REF (v)->node_dead)
          fprintf (dump_file, " Marking node as dead: %s/%i.\n",
                   cgraph_node_name (v), v->uid);
    }
}

/* The decision stage.  Iterate over the topological order of call graph nodes
   TOPO and make specialized clones if deemed beneficial.  */

static void
ipcp_decision_stage (struct topo_info *topo)
{
  int i;

  if (dump_file)
    fprintf (dump_file, "\nIPA decision stage:\n\n");

  for (i = topo->nnodes - 1; i >= 0; i--)
    {
      struct cgraph_node *node = topo->order[i];
      bool change = false, iterate = true;
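
      /* Keep revisiting the members of this SCC until no new clones are
         created; specializing one member may expose further opportunities
         in the others, e.g. redirecting the remaining edges of a cycle.  */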
      while (iterate)
        {
          struct cgraph_node *v;
          iterate = false;
          for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
            if (cgraph_function_with_gimple_body_p (v)
                && ipcp_versionable_function_p (v))
              iterate |= decide_whether_version_node (v);

          change |= iterate;
        }
      if (change)
        identify_dead_nodes (node);
    }
}

/* The IPCP driver.  */

static unsigned int
ipcp_driver (void)
{
  struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
  struct topo_info topo;

  cgraph_remove_unreachable_nodes (true, dump_file);
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  grow_next_edge_clone_vector ();
  edge_duplication_hook_holder =
    cgraph_add_edge_duplication_hook (&ipcp_edge_duplication_hook, NULL);
  ipcp_values_pool = create_alloc_pool ("IPA-CP values",
                                        sizeof (struct ipcp_value), 32);
  ipcp_sources_pool = create_alloc_pool ("IPA-CP value sources",
                                         sizeof (struct ipcp_value_source), 64);
  if (dump_file)
    {
      fprintf (dump_file, "\nIPA structures before propagation:\n");
      if (dump_flags & TDF_DETAILS)
        ipa_print_all_params (dump_file);
      ipa_print_all_jump_functions (dump_file);
    }

  /* Topological sort.  */
  build_toporder_info (&topo);
  /* Do the interprocedural propagation.  */
  ipcp_propagate_stage (&topo);
  /* Decide what constant propagation and cloning should be performed.  */
  ipcp_decision_stage (&topo);

  /* Free all IPCP structures.  */
  free_toporder_info (&topo);
  VEC_free (cgraph_edge_p, heap, next_edge_clone);
  cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
  ipa_free_all_structures_after_ipa_cp ();
  if (dump_file)
    fprintf (dump_file, "\nIPA constant propagation end\n");
  return 0;
}

/* Initialization and computation of IPCP data structures.  This is the initial
   intraprocedural analysis of functions, which gathers information to be
   propagated later on.  */

static void
ipcp_generate_summary (void)
{
  struct cgraph_node *node;

  if (dump_file)
    fprintf (dump_file, "\nIPA constant propagation start:\n");
  ipa_register_cgraph_hooks ();

  /* FIXME: We could propagate through thunks happily and we could even be
     able to clone them, if needed.  Do that later.  */
  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    {
      /* Unreachable nodes should have been eliminated before ipcp.  */
      gcc_assert (node->needed || node->reachable);

      inline_summary (node)->versionable
        = tree_versionable_function_p (node->decl);
      ipa_analyze_node (node);
    }
}

/* Write ipcp summary for nodes in SET.  */

static void
ipcp_write_summary (cgraph_node_set set,
                    varpool_node_set vset ATTRIBUTE_UNUSED)
{
  ipa_prop_write_jump_functions (set);
}

/* Read ipcp summary.  */

static void
ipcp_read_summary (void)
{
  ipa_prop_read_jump_functions ();
}

/* Gate for IPCP optimization.  */

static bool
cgraph_gate_cp (void)
{
  /* FIXME: We should remove the optimize check after we ensure we never run
     IPA passes when not optimizing.  */
  return flag_ipa_cp && optimize;
}

struct ipa_opt_pass_d pass_ipa_cp =
{
 {
  IPA_PASS,
  "cp",                                 /* name */
  cgraph_gate_cp,                       /* gate */
  ipcp_driver,                          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IPA_CONSTANT_PROP,                 /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_cgraph |
  TODO_remove_functions | TODO_ggc_collect /* todo_flags_finish */
 },
 ipcp_generate_summary,                 /* generate_summary */
 ipcp_write_summary,                    /* write_summary */
 ipcp_read_summary,                     /* read_summary */
 NULL,                                  /* write_optimization_summary */
 NULL,                                  /* read_optimization_summary */
 NULL,                                  /* stmt_fixup */
 0,                                     /* TODOs */
 NULL,                                  /* function_transform */
 NULL,                                  /* variable_transform */