gcc/ipa-cp.c
1 /* Interprocedural constant propagation
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
6 <mjambor@suse.cz>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 /* Interprocedural constant propagation (IPA-CP).
26 The goal of this transformation is to
28 1) discover functions which are always invoked with some arguments with the
29 same known constant values and modify the functions so that the
30 subsequent optimizations can take advantage of the knowledge, and
32 2) partial specialization - create specialized versions of functions
33 transformed in this way if some parameters are known constants only in
34 certain contexts but the estimated tradeoff between speedup and size cost
35 is deemed good.
37 The algorithm also propagates types and attempts to perform type based
38 devirtualization. Types are propagated much like constants.
40 The algorithm basically consists of three stages. In the first, functions
41 are analyzed one at a time and jump functions are constructed for all known
42 call-sites. In the second phase, the pass propagates information from the
43 jump functions across the call to reveal what values are available at what
44 call sites, performs estimations of effects of known values on functions and
45 their callees, and finally decides what specialized extra versions should be
46 created. In the third, the special versions materialize and appropriate
47 calls are redirected.
49 The algorithm used is to a certain extent based on "Interprocedural Constant
50 Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
51 Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
52 Cooper, Mary W. Hall, and Ken Kennedy.
55 First stage - intraprocedural analysis
56 =======================================
58 This phase computes jump_function and modification flags.
60 A jump function for a call-site represents the values passed as an actual
61 arguments of a given call-site. In principle, there are three types of
62 values:
64 Pass through - the caller's formal parameter is passed as an actual
65 argument, plus an operation on it can be performed.
66 Constant - a constant is passed as an actual argument.
67 Unknown - neither of the above.
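   As a purely illustrative example (not part of the pass itself), consider a
   caller such as

     static int callee (int a, int b, int c);
     int caller (int x, int *p) { return callee (x + 1, 7, *p); }

   Here the call-site gets a pass-through jump function with a PLUS_EXPR
   operation for the first argument, a constant jump function holding 7 for
   the second one, and an unknown jump function for the third one because the
   value loaded from *P is not known at compile time.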
69 All jump function types are described in detail in ipa-prop.h, together with
70 the data structures that represent them and methods of accessing them.
72 ipcp_generate_summary() is the main function of the first stage.
74 Second stage - interprocedural analysis
75 ========================================
77 This stage is itself divided into two phases. In the first, we propagate
78 known values over the call graph, in the second, we make cloning decisions.
79 It uses a different algorithm than the one in Callahan's original paper.
81 First, we traverse the functions topologically from callers to callees and,
82 for each strongly connected component (SCC), we propagate constants
83 according to previously computed jump functions. We also record what known
84 values depend on other known values and estimate local effects. Finally, we
85 propagate cumulative information about these effects from dependent values
86 to those on which they depend.
88 Second, we again traverse the call graph in the same topological order and
89 make clones for functions which we know are called with the same values in
90 all contexts and decide about extra specialized clones of functions just for
91 some contexts - these decisions are based on both local estimates and
92 cumulative estimates propagated from callees.
94 ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
95 second stage.
97 Third phase - materialization of clones, call statement updates.
98 ============================================
100 This stage is currently performed by call graph code (mainly in cgraphunit.c
101 and tree-inline.c) according to instructions inserted to the call graph by
102 the second stage. */
104 #include "config.h"
105 #include "system.h"
106 #include "coretypes.h"
107 #include "tree.h"
108 #include "target.h"
109 #include "gimple.h"
110 #include "cgraph.h"
111 #include "ipa-prop.h"
112 #include "tree-flow.h"
113 #include "tree-pass.h"
114 #include "flags.h"
115 #include "timevar.h"
116 #include "diagnostic.h"
117 #include "tree-pretty-print.h"
118 #include "tree-dump.h"
119 #include "tree-inline.h"
120 #include "fibheap.h"
121 #include "params.h"
122 #include "ipa-inline.h"
123 #include "ipa-utils.h"
125 struct ipcp_value;
127 /* Describes a particular source for an IPA-CP value. */
129 struct ipcp_value_source
131 /* The incoming edge that brought the value. */
132 struct cgraph_edge *cs;
133 /* If the jump function that resulted in this value was a pass-through or an
134 ancestor, this is the ipcp_value of the caller from which the described
135 value has been derived. Otherwise it is NULL. */
136 struct ipcp_value *val;
137 /* Next pointer in a linked list of sources of a value. */
138 struct ipcp_value_source *next;
139 /* If the jump function that resulted in this value was a pass-through or an
140 ancestor, this is the index of the parameter of the caller the jump
141 function references. */
142 int index;
145 /* Describes one particular value stored in struct ipcp_lattice. */
147 struct ipcp_value
149 /* The actual value for the given parameter. This is either an IPA invariant
150 or a TREE_BINFO describing a type that can be used for
151 devirtualization. */
152 tree value;
153 /* The list of sources from which this value originates. */
154 struct ipcp_value_source *sources;
155 /* Next pointers in a linked list of all values in a lattice. */
156 struct ipcp_value *next;
157 /* Next pointers in a linked list of values in a strongly connected component
158 of values. */
159 struct ipcp_value *scc_next;
160 /* Next pointers in a linked list of SCCs of values sorted topologically
161 according their sources. */
162 struct ipcp_value *topo_next;
163 /* A specialized node created for this value, NULL if none has been (so far)
164 created. */
165 struct cgraph_node *spec_node;
166 /* Depth first search number and low link for topological sorting of
167 values. */
168 int dfs, low_link;
169 /* Time benefit and size cost that specializing the function for this value
170 would bring about in this function alone. */
171 int local_time_benefit, local_size_cost;
172 /* Time benefit and size cost that specializing the function for this value
173 can bring about in its callees (transitively). */
174 int prop_time_benefit, prop_size_cost;
175 /* True if this value is currently on the topo-sort stack. */
176 bool on_stack;
179 /* Allocation pools for values and their sources in ipa-cp. */
181 alloc_pool ipcp_values_pool;
182 alloc_pool ipcp_sources_pool;
184 /* Lattice describing potential values of a formal parameter of a function and
185 some of their other properties. TOP is represented by a lattice with zero
186 values and with contains_variable and bottom flags cleared. BOTTOM is
187 represented by a lattice with the bottom flag set. In that case, values and
188 contains_variable flag should be disregarded. */
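/* Purely for illustration (this comment is not part of the propagation
   machinery): a parameter that every caller passes the constant 5 gets a
   lattice with a single value and contains_variable clear; if another caller
   passes an unknown SSA name, the value 5 is kept but contains_variable is
   set; and when the number of distinct values would exceed
   PARAM_IPA_CP_VALUE_LIST_SIZE, add_value_to_lattice below drops the whole
   lattice to BOTTOM.  */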
190 struct ipcp_lattice
192 /* The list of known values and types in this lattice. Note that values are
193 not deallocated if a lattice is set to bottom because there may be value
194 sources referencing them. */
195 struct ipcp_value *values;
196 /* Number of known values and types in this lattice. */
197 int values_count;
198 /* The lattice contains a variable component (in addition to values). */
199 bool contains_variable;
200 /* The value of the lattice is bottom (i.e. variable and unusable for any
201 propagation). */
202 bool bottom;
203 /* There is a virtual call based on this parameter. */
204 bool virt_call;
207 /* Maximal count found in program. */
209 static gcov_type max_count;
211 /* Original overall size of the program. */
213 static long overall_size, max_new_size;
215 /* Head of the linked list of topologically sorted values. */
217 static struct ipcp_value *values_topo;
219 /* Return the lattice corresponding to the Ith formal parameter of the function
220 described by INFO. */
221 static inline struct ipcp_lattice *
222 ipa_get_lattice (struct ipa_node_params *info, int i)
224 gcc_assert (i >= 0 && i < ipa_get_param_count (info));
225 gcc_checking_assert (!info->ipcp_orig_node);
226 gcc_checking_assert (info->lattices);
227 return &(info->lattices[i]);
230 /* Return whether LAT is a lattice with a single constant and without an
231 undefined value. */
233 static inline bool
234 ipa_lat_is_single_const (struct ipcp_lattice *lat)
236 if (lat->bottom
237 || lat->contains_variable
238 || lat->values_count != 1)
239 return false;
240 else
241 return true;
244 /* Return true iff the CS is an edge within a strongly connected component as
245 computed by ipa_reduced_postorder. */
247 static inline bool
248 edge_within_scc (struct cgraph_edge *cs)
250 struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->aux;
251 struct ipa_dfs_info *callee_dfs;
252 struct cgraph_node *callee = cgraph_function_node (cs->callee, NULL);
254 callee_dfs = (struct ipa_dfs_info *) callee->aux;
255 return (caller_dfs
256 && callee_dfs
257 && caller_dfs->scc_no == callee_dfs->scc_no);
260 /* Print V which is extracted from a value in a lattice to F. */
262 static void
263 print_ipcp_constant_value (FILE * f, tree v)
265 if (TREE_CODE (v) == TREE_BINFO)
267 fprintf (f, "BINFO ");
268 print_generic_expr (f, BINFO_TYPE (v), 0);
270 else if (TREE_CODE (v) == ADDR_EXPR
271 && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
273 fprintf (f, "& ");
274 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)), 0);
276 else
277 print_generic_expr (f, v, 0);
280 /* Print all ipcp_lattices of all functions to F. */
282 static void
283 print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
285 struct cgraph_node *node;
286 int i, count;
288 fprintf (f, "\nLattices:\n");
289 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
291 struct ipa_node_params *info;
293 info = IPA_NODE_REF (node);
294 fprintf (f, " Node: %s/%i:\n", cgraph_node_name (node), node->uid);
295 count = ipa_get_param_count (info);
296 for (i = 0; i < count; i++)
298 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
299 struct ipcp_value *val;
300 bool prev = false;
302 fprintf (f, " param [%d]: ", i);
303 if (lat->bottom)
305 fprintf (f, "BOTTOM\n");
306 continue;
309 if (!lat->values_count && !lat->contains_variable)
311 fprintf (f, "TOP\n");
312 continue;
315 if (lat->contains_variable)
317 fprintf (f, "VARIABLE");
318 prev = true;
319 if (dump_benefits)
320 fprintf (f, "\n");
323 for (val = lat->values; val; val = val->next)
325 if (dump_benefits && prev)
326 fprintf (f, " ");
327 else if (!dump_benefits && prev)
328 fprintf (f, ", ");
329 else
330 prev = true;
332 print_ipcp_constant_value (f, val->value);
334 if (dump_sources)
336 struct ipcp_value_source *s;
338 fprintf (f, " [from:");
339 for (s = val->sources; s; s = s->next)
340 fprintf (f, " %i(%i)", s->cs->caller->uid,s->cs->frequency);
341 fprintf (f, "]");
344 if (dump_benefits)
345 fprintf (f, " [loc_time: %i, loc_size: %i, "
346 "prop_time: %i, prop_size: %i]\n",
347 val->local_time_benefit, val->local_size_cost,
348 val->prop_time_benefit, val->prop_size_cost);
350 if (!dump_benefits)
351 fprintf (f, "\n");
356 /* Determine whether it is at all technically possible to create clones of NODE
357 and store this information in the ipa_node_params structure associated
358 with NODE. */
360 static void
361 determine_versionability (struct cgraph_node *node)
363 const char *reason = NULL;
365 /* There are a number of generic reasons functions cannot be versioned. We
366 also cannot remove parameters if there are type attributes such as fnspec
367 present. */
368 if (node->alias || node->thunk.thunk_p)
369 reason = "alias or thunk";
370 else if (!node->local.versionable)
371 reason = "not a tree_versionable_function";
372 else if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
373 reason = "insufficient body availability";
375 if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
376 fprintf (dump_file, "Function %s/%i is not versionable, reason: %s.\n",
377 cgraph_node_name (node), node->uid, reason);
379 node->local.versionable = (reason == NULL);
382 /* Return true if it is at all technically possible to create clones of a
383 NODE. */
385 static bool
386 ipcp_versionable_function_p (struct cgraph_node *node)
388 return node->local.versionable;
391 /* Structure holding accumulated information about callers of a node. */
393 struct caller_statistics
395 gcov_type count_sum;
396 int n_calls, n_hot_calls, freq_sum;
399 /* Initialize fields of STAT to zeroes. */
401 static inline void
402 init_caller_stats (struct caller_statistics *stats)
404 stats->count_sum = 0;
405 stats->n_calls = 0;
406 stats->n_hot_calls = 0;
407 stats->freq_sum = 0;
410 /* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
411 non-thunk incoming edges to NODE. */
413 static bool
414 gather_caller_stats (struct cgraph_node *node, void *data)
416 struct caller_statistics *stats = (struct caller_statistics *) data;
417 struct cgraph_edge *cs;
419 for (cs = node->callers; cs; cs = cs->next_caller)
420 if (cs->caller->thunk.thunk_p)
421 cgraph_for_node_and_aliases (cs->caller, gather_caller_stats,
422 stats, false);
423 else
425 stats->count_sum += cs->count;
426 stats->freq_sum += cs->frequency;
427 stats->n_calls++;
428 if (cgraph_maybe_hot_edge_p (cs))
429 stats->n_hot_calls ++;
431 return false;
435 /* Return true if this NODE is a viable candidate for cloning. */
437 static bool
438 ipcp_cloning_candidate_p (struct cgraph_node *node)
440 struct caller_statistics stats;
442 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
444 if (!flag_ipa_cp_clone)
446 if (dump_file)
447 fprintf (dump_file, "Not considering %s for cloning; "
448 "-fipa-cp-clone disabled.\n",
449 cgraph_node_name (node));
450 return false;
453 if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
455 if (dump_file)
456 fprintf (dump_file, "Not considering %s for cloning; "
457 "optimizing it for size.\n",
458 cgraph_node_name (node));
459 return false;
462 init_caller_stats (&stats);
463 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
465 if (inline_summary (node)->self_size < stats.n_calls)
467 if (dump_file)
468 fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
469 cgraph_node_name (node));
470 return true;
473 /* When profile is available and the function is hot, propagate into it even if
474 calls seem cold; constant propagation can improve the function's speed
475 significantly. */
476 if (max_count)
478 if (stats.count_sum > node->count * 90 / 100)
480 if (dump_file)
481 fprintf (dump_file, "Considering %s for cloning; "
482 "usually called directly.\n",
483 cgraph_node_name (node));
484 return true;
487 if (!stats.n_hot_calls)
489 if (dump_file)
490 fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
491 cgraph_node_name (node));
492 return false;
494 if (dump_file)
495 fprintf (dump_file, "Considering %s for cloning.\n",
496 cgraph_node_name (node));
497 return true;
500 /* Arrays representing a topological ordering of call graph nodes and a stack
501 of nodes used during constant propagation. */
503 struct topo_info
505 struct cgraph_node **order;
506 struct cgraph_node **stack;
507 int nnodes, stack_top;
510 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
512 static void
513 build_toporder_info (struct topo_info *topo)
515 topo->order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
516 topo->stack = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
517 topo->stack_top = 0;
518 topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
521 /* Free information about strongly connected components and the arrays in
522 TOPO. */
524 static void
525 free_toporder_info (struct topo_info *topo)
527 ipa_free_postorder_info ();
528 free (topo->order);
529 free (topo->stack);
532 /* Add NODE to the stack in TOPO, unless it is already there. */
534 static inline void
535 push_node_to_stack (struct topo_info *topo, struct cgraph_node *node)
537 struct ipa_node_params *info = IPA_NODE_REF (node);
538 if (info->node_enqueued)
539 return;
540 info->node_enqueued = 1;
541 topo->stack[topo->stack_top++] = node;
544 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
545 is empty. */
547 static struct cgraph_node *
548 pop_node_from_stack (struct topo_info *topo)
550 if (topo->stack_top)
552 struct cgraph_node *node;
553 topo->stack_top--;
554 node = topo->stack[topo->stack_top];
555 IPA_NODE_REF (node)->node_enqueued = 0;
556 return node;
558 else
559 return NULL;
562 /* Set lattice LAT to bottom and return true if it previously was not set as
563 such. */
565 static inline bool
566 set_lattice_to_bottom (struct ipcp_lattice *lat)
568 bool ret = !lat->bottom;
569 lat->bottom = true;
570 return ret;
573 /* Mark lattice as containing an unknown value and return true if it previously
574 was not marked as such. */
576 static inline bool
577 set_lattice_contains_variable (struct ipcp_lattice *lat)
579 bool ret = !lat->contains_variable;
580 lat->contains_variable = true;
581 return ret;
584 /* Initialize ipcp_lattices. */
586 static void
587 initialize_node_lattices (struct cgraph_node *node)
589 struct ipa_node_params *info = IPA_NODE_REF (node);
590 struct cgraph_edge *ie;
591 bool disable = false, variable = false;
592 int i;
594 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
595 if (!node->local.local)
597 /* When cloning is allowed, we can assume that externally visible
598 functions are not called. We will compensate for this by cloning
599 later. */
600 if (ipcp_versionable_function_p (node)
601 && ipcp_cloning_candidate_p (node))
602 variable = true;
603 else
604 disable = true;
607 if (disable || variable)
609 for (i = 0; i < ipa_get_param_count (info) ; i++)
611 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
612 if (disable)
613 set_lattice_to_bottom (lat);
614 else
615 set_lattice_contains_variable (lat);
617 if (dump_file && (dump_flags & TDF_DETAILS)
618 && node->alias && node->thunk.thunk_p)
619 fprintf (dump_file, "Marking all lattices of %s/%i as %s\n",
620 cgraph_node_name (node), node->uid,
621 disable ? "BOTTOM" : "VARIABLE");
624 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
625 if (ie->indirect_info->polymorphic)
627 gcc_checking_assert (ie->indirect_info->param_index >= 0);
628 ipa_get_lattice (info, ie->indirect_info->param_index)->virt_call = 1;
632 /* Return the result of a (possibly arithmetic) pass through jump function
633 JFUNC on the constant value INPUT. Return NULL_TREE if that cannot be
634 determined or cannot itself be considered an interprocedural invariant. */
636 static tree
637 ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
639 tree restype, res;
641 gcc_checking_assert (is_gimple_ip_invariant (input));
642 if (jfunc->value.pass_through.operation == NOP_EXPR)
643 return input;
645 if (TREE_CODE_CLASS (jfunc->value.pass_through.operation)
646 == tcc_comparison)
647 restype = boolean_type_node;
648 else
649 restype = TREE_TYPE (input);
650 res = fold_binary (jfunc->value.pass_through.operation, restype,
651 input, jfunc->value.pass_through.operand);
653 if (res && !is_gimple_ip_invariant (res))
654 return NULL_TREE;
656 return res;
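/* For instance (example only), given a pass-through jump function describing
   "parameter + 4" (PLUS_EXPR with operand 4) and INPUT being the constant 6,
   fold_binary above yields the invariant 10; for a tcc_comparison operation
   the folding is done in boolean_type_node instead.  */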
659 /* Return the result of an ancestor jump function JFUNC on the constant value
660 INPUT. Return NULL_TREE if that cannot be determined. */
662 static tree
663 ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
665 if (TREE_CODE (input) == ADDR_EXPR)
667 tree t = TREE_OPERAND (input, 0);
668 t = build_ref_for_offset (EXPR_LOCATION (t), t,
669 jfunc->value.ancestor.offset,
670 jfunc->value.ancestor.type, NULL, false);
671 return build_fold_addr_expr (t);
673 else
674 return NULL_TREE;
677 /* Extract the actual BINFO being described by JFUNC which must be a known type
678 jump function. */
680 static tree
681 ipa_value_from_known_type_jfunc (struct ipa_jump_func *jfunc)
683 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
684 if (!base_binfo)
685 return NULL_TREE;
686 return get_binfo_at_offset (base_binfo,
687 jfunc->value.known_type.offset,
688 jfunc->value.known_type.component_type);
691 /* Determine whether JFUNC evaluates to a known value (that is either a
692 constant or a binfo) and if so, return it. Otherwise return NULL. INFO
693 describes the caller node so that pass-through jump functions can be
694 evaluated. */
696 static tree
697 ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
699 if (jfunc->type == IPA_JF_CONST)
700 return jfunc->value.constant;
701 else if (jfunc->type == IPA_JF_KNOWN_TYPE)
702 return ipa_value_from_known_type_jfunc (jfunc);
703 else if (jfunc->type == IPA_JF_PASS_THROUGH
704 || jfunc->type == IPA_JF_ANCESTOR)
706 tree input;
707 int idx;
709 if (jfunc->type == IPA_JF_PASS_THROUGH)
710 idx = jfunc->value.pass_through.formal_id;
711 else
712 idx = jfunc->value.ancestor.formal_id;
714 if (info->ipcp_orig_node)
715 input = VEC_index (tree, info->known_vals, idx);
716 else
718 struct ipcp_lattice *lat;
720 if (!info->lattices)
722 gcc_checking_assert (!flag_ipa_cp);
723 return NULL_TREE;
725 lat = ipa_get_lattice (info, idx);
726 if (!ipa_lat_is_single_const (lat))
727 return NULL_TREE;
728 input = lat->values->value;
731 if (!input)
732 return NULL_TREE;
734 if (jfunc->type == IPA_JF_PASS_THROUGH)
736 if (jfunc->value.pass_through.operation == NOP_EXPR)
737 return input;
738 else if (TREE_CODE (input) == TREE_BINFO)
739 return NULL_TREE;
740 else
741 return ipa_get_jf_pass_through_result (jfunc, input);
743 else
745 if (TREE_CODE (input) == TREE_BINFO)
746 return get_binfo_at_offset (input, jfunc->value.ancestor.offset,
747 jfunc->value.ancestor.type);
748 else
749 return ipa_get_jf_ancestor_result (jfunc, input);
752 else
753 return NULL_TREE;
756 /* Determine whether JFUNC evaluates to a constant and if so, return it.
757 Otherwise return NULL. INFO describes the caller node so that pass-through
758 jump functions can be evaluated. */
760 tree
761 ipa_cst_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
763 tree res = ipa_value_from_jfunc (info, jfunc);
765 if (res && TREE_CODE (res) == TREE_BINFO)
766 return NULL_TREE;
767 else
768 return res;
772 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
773 bottom, not containing a variable component and without any known value at
774 the same time. */
776 DEBUG_FUNCTION void
777 ipcp_verify_propagated_values (void)
779 struct cgraph_node *node;
781 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
783 struct ipa_node_params *info = IPA_NODE_REF (node);
784 int i, count = ipa_get_param_count (info);
786 for (i = 0; i < count; i++)
788 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
790 if (!lat->bottom
791 && !lat->contains_variable
792 && lat->values_count == 0)
794 if (dump_file)
796 fprintf (dump_file, "\nIPA lattices after constant "
797 "propagation:\n");
798 print_all_lattices (dump_file, true, false);
801 gcc_unreachable ();
807 /* Return true iff X and Y should be considered equal values by IPA-CP. */
809 static bool
810 values_equal_for_ipcp_p (tree x, tree y)
812 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
814 if (x == y)
815 return true;
817 if (TREE_CODE (x) == TREE_BINFO || TREE_CODE (y) == TREE_BINFO)
818 return false;
820 if (TREE_CODE (x) == ADDR_EXPR
821 && TREE_CODE (y) == ADDR_EXPR
822 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
823 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
824 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
825 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
826 else
827 return operand_equal_p (x, y, 0);
830 /* Add a new value source to VAL, marking that a value comes from edge CS and
831 (if the underlying jump function is a pass-through or an ancestor one) from
832 a caller value SRC_VAL of a caller parameter described by SRC_INDEX. */
834 static void
835 add_value_source (struct ipcp_value *val, struct cgraph_edge *cs,
836 struct ipcp_value *src_val, int src_idx)
838 struct ipcp_value_source *src;
840 src = (struct ipcp_value_source *) pool_alloc (ipcp_sources_pool);
841 src->cs = cs;
842 src->val = src_val;
843 src->index = src_idx;
845 src->next = val->sources;
846 val->sources = src;
850 /* Try to add NEWVAL to LAT, potentially creating a new struct ipcp_value for
851 it. CS, SRC_VAL and SRC_INDEX are meant for add_value_source and have the
852 same meaning. */
854 static bool
855 add_value_to_lattice (struct ipcp_lattice *lat, tree newval,
856 struct cgraph_edge *cs, struct ipcp_value *src_val,
857 int src_idx)
859 struct ipcp_value *val;
861 if (lat->bottom)
862 return false;
865 for (val = lat->values; val; val = val->next)
866 if (values_equal_for_ipcp_p (val->value, newval))
868 if (edge_within_scc (cs))
870 struct ipcp_value_source *s;
871 for (s = val->sources; s ; s = s->next)
872 if (s->cs == cs)
873 break;
874 if (s)
875 return false;
878 add_value_source (val, cs, src_val, src_idx);
879 return false;
882 if (lat->values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
884 /* We can only free sources, not the values themselves, because sources
885 of other values in this SCC might point to them. */
886 for (val = lat->values; val; val = val->next)
888 while (val->sources)
890 struct ipcp_value_source *src = val->sources;
891 val->sources = src->next;
892 pool_free (ipcp_sources_pool, src);
896 lat->values = NULL;
897 return set_lattice_to_bottom (lat);
900 lat->values_count++;
901 val = (struct ipcp_value *) pool_alloc (ipcp_values_pool);
902 memset (val, 0, sizeof (*val));
904 add_value_source (val, cs, src_val, src_idx);
905 val->value = newval;
906 val->next = lat->values;
907 lat->values = val;
908 return true;
911 /* Propagate values through a pass-through jump function JFUNC associated with
912 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
913 is the index of the source parameter. */
915 static bool
916 propagate_vals_accross_pass_through (struct cgraph_edge *cs,
917 struct ipa_jump_func *jfunc,
918 struct ipcp_lattice *src_lat,
919 struct ipcp_lattice *dest_lat,
920 int src_idx)
922 struct ipcp_value *src_val;
923 bool ret = false;
925 if (jfunc->value.pass_through.operation == NOP_EXPR)
926 for (src_val = src_lat->values; src_val; src_val = src_val->next)
927 ret |= add_value_to_lattice (dest_lat, src_val->value, cs,
928 src_val, src_idx);
929 /* Do not create new values when propagating within an SCC because if there
930 are arithmetic functions with circular dependencies, there is an infinite
931 number of them and we would just make lattices bottom. */
932 else if (edge_within_scc (cs))
933 ret = set_lattice_contains_variable (dest_lat);
934 else
935 for (src_val = src_lat->values; src_val; src_val = src_val->next)
937 tree cstval = src_val->value;
939 if (TREE_CODE (cstval) == TREE_BINFO)
941 ret |= set_lattice_contains_variable (dest_lat);
942 continue;
944 cstval = ipa_get_jf_pass_through_result (jfunc, cstval);
946 if (cstval)
947 ret |= add_value_to_lattice (dest_lat, cstval, cs, src_val, src_idx);
948 else
949 ret |= set_lattice_contains_variable (dest_lat);
952 return ret;
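/* To illustrate the SCC restriction above (example only): a self-recursive
   function such as "static int f (int i) { ... return f (i + 1); ... }" has a
   self-edge carrying an arithmetic pass-through, so creating a new value for
   every propagated constant would produce 1, 2, 3, ... without bound; within
   an SCC such jump functions therefore only mark the destination lattice as
   containing a variable.  */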
955 /* Propagate values through an ancestor jump function JFUNC associated with
956 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
957 is the index of the source parameter. */
959 static bool
960 propagate_vals_accross_ancestor (struct cgraph_edge *cs,
961 struct ipa_jump_func *jfunc,
962 struct ipcp_lattice *src_lat,
963 struct ipcp_lattice *dest_lat,
964 int src_idx)
966 struct ipcp_value *src_val;
967 bool ret = false;
969 if (edge_within_scc (cs))
970 return set_lattice_contains_variable (dest_lat);
972 for (src_val = src_lat->values; src_val; src_val = src_val->next)
974 tree t = src_val->value;
976 if (TREE_CODE (t) == TREE_BINFO)
977 t = get_binfo_at_offset (t, jfunc->value.ancestor.offset,
978 jfunc->value.ancestor.type);
979 else
980 t = ipa_get_jf_ancestor_result (jfunc, t);
982 if (t)
983 ret |= add_value_to_lattice (dest_lat, t, cs, src_val, src_idx);
984 else
985 ret |= set_lattice_contains_variable (dest_lat);
988 return ret;
991 /* Propagate values across jump function JFUNC that is associated with edge CS
992 and put the values into DEST_LAT. */
994 static bool
995 propagate_accross_jump_function (struct cgraph_edge *cs,
996 struct ipa_jump_func *jfunc,
997 struct ipcp_lattice *dest_lat)
999 if (dest_lat->bottom)
1000 return false;
1002 if (jfunc->type == IPA_JF_CONST
1003 || jfunc->type == IPA_JF_KNOWN_TYPE)
1005 tree val;
1007 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1009 val = ipa_value_from_known_type_jfunc (jfunc);
1010 if (!val)
1011 return set_lattice_contains_variable (dest_lat);
1013 else
1014 val = jfunc->value.constant;
1015 return add_value_to_lattice (dest_lat, val, cs, NULL, 0);
1017 else if (jfunc->type == IPA_JF_PASS_THROUGH
1018 || jfunc->type == IPA_JF_ANCESTOR)
1020 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1021 struct ipcp_lattice *src_lat;
1022 int src_idx;
1023 bool ret;
1025 if (jfunc->type == IPA_JF_PASS_THROUGH)
1026 src_idx = jfunc->value.pass_through.formal_id;
1027 else
1028 src_idx = jfunc->value.ancestor.formal_id;
1030 src_lat = ipa_get_lattice (caller_info, src_idx);
1031 if (src_lat->bottom)
1032 return set_lattice_contains_variable (dest_lat);
1034 /* If we would need to clone the caller and cannot, do not propagate. */
1035 if (!ipcp_versionable_function_p (cs->caller)
1036 && (src_lat->contains_variable
1037 || (src_lat->values_count > 1)))
1038 return set_lattice_contains_variable (dest_lat);
1040 if (jfunc->type == IPA_JF_PASS_THROUGH)
1041 ret = propagate_vals_accross_pass_through (cs, jfunc, src_lat,
1042 dest_lat, src_idx);
1043 else
1044 ret = propagate_vals_accross_ancestor (cs, jfunc, src_lat, dest_lat,
1045 src_idx);
1047 if (src_lat->contains_variable)
1048 ret |= set_lattice_contains_variable (dest_lat);
1050 return ret;
1053 /* TODO: We currently do not handle member method pointers in IPA-CP (we only
1054 use it for indirect inlining); we should propagate them too. */
1055 return set_lattice_contains_variable (dest_lat);
1058 /* Propagate constants from the caller to the callee of CS. Return true if
1059 any lattice of the callee has changed. */
1061 static bool
1062 propagate_constants_accross_call (struct cgraph_edge *cs)
1064 struct ipa_node_params *callee_info;
1065 enum availability availability;
1066 struct cgraph_node *callee, *alias_or_thunk;
1067 struct ipa_edge_args *args;
1068 bool ret = false;
1069 int i, args_count, parms_count;
1071 callee = cgraph_function_node (cs->callee, &availability);
1072 if (!callee->analyzed)
1073 return false;
1074 gcc_checking_assert (cgraph_function_with_gimple_body_p (callee));
1075 callee_info = IPA_NODE_REF (callee);
1077 args = IPA_EDGE_REF (cs);
1078 args_count = ipa_get_cs_argument_count (args);
1079 parms_count = ipa_get_param_count (callee_info);
1081 /* If this call goes through a thunk we must not propagate to the first (0th)
1082 parameter. However, we might need to uncover a thunk from below a series
1083 of aliases first. */
1084 alias_or_thunk = cs->callee;
1085 while (alias_or_thunk->alias)
1086 alias_or_thunk = cgraph_alias_aliased_node (alias_or_thunk);
1087 if (alias_or_thunk->thunk.thunk_p)
1089 ret |= set_lattice_contains_variable (ipa_get_lattice (callee_info, 0));
1090 i = 1;
1092 else
1093 i = 0;
1095 for (; (i < args_count) && (i < parms_count); i++)
1097 struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
1098 struct ipcp_lattice *dest_lat = ipa_get_lattice (callee_info, i);
1100 if (availability == AVAIL_OVERWRITABLE)
1101 ret |= set_lattice_contains_variable (dest_lat);
1102 else
1103 ret |= propagate_accross_jump_function (cs, jump_func, dest_lat);
1105 for (; i < parms_count; i++)
1106 ret |= set_lattice_contains_variable (ipa_get_lattice (callee_info, i));
1108 return ret;
1111 /* If an indirect edge IE can be turned into a direct one based on KNOWN_VALS
1112 (which can contain both constants and binfos) or KNOWN_BINFOS (which can be
1113 NULL) return the destination. */
1115 static tree
1116 get_indirect_edge_target (struct cgraph_edge *ie,
1117 VEC (tree, heap) *known_vals,
1118 VEC (tree, heap) *known_binfos)
1120 int param_index = ie->indirect_info->param_index;
1121 HOST_WIDE_INT token, anc_offset;
1122 tree otr_type;
1123 tree t;
1125 if (param_index == -1)
1126 return NULL_TREE;
1128 if (!ie->indirect_info->polymorphic)
1130 tree t = VEC_index (tree, known_vals, param_index);
1131 if (t &&
1132 TREE_CODE (t) == ADDR_EXPR
1133 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
1134 return TREE_OPERAND (t, 0);
1135 else
1136 return NULL_TREE;
1139 token = ie->indirect_info->otr_token;
1140 anc_offset = ie->indirect_info->anc_offset;
1141 otr_type = ie->indirect_info->otr_type;
1143 t = VEC_index (tree, known_vals, param_index);
1144 if (!t && known_binfos)
1145 t = VEC_index (tree, known_binfos, param_index);
1146 if (!t)
1147 return NULL_TREE;
1149 if (TREE_CODE (t) != TREE_BINFO)
1151 tree binfo;
1152 binfo = gimple_extract_devirt_binfo_from_cst (t);
1153 if (!binfo)
1154 return NULL_TREE;
1155 binfo = get_binfo_at_offset (binfo, anc_offset, otr_type);
1156 if (!binfo)
1157 return NULL_TREE;
1158 return gimple_get_virt_method_for_binfo (token, binfo);
1160 else
1162 tree binfo;
1164 binfo = get_binfo_at_offset (t, anc_offset, otr_type);
1165 if (!binfo)
1166 return NULL_TREE;
1167 return gimple_get_virt_method_for_binfo (token, binfo);
1171 /* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
1172 and KNOWN_BINFOS. */
1174 static int
1175 devirtualization_time_bonus (struct cgraph_node *node,
1176 VEC (tree, heap) *known_csts,
1177 VEC (tree, heap) *known_binfos)
1179 struct cgraph_edge *ie;
1180 int res = 0;
1182 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1184 struct cgraph_node *callee;
1185 struct inline_summary *isummary;
1186 tree target;
1188 target = get_indirect_edge_target (ie, known_csts, known_binfos);
1189 if (!target)
1190 continue;
1192 /* Only bare minimum benefit for clearly un-inlineable targets. */
1193 res += 1;
1194 callee = cgraph_get_node (target);
1195 if (!callee || !callee->analyzed)
1196 continue;
1197 isummary = inline_summary (callee);
1198 if (!isummary->inlinable)
1199 continue;
1201 /* FIXME: The values below need re-considering and perhaps also
1202 integrating into the cost metrics, at least in some very basic way. */
1203 if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
1204 res += 31;
1205 else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
1206 res += 15;
1207 else if (isummary->size <= MAX_INLINE_INSNS_AUTO
1208 || DECL_DECLARED_INLINE_P (callee->decl))
1209 res += 7;
1212 return res;
1215 /* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
1216 and SIZE_COST, the sum of frequencies of incoming edges to the potential
1217 new clone in FREQ_SUM and the sum of their profile counts in COUNT_SUM. */
1219 static bool
1220 good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
1221 int freq_sum, gcov_type count_sum, int size_cost)
1223 if (time_benefit == 0
1224 || !flag_ipa_cp_clone
1225 || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
1226 return false;
1228 gcc_checking_assert (size_cost >= 0);
1230 /* FIXME: These decisions need tuning. */
1231 if (max_count)
1233 int evaluation, factor = (count_sum * 1000) / max_count;
1235 evaluation = (time_benefit * factor) / size_cost;
1237 if (dump_file && (dump_flags & TDF_DETAILS))
1238 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1239 "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
1240 ") -> evaluation: %i, threshold: %i\n",
1241 time_benefit, size_cost, (HOST_WIDE_INT) count_sum,
1242 evaluation, 500);
1244 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
1246 else
1248 int evaluation = (time_benefit * freq_sum) / size_cost;
1250 if (dump_file && (dump_flags & TDF_DETAILS))
1251 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1252 "size: %i, freq_sum: %i) -> evaluation: %i, threshold: %i\n",
1253 time_benefit, size_cost, freq_sum, evaluation,
1254 CGRAPH_FREQ_BASE /2);
1256 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
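/* A made-up example of the evaluation above: without profile feedback, a
   value with an estimated time benefit of 20, incoming edge frequencies
   summing to 1500 and a size cost of 40 evaluates to 20 * 1500 / 40 = 750,
   comfortably above the threshold of 500 that the dumps above report.  */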
1261 /* Allocate KNOWN_CSTS and KNOWN_BINFOS and populate them with values of
1262 parameters that are known independent of the context. INFO describes the
1263 function. If REMOVABLE_PARAMS_COST is non-NULL, the movement cost of all
1264 removable parameters will be stored in it. */
1266 static bool
1267 gather_context_independent_values (struct ipa_node_params *info,
1268 VEC (tree, heap) **known_csts,
1269 VEC (tree, heap) **known_binfos,
1270 int *removable_params_cost)
1272 int i, count = ipa_get_param_count (info);
1273 bool ret = false;
1275 *known_csts = NULL;
1276 *known_binfos = NULL;
1277 VEC_safe_grow_cleared (tree, heap, *known_csts, count);
1278 VEC_safe_grow_cleared (tree, heap, *known_binfos, count);
1280 if (removable_params_cost)
1281 *removable_params_cost = 0;
1283 for (i = 0; i < count ; i++)
1285 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
1287 if (ipa_lat_is_single_const (lat))
1289 struct ipcp_value *val = lat->values;
1290 if (TREE_CODE (val->value) != TREE_BINFO)
1292 VEC_replace (tree, *known_csts, i, val->value);
1293 if (removable_params_cost)
1294 *removable_params_cost
1295 += estimate_move_cost (TREE_TYPE (val->value));
1296 ret = true;
1298 else if (lat->virt_call)
1300 VEC_replace (tree, *known_binfos, i, val->value);
1301 ret = true;
1303 else if (removable_params_cost
1304 && !ipa_is_param_used (info, i))
1305 *removable_params_cost
1306 += estimate_move_cost (TREE_TYPE (ipa_get_param (info, i)));
1308 else if (removable_params_cost
1309 && !ipa_is_param_used (info, i))
1310 *removable_params_cost
1311 += estimate_move_cost (TREE_TYPE (ipa_get_param (info, i)));
1314 return ret;
1317 /* Iterate over known values of parameters of NODE and estimate the local
1318 effects in terms of time and size they have. */
1320 static void
1321 estimate_local_effects (struct cgraph_node *node)
1323 struct ipa_node_params *info = IPA_NODE_REF (node);
1324 int i, count = ipa_get_param_count (info);
1325 VEC (tree, heap) *known_csts, *known_binfos;
1326 bool always_const;
1327 int base_time = inline_summary (node)->time;
1328 int removable_params_cost;
1330 if (!count || !ipcp_versionable_function_p (node))
1331 return;
1333 if (dump_file && (dump_flags & TDF_DETAILS))
1334 fprintf (dump_file, "\nEstimating effects for %s/%i, base_time: %i.\n",
1335 cgraph_node_name (node), node->uid, base_time);
1337 always_const = gather_context_independent_values (info, &known_csts,
1338 &known_binfos,
1339 &removable_params_cost);
1340 if (always_const)
1342 struct caller_statistics stats;
1343 int time, size;
1345 init_caller_stats (&stats);
1346 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
1347 estimate_ipcp_clone_size_and_time (node, known_csts, &size, &time);
1348 time -= devirtualization_time_bonus (node, known_csts, known_binfos);
1349 time -= removable_params_cost;
1350 size -= stats.n_calls * removable_params_cost;
1352 if (dump_file)
1353 fprintf (dump_file, " - context independent values, size: %i, "
1354 "time_benefit: %i\n", size, base_time - time);
1356 if (size <= 0
1357 || cgraph_will_be_removed_from_program_if_no_direct_calls (node))
1359 info->clone_for_all_contexts = true;
1360 base_time = time;
1362 if (dump_file)
1363 fprintf (dump_file, " Decided to specialize for all "
1364 "known contexts, code not going to grow.\n");
1366 else if (good_cloning_opportunity_p (node, base_time - time,
1367 stats.freq_sum, stats.count_sum,
1368 size))
1370 if (size + overall_size <= max_new_size)
1372 info->clone_for_all_contexts = true;
1373 base_time = time;
1374 overall_size += size;
1376 if (dump_file)
1377 fprintf (dump_file, " Decided to specialize for all "
1378 "known contexts, growth deemed beneficial.\n");
1380 else if (dump_file && (dump_flags & TDF_DETAILS))
1381 fprintf (dump_file, " Not cloning for all contexts because "
1382 "max_new_size would be reached with %li.\n",
1383 size + overall_size);
1387 for (i = 0; i < count ; i++)
1389 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
1390 struct ipcp_value *val;
1391 int emc;
1393 if (lat->bottom
1394 || !lat->values
1395 || VEC_index (tree, known_csts, i)
1396 || VEC_index (tree, known_binfos, i))
1397 continue;
1399 for (val = lat->values; val; val = val->next)
1401 int time, size, time_benefit;
1403 if (TREE_CODE (val->value) != TREE_BINFO)
1405 VEC_replace (tree, known_csts, i, val->value);
1406 VEC_replace (tree, known_binfos, i, NULL_TREE);
1407 emc = estimate_move_cost (TREE_TYPE (val->value));
1409 else if (lat->virt_call)
1411 VEC_replace (tree, known_csts, i, NULL_TREE);
1412 VEC_replace (tree, known_binfos, i, val->value);
1413 emc = 0;
1415 else
1416 continue;
1418 estimate_ipcp_clone_size_and_time (node, known_csts, &size, &time);
1419 time_benefit = base_time - time
1420 + devirtualization_time_bonus (node, known_csts, known_binfos)
1421 + removable_params_cost + emc;
1423 if (dump_file && (dump_flags & TDF_DETAILS))
1425 fprintf (dump_file, " - estimates for value ");
1426 print_ipcp_constant_value (dump_file, val->value);
1427 fprintf (dump_file, " for parameter ");
1428 print_generic_expr (dump_file, ipa_get_param (info, i), 0);
1429 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
1430 time_benefit, size);
1433 val->local_time_benefit = time_benefit;
1434 val->local_size_cost = size;
1438 VEC_free (tree, heap, known_csts);
1439 VEC_free (tree, heap, known_binfos);
1443 /* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
1444 topological sort of values. */
1446 static void
1447 add_val_to_toposort (struct ipcp_value *cur_val)
1449 static int dfs_counter = 0;
1450 static struct ipcp_value *stack;
1451 struct ipcp_value_source *src;
1453 if (cur_val->dfs)
1454 return;
1456 dfs_counter++;
1457 cur_val->dfs = dfs_counter;
1458 cur_val->low_link = dfs_counter;
1460 cur_val->topo_next = stack;
1461 stack = cur_val;
1462 cur_val->on_stack = true;
1464 for (src = cur_val->sources; src; src = src->next)
1465 if (src->val)
1467 if (src->val->dfs == 0)
1469 add_val_to_toposort (src->val);
1470 if (src->val->low_link < cur_val->low_link)
1471 cur_val->low_link = src->val->low_link;
1473 else if (src->val->on_stack
1474 && src->val->dfs < cur_val->low_link)
1475 cur_val->low_link = src->val->dfs;
1478 if (cur_val->dfs == cur_val->low_link)
1480 struct ipcp_value *v, *scc_list = NULL;
1484 v = stack;
1485 stack = v->topo_next;
1486 v->on_stack = false;
1488 v->scc_next = scc_list;
1489 scc_list = v;
1491 while (v != cur_val);
1493 cur_val->topo_next = values_topo;
1494 values_topo = cur_val;
1498 /* Add all values in lattices associated with NODE to the topological sort if
1499 they are not there yet. */
1501 static void
1502 add_all_node_vals_to_toposort (struct cgraph_node *node)
1504 struct ipa_node_params *info = IPA_NODE_REF (node);
1505 int i, count = ipa_get_param_count (info);
1507 for (i = 0; i < count ; i++)
1509 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
1510 struct ipcp_value *val;
1512 if (lat->bottom || !lat->values)
1513 continue;
1514 for (val = lat->values; val; val = val->next)
1515 add_val_to_toposort (val);
1519 /* One pass of constant propagation along the call graph edges, from callers
1520 to callees (requires topological ordering in TOPO); iterates over strongly
1521 connected components. */
1523 static void
1524 propagate_constants_topo (struct topo_info *topo)
1526 int i;
1528 for (i = topo->nnodes - 1; i >= 0; i--)
1530 struct cgraph_node *v, *node = topo->order[i];
1531 struct ipa_dfs_info *node_dfs_info;
1533 if (!cgraph_function_with_gimple_body_p (node))
1534 continue;
1536 node_dfs_info = (struct ipa_dfs_info *) node->aux;
1537 /* First, iteratively propagate within the strongly connected component
1538 until all lattices stabilize. */
1539 v = node_dfs_info->next_cycle;
1540 while (v)
1542 push_node_to_stack (topo, v);
1543 v = ((struct ipa_dfs_info *) v->aux)->next_cycle;
1546 v = node;
1547 while (v)
1549 struct cgraph_edge *cs;
1551 for (cs = v->callees; cs; cs = cs->next_callee)
1552 if (edge_within_scc (cs)
1553 && propagate_constants_accross_call (cs))
1554 push_node_to_stack (topo, cs->callee);
1555 v = pop_node_from_stack (topo);
1558 /* Afterwards, propagate along edges leading out of the SCC, calculate
1559 the local effects of the discovered constants and add all valid values
1560 to their topological sort. */
1561 v = node;
1562 while (v)
1564 struct cgraph_edge *cs;
1566 estimate_local_effects (v);
1567 add_all_node_vals_to_toposort (v);
1568 for (cs = v->callees; cs; cs = cs->next_callee)
1569 if (!edge_within_scc (cs))
1570 propagate_constants_accross_call (cs);
1572 v = ((struct ipa_dfs_info *) v->aux)->next_cycle;
1577 /* Propagate the estimated effects of individual values along the topological
1578 order from the dependent values to those they depend on. */
1580 static void
1581 propagate_effects (void)
1583 struct ipcp_value *base;
1585 for (base = values_topo; base; base = base->topo_next)
1587 struct ipcp_value_source *src;
1588 struct ipcp_value *val;
1589 int time = 0, size = 0;
1591 for (val = base; val; val = val->scc_next)
1593 time += val->local_time_benefit + val->prop_time_benefit;
1594 size += val->local_size_cost + val->prop_size_cost;
1597 for (val = base; val; val = val->scc_next)
1598 for (src = val->sources; src; src = src->next)
1599 if (src->val
1600 && cgraph_maybe_hot_edge_p (src->cs))
1602 src->val->prop_time_benefit += time;
1603 src->val->prop_size_cost += size;
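/* Illustration of the propagation above (not additional functionality): if a
   value B in a callee is only brought about by a value A in its caller, the
   time benefit and size cost accumulated for B's SCC are added to A's
   prop_time_benefit and prop_size_cost, provided the edge is maybe-hot, so
   that the decision stage can credit A with the effects it enables further
   down the call graph.  */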
1609 /* Propagate constants, binfos and their effects from the summaries
1610 interprocedurally. */
1612 static void
1613 ipcp_propagate_stage (struct topo_info *topo)
1615 struct cgraph_node *node;
1617 if (dump_file)
1618 fprintf (dump_file, "\n Propagating constants:\n\n");
1620 if (in_lto_p)
1621 ipa_update_after_lto_read ();
1624 FOR_EACH_DEFINED_FUNCTION (node)
1626 struct ipa_node_params *info = IPA_NODE_REF (node);
1628 determine_versionability (node);
1629 if (cgraph_function_with_gimple_body_p (node))
1631 info->lattices = XCNEWVEC (struct ipcp_lattice,
1632 ipa_get_param_count (info));
1633 initialize_node_lattices (node);
1635 if (node->count > max_count)
1636 max_count = node->count;
1637 overall_size += inline_summary (node)->self_size;
1640 max_new_size = overall_size;
1641 if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
1642 max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
1643 max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;
1645 if (dump_file)
1646 fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
1647 overall_size, max_new_size);
1649 propagate_constants_topo (topo);
1650 #ifdef ENABLE_CHECKING
1651 ipcp_verify_propagated_values ();
1652 #endif
1653 propagate_effects ();
1655 if (dump_file)
1657 fprintf (dump_file, "\nIPA lattices after all propagation:\n");
1658 print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
1662 /* Discover newly direct outgoing edges from NODE which is a new clone with
1663 known KNOWN_VALS and make them direct. */
1665 static void
1666 ipcp_discover_new_direct_edges (struct cgraph_node *node,
1667 VEC (tree, heap) *known_vals)
1669 struct cgraph_edge *ie, *next_ie;
1671 for (ie = node->indirect_calls; ie; ie = next_ie)
1673 tree target;
1675 next_ie = ie->next_callee;
1676 target = get_indirect_edge_target (ie, known_vals, NULL);
1677 if (target)
1678 ipa_make_edge_direct_to_target (ie, target);
1682 /* Vector of pointers that form linked lists of clones of an original cgraph
1683 edge. */
1685 static VEC (cgraph_edge_p, heap) *next_edge_clone;
1687 static inline void
1688 grow_next_edge_clone_vector (void)
1690 if (VEC_length (cgraph_edge_p, next_edge_clone)
1691 <= (unsigned) cgraph_edge_max_uid)
1692 VEC_safe_grow_cleared (cgraph_edge_p, heap, next_edge_clone,
1693 cgraph_edge_max_uid + 1);
1696 /* Edge duplication hook to grow the appropriate linked list in
1697 next_edge_clone. */
1699 static void
1700 ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
1701 __attribute__((unused)) void *data)
1703 grow_next_edge_clone_vector ();
1704 VEC_replace (cgraph_edge_p, next_edge_clone, dst->uid,
1705 VEC_index (cgraph_edge_p, next_edge_clone, src->uid));
1706 VEC_replace (cgraph_edge_p, next_edge_clone, src->uid, dst);
1709 /* Get the next clone in the linked list of clones of an edge. */
1711 static inline struct cgraph_edge *
1712 get_next_cgraph_edge_clone (struct cgraph_edge *cs)
1714 return VEC_index (cgraph_edge_p, next_edge_clone, cs->uid);
1717 /* Return true if edge CS does bring about the value described by SRC. */
1719 static bool
1720 cgraph_edge_brings_value_p (struct cgraph_edge *cs,
1721 struct ipcp_value_source *src)
1723 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1725 if (IPA_NODE_REF (cs->callee)->ipcp_orig_node
1726 || caller_info->node_dead)
1727 return false;
1728 if (!src->val)
1729 return true;
1731 if (caller_info->ipcp_orig_node)
1733 tree t = VEC_index (tree, caller_info->known_vals, src->index);
1734 return (t != NULL_TREE
1735 && values_equal_for_ipcp_p (src->val->value, t));
1737 else
1739 struct ipcp_lattice *lat = ipa_get_lattice (caller_info, src->index);
1740 if (ipa_lat_is_single_const (lat)
1741 && values_equal_for_ipcp_p (src->val->value, lat->values->value))
1742 return true;
1743 else
1744 return false;
1748 /* Given VAL, iterate over all its sources and if they still hold, add their
1749 edge frequencies, profile counts and number into *FREQ_SUM, *COUNT_SUM
1750 and *CALLER_COUNT respectively. */
1752 static bool
1753 get_info_about_necessary_edges (struct ipcp_value *val, int *freq_sum,
1754 gcov_type *count_sum, int *caller_count)
1756 struct ipcp_value_source *src;
1757 int freq = 0, count = 0;
1758 gcov_type cnt = 0;
1759 bool hot = false;
1761 for (src = val->sources; src; src = src->next)
1763 struct cgraph_edge *cs = src->cs;
1764 while (cs)
1766 if (cgraph_edge_brings_value_p (cs, src))
1768 count++;
1769 freq += cs->frequency;
1770 cnt += cs->count;
1771 hot |= cgraph_maybe_hot_edge_p (cs);
1773 cs = get_next_cgraph_edge_clone (cs);
1777 *freq_sum = freq;
1778 *count_sum = cnt;
1779 *caller_count = count;
1780 return hot;
1783 /* Return a vector of incoming edges that do bring value VAL. It is assumed
1784 their number is known and equal to CALLER_COUNT. */
1786 static VEC (cgraph_edge_p,heap) *
1787 gather_edges_for_value (struct ipcp_value *val, int caller_count)
1789 struct ipcp_value_source *src;
1790 VEC (cgraph_edge_p,heap) *ret;
1792 ret = VEC_alloc (cgraph_edge_p, heap, caller_count);
1793 for (src = val->sources; src; src = src->next)
1795 struct cgraph_edge *cs = src->cs;
1796 while (cs)
1798 if (cgraph_edge_brings_value_p (cs, src))
1799 VEC_quick_push (cgraph_edge_p, ret, cs);
1800 cs = get_next_cgraph_edge_clone (cs);
1804 return ret;
1807 /* Construct a replacement map for a known VALUE for a formal parameter PARM.
1808 Return it or NULL if for some reason it cannot be created. */
1810 static struct ipa_replace_map *
1811 get_replacement_map (tree value, tree parm)
1813 tree req_type = TREE_TYPE (parm);
1814 struct ipa_replace_map *replace_map;
1816 if (!useless_type_conversion_p (req_type, TREE_TYPE (value)))
1818 if (fold_convertible_p (req_type, value))
1819 value = fold_build1 (NOP_EXPR, req_type, value);
1820 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (value)))
1821 value = fold_build1 (VIEW_CONVERT_EXPR, req_type, value);
1822 else
1824 if (dump_file)
1826 fprintf (dump_file, " const ");
1827 print_generic_expr (dump_file, value, 0);
1828 fprintf (dump_file, " can't be converted to param ");
1829 print_generic_expr (dump_file, parm, 0);
1830 fprintf (dump_file, "\n");
1832 return NULL;
1836 replace_map = ggc_alloc_ipa_replace_map ();
1837 if (dump_file)
1839 fprintf (dump_file, " replacing param ");
1840 print_generic_expr (dump_file, parm, 0);
1841 fprintf (dump_file, " with const ");
1842 print_generic_expr (dump_file, value, 0);
1843 fprintf (dump_file, "\n");
1845 replace_map->old_tree = parm;
1846 replace_map->new_tree = value;
1847 replace_map->replace_p = true;
1848 replace_map->ref_p = false;
1850 return replace_map;
1853 /* Dump new profiling counts */
1855 static void
1856 dump_profile_updates (struct cgraph_node *orig_node,
1857 struct cgraph_node *new_node)
1859 struct cgraph_edge *cs;
1861 fprintf (dump_file, " setting count of the specialized node to "
1862 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) new_node->count);
1863 for (cs = new_node->callees; cs ; cs = cs->next_callee)
1864 fprintf (dump_file, " edge to %s has count "
1865 HOST_WIDE_INT_PRINT_DEC "\n",
1866 cgraph_node_name (cs->callee), (HOST_WIDE_INT) cs->count);
1868 fprintf (dump_file, " setting count of the original node to "
1869 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) orig_node->count);
1870 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
1871 fprintf (dump_file, " edge to %s is left with "
1872 HOST_WIDE_INT_PRINT_DEC "\n",
1873 cgraph_node_name (cs->callee), (HOST_WIDE_INT) cs->count);
1876 /* After a specialized NEW_NODE version of ORIG_NODE has been created, update
1877 their profile information to reflect this. */
1879 static void
1880 update_profiling_info (struct cgraph_node *orig_node,
1881 struct cgraph_node *new_node)
1883 struct cgraph_edge *cs;
1884 struct caller_statistics stats;
1885 gcov_type new_sum, orig_sum;
1886 gcov_type remainder, orig_node_count = orig_node->count;
1888 if (orig_node_count == 0)
1889 return;
1891 init_caller_stats (&stats);
1892 cgraph_for_node_and_aliases (orig_node, gather_caller_stats, &stats, false);
1893 orig_sum = stats.count_sum;
1894 init_caller_stats (&stats);
1895 cgraph_for_node_and_aliases (new_node, gather_caller_stats, &stats, false);
1896 new_sum = stats.count_sum;
1898 if (orig_node_count < orig_sum + new_sum)
1900 if (dump_file)
1901 fprintf (dump_file, " Problem: node %s/%i has too low count "
1902 HOST_WIDE_INT_PRINT_DEC " while the sum of incoming "
1903 "counts is " HOST_WIDE_INT_PRINT_DEC "\n",
1904 cgraph_node_name (orig_node), orig_node->uid,
1905 (HOST_WIDE_INT) orig_node_count,
1906 (HOST_WIDE_INT) (orig_sum + new_sum));
1908 orig_node_count = (orig_sum + new_sum) * 12 / 10;
1909 if (dump_file)
1910 fprintf (dump_file, " proceeding by pretending it was "
1911 HOST_WIDE_INT_PRINT_DEC "\n",
1912 (HOST_WIDE_INT) orig_node_count);
1915 new_node->count = new_sum;
1916 remainder = orig_node_count - new_sum;
1917 orig_node->count = remainder;
1919 for (cs = new_node->callees; cs ; cs = cs->next_callee)
1920 if (cs->frequency)
1921 cs->count = cs->count * (new_sum * REG_BR_PROB_BASE
1922 / orig_node_count) / REG_BR_PROB_BASE;
1923 else
1924 cs->count = 0;
1926 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
1927 cs->count = cs->count * (remainder * REG_BR_PROB_BASE
1928 / orig_node_count) / REG_BR_PROB_BASE;
1930 if (dump_file)
1931 dump_profile_updates (orig_node, new_node);
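/* Worked example for the code above (numbers invented): if ORIG_NODE had a
   count of 1000 and the callers redirected to the clone account for 600 of
   it, NEW_NODE gets a count of 600, ORIG_NODE keeps the remaining 400, and
   the counts of their outgoing edges are scaled by 600/1000 and 400/1000
   respectively, going through REG_BR_PROB_BASE to stay in integer
   arithmetic.  */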
/* Update the respective profiles of specialized NEW_NODE and the original
   ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
   have been redirected to the specialized version.  */

static void
update_specialized_profile (struct cgraph_node *new_node,
                            struct cgraph_node *orig_node,
                            gcov_type redirected_sum)
{
  struct cgraph_edge *cs;
  gcov_type new_node_count, orig_node_count = orig_node->count;

  if (dump_file)
    fprintf (dump_file, " the sum of counts of redirected edges is "
             HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) redirected_sum);
  if (orig_node_count == 0)
    return;

  gcc_assert (orig_node_count >= redirected_sum);

  new_node_count = new_node->count;
  new_node->count += redirected_sum;
  orig_node->count -= redirected_sum;

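  /* The specialized node's callee edges grow proportionally to the newly
     redirected count (REDIRECTED_SUM relative to the count the node had
     before this update), whereas the original node's callee edges lose the
     corresponding fraction of their counts, clamped at zero so that rounding
     never produces a negative count.  */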
  for (cs = new_node->callees; cs; cs = cs->next_callee)
    if (cs->frequency)
      cs->count += cs->count * redirected_sum / new_node_count;
    else
      cs->count = 0;

  for (cs = orig_node->callees; cs; cs = cs->next_callee)
    {
      gcov_type dec = cs->count * (redirected_sum * REG_BR_PROB_BASE
                                   / orig_node_count) / REG_BR_PROB_BASE;
      if (dec < cs->count)
        cs->count -= dec;
      else
        cs->count = 0;
    }

  if (dump_file)
    dump_profile_updates (orig_node, new_node);
}

/* Create a specialized version of NODE with known constants and types of
   parameters in KNOWN_VALS and redirect all edges in CALLERS to it.  */

static struct cgraph_node *
create_specialized_node (struct cgraph_node *node,
                         VEC (tree, heap) *known_vals,
                         VEC (cgraph_edge_p,heap) *callers)
{
  struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
  VEC (ipa_replace_map_p,gc)* replace_trees = NULL;
  struct cgraph_node *new_node;
  int i, count = ipa_get_param_count (info);
  bitmap args_to_skip;

  gcc_assert (!info->ipcp_orig_node);

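  /* When the signature can change, parameters that are either replaced by a
     known constant below or not used at all are recorded in ARGS_TO_SKIP so
     that the clone is created without them.  Otherwise the clone keeps the
     original signature and only the replacement maps are used.  */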
  if (node->local.can_change_signature)
    {
      args_to_skip = BITMAP_GGC_ALLOC ();
      for (i = 0; i < count; i++)
        {
          tree t = VEC_index (tree, known_vals, i);

          if ((t && TREE_CODE (t) != TREE_BINFO)
              || !ipa_is_param_used (info, i))
            bitmap_set_bit (args_to_skip, i);
        }
    }
  else
    {
      args_to_skip = NULL;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " cannot change function signature\n");
    }

  for (i = 0; i < count; i++)
    {
      tree t = VEC_index (tree, known_vals, i);
      if (t && TREE_CODE (t) != TREE_BINFO)
        {
          struct ipa_replace_map *replace_map;

          replace_map = get_replacement_map (t, ipa_get_param (info, i));
          if (replace_map)
            VEC_safe_push (ipa_replace_map_p, gc, replace_trees, replace_map);
        }
    }

  new_node = cgraph_create_virtual_clone (node, callers, replace_trees,
                                          args_to_skip, "constprop");
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, " the new node is %s/%i.\n",
             cgraph_node_name (new_node), new_node->uid);
  gcc_checking_assert (ipa_node_params_vector
                       && (VEC_length (ipa_node_params_t,
                                       ipa_node_params_vector)
                           > (unsigned) cgraph_max_uid));
  update_profiling_info (node, new_node);
  new_info = IPA_NODE_REF (new_node);
  new_info->ipcp_orig_node = node;
  new_info->known_vals = known_vals;

  ipcp_discover_new_direct_edges (new_node, known_vals);

  VEC_free (cgraph_edge_p, heap, callers);
  return new_node;
}

/* Given a NODE and a subset of its CALLERS, try to populate blank slots in
   KNOWN_VALS with constants and types that are also known for all of the
   CALLERS.  */

static void
find_more_values_for_callers_subset (struct cgraph_node *node,
                                     VEC (tree, heap) *known_vals,
                                     VEC (cgraph_edge_p,heap) *callers)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);

  for (i = 0; i < count; i++)
    {
      struct cgraph_edge *cs;
      tree newval = NULL_TREE;
      int j;

      if (ipa_get_lattice (info, i)->bottom
          || VEC_index (tree, known_vals, i))
        continue;

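      /* A value is accepted only if every caller in the subset passes the
         very same constant for this parameter; a missing argument or any
         disagreement between callers resets NEWVAL and abandons the
         parameter.  */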
      FOR_EACH_VEC_ELT (cgraph_edge_p, callers, j, cs)
        {
          struct ipa_jump_func *jump_func;
          tree t;

          if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
            {
              newval = NULL_TREE;
              break;
            }
          jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
          t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func);
          if (!t
              || (newval
                  && !values_equal_for_ipcp_p (t, newval)))
            {
              newval = NULL_TREE;
              break;
            }
          else
            newval = t;
        }

      if (newval)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " adding an extra known value ");
              print_ipcp_constant_value (dump_file, newval);
              fprintf (dump_file, " for parameter ");
              print_generic_expr (dump_file, ipa_get_param (info, i), 0);
              fprintf (dump_file, "\n");
            }

          VEC_replace (tree, known_vals, i, newval);
        }
    }
}

/* Given an original NODE and a VAL for which we have already created a
   specialized clone, check whether there are incoming edges that still lead
   into the old node but now also bring the requested value and conform to all
   other criteria such that they can be redirected to the specialized node.
   This function can therefore redirect the final edge in an SCC.  */

static void
perhaps_add_new_callers (struct cgraph_node *node, struct ipcp_value *val)
{
  struct ipa_node_params *dest_info = IPA_NODE_REF (val->spec_node);
  struct ipcp_value_source *src;
  int count = ipa_get_param_count (dest_info);
  gcov_type redirected_sum = 0;

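  /* Walk all clones of every edge that brought VAL (following the
     next-edge-clone chain) and redirect those that still call NODE, whose
     callee binding cannot be overwritten and whose arguments match every
     known value of the specialized node.  The counts of the redirected edges
     are accumulated so that the profiles can be adjusted afterwards.  */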
  for (src = val->sources; src; src = src->next)
    {
      struct cgraph_edge *cs = src->cs;
      while (cs)
        {
          enum availability availability;
          bool insufficient = false;

          if (cgraph_function_node (cs->callee, &availability) == node
              && availability > AVAIL_OVERWRITABLE
              && cgraph_edge_brings_value_p (cs, src))
            {
              struct ipa_node_params *caller_info;
              struct ipa_edge_args *args;
              int i;

              caller_info = IPA_NODE_REF (cs->caller);
              args = IPA_EDGE_REF (cs);
              for (i = 0; i < count; i++)
                {
                  struct ipa_jump_func *jump_func;
                  tree val, t;

                  val = VEC_index (tree, dest_info->known_vals, i);
                  if (!val)
                    continue;

                  if (i >= ipa_get_cs_argument_count (args))
                    {
                      insufficient = true;
                      break;
                    }
                  jump_func = ipa_get_ith_jump_func (args, i);
                  t = ipa_value_from_jfunc (caller_info, jump_func);
                  if (!t || !values_equal_for_ipcp_p (val, t))
                    {
                      insufficient = true;
                      break;
                    }
                }

              if (!insufficient)
                {
                  if (dump_file)
                    fprintf (dump_file, " - adding an extra caller %s/%i"
                             " of %s/%i\n",
                             cgraph_node_name (cs->caller), cs->caller->uid,
                             cgraph_node_name (val->spec_node),
                             val->spec_node->uid);

                  cgraph_redirect_edge_callee (cs, val->spec_node);
                  redirected_sum += cs->count;
                }
            }
          cs = get_next_cgraph_edge_clone (cs);
        }
    }

  if (redirected_sum)
    update_specialized_profile (val->spec_node, node, redirected_sum);
}


/* Copy every non-NULL entry of KNOWN_BINFOS into the corresponding slot of
   KNOWN_VALS.  */

static void
move_binfos_to_values (VEC (tree, heap) *known_vals,
                       VEC (tree, heap) *known_binfos)
{
  tree t;
  int i;

  for (i = 0; VEC_iterate (tree, known_binfos, i, t); i++)
    if (t)
      VEC_replace (tree, known_vals, i, t);
}


/* Decide whether and what specialized clones of NODE should be created.  */

static bool
decide_whether_version_node (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);
  VEC (tree, heap) *known_csts, *known_binfos;
  bool ret = false;

  if (count == 0)
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEvaluating opportunities for %s/%i.\n",
             cgraph_node_name (node), node->uid);

  gather_context_independent_values (info, &known_csts, &known_binfos,
                                     NULL);

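  /* KNOWN_CSTS and KNOWN_BINFOS now hold the values that are the same in
     every context; the loops below only examine lattices of the remaining
     parameters and consider one candidate value at a time.  */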
  for (i = 0; i < count; i++)
    {
      struct ipcp_lattice *lat = ipa_get_lattice (info, i);
      struct ipcp_value *val;

      if (lat->bottom
          || VEC_index (tree, known_csts, i)
          || VEC_index (tree, known_binfos, i))
        continue;

      for (val = lat->values; val; val = val->next)
        {
          int freq_sum, caller_count;
          gcov_type count_sum;
          VEC (cgraph_edge_p, heap) *callers;
          VEC (tree, heap) *kv;

          if (val->spec_node)
            {
              perhaps_add_new_callers (node, val);
              continue;
            }
          else if (val->local_size_cost + overall_size > max_new_size)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, " Ignoring candidate value because "
                         "max_new_size would be reached with %li.\n",
                         val->local_size_cost + overall_size);
              continue;
            }
          else if (!get_info_about_necessary_edges (val, &freq_sum, &count_sum,
                                                    &caller_count))
            continue;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " - considering value ");
              print_ipcp_constant_value (dump_file, val->value);
              fprintf (dump_file, " for parameter ");
              print_generic_expr (dump_file, ipa_get_param (info, i), 0);
              fprintf (dump_file, " (caller_count: %i)\n", caller_count);
            }

          if (!good_cloning_opportunity_p (node, val->local_time_benefit,
                                           freq_sum, count_sum,
                                           val->local_size_cost)
              && !good_cloning_opportunity_p (node,
                                              val->local_time_benefit
                                              + val->prop_time_benefit,
                                              freq_sum, count_sum,
                                              val->local_size_cost
                                              + val->prop_size_cost))
            continue;

          if (dump_file)
            fprintf (dump_file, " Creating a specialized node of %s/%i.\n",
                     cgraph_node_name (node), node->uid);

          callers = gather_edges_for_value (val, caller_count);
          kv = VEC_copy (tree, heap, known_csts);
          move_binfos_to_values (kv, known_binfos);
          VEC_replace (tree, kv, i, val->value);
          find_more_values_for_callers_subset (node, kv, callers);
          val->spec_node = create_specialized_node (node, kv, callers);
          overall_size += val->local_size_cost;
          info = IPA_NODE_REF (node);

          /* TODO: If for some lattice there is only one other known value
             left, make a special node for it too. */
          ret = true;

          VEC_replace (tree, kv, i, val->value);
        }
    }

  if (info->clone_for_all_contexts)
    {
      VEC (cgraph_edge_p, heap) *callers;

      if (dump_file)
        fprintf (dump_file, " - Creating a specialized node of %s/%i "
                 "for all known contexts.\n", cgraph_node_name (node),
                 node->uid);

      callers = collect_callers_of_node (node);
      move_binfos_to_values (known_csts, known_binfos);
      create_specialized_node (node, known_csts, callers);
      info = IPA_NODE_REF (node);
      info->clone_for_all_contexts = false;
      ret = true;
    }
  else
    VEC_free (tree, heap, known_csts);

  VEC_free (tree, heap, known_binfos);
  return ret;
}

/* Transitively mark all callees of NODE within the same SCC as not dead.  */

static void
spread_undeadness (struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    if (edge_within_scc (cs))
      {
        struct cgraph_node *callee;
        struct ipa_node_params *info;

        callee = cgraph_function_node (cs->callee, NULL);
        info = IPA_NODE_REF (callee);

        if (info->node_dead)
          {
            info->node_dead = 0;
            spread_undeadness (callee);
          }
      }
}

/* Return true if NODE has a caller from outside of its SCC that is not
   dead.  Worker callback for cgraph_for_node_and_aliases.  */

static bool
has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
                                      void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;

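  /* Callers that are thunks are looked through: the recursive call below
     checks whether the thunk itself has an undead caller outside the SCC, so
     a call that reaches NODE only through a thunk is still accounted for.  */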
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (cs->caller->thunk.thunk_p
        && cgraph_for_node_and_aliases (cs->caller,
                                        has_undead_caller_from_outside_scc_p,
                                        NULL, true))
      return true;
    else if (!edge_within_scc (cs)
             && !IPA_NODE_REF (cs->caller)->node_dead)
      return true;
  return false;
}


/* Identify nodes within the same SCC as NODE which are no longer needed
   because of new clones and will be removed as unreachable.  */

static void
identify_dead_nodes (struct cgraph_node *node)
{
  struct cgraph_node *v;
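
  /* First mark every SCC member that could be removed if it had no direct
     callers and that has no live caller outside the SCC; then let every
     member that is still considered live un-mark everything it reaches
     within the SCC via spread_undeadness.  */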
  for (v = node; v; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
    if (cgraph_will_be_removed_from_program_if_no_direct_calls (v)
        && !cgraph_for_node_and_aliases (v,
                                         has_undead_caller_from_outside_scc_p,
                                         NULL, true))
      IPA_NODE_REF (v)->node_dead = 1;

  for (v = node; v; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
    if (!IPA_NODE_REF (v)->node_dead)
      spread_undeadness (v);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      for (v = node; v; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
        if (IPA_NODE_REF (v)->node_dead)
          fprintf (dump_file, " Marking node as dead: %s/%i.\n",
                   cgraph_node_name (v), v->uid);
    }
}

/* The decision stage.  Iterate over the topological order of call graph nodes
   TOPO and make specialized clones if deemed beneficial.  */

static void
ipcp_decision_stage (struct topo_info *topo)
{
  int i;

  if (dump_file)
    fprintf (dump_file, "\nIPA decision stage:\n\n");

  for (i = topo->nnodes - 1; i >= 0; i--)
    {
      struct cgraph_node *node = topo->order[i];
      bool change = false, iterate = true;

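      /* Keep re-running the decision on every function of this SCC until no
         new clone is created; presumably a clone made for one member can make
         further specializations of the other members profitable.  */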
      while (iterate)
        {
          struct cgraph_node *v;
          iterate = false;
          for (v = node; v; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
            if (cgraph_function_with_gimple_body_p (v)
                && ipcp_versionable_function_p (v))
              iterate |= decide_whether_version_node (v);

          change |= iterate;
        }

      if (change)
        identify_dead_nodes (node);
    }
}

/* The IPCP driver.  */

static unsigned int
ipcp_driver (void)
{
  struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
  struct topo_info topo;

  cgraph_remove_unreachable_nodes (true, dump_file);
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  grow_next_edge_clone_vector ();
  edge_duplication_hook_holder =
    cgraph_add_edge_duplication_hook (&ipcp_edge_duplication_hook, NULL);
  ipcp_values_pool = create_alloc_pool ("IPA-CP values",
                                        sizeof (struct ipcp_value), 32);
  ipcp_sources_pool = create_alloc_pool ("IPA-CP value sources",
                                         sizeof (struct ipcp_value_source), 64);
  if (dump_file)
    {
      fprintf (dump_file, "\nIPA structures before propagation:\n");
      if (dump_flags & TDF_DETAILS)
        ipa_print_all_params (dump_file);
      ipa_print_all_jump_functions (dump_file);
    }

  /* Topological sort.  */
  build_toporder_info (&topo);
  /* Do the interprocedural propagation.  */
  ipcp_propagate_stage (&topo);
  /* Decide what constant propagation and cloning should be performed.  */
  ipcp_decision_stage (&topo);

  /* Free all IPCP structures.  */
  free_toporder_info (&topo);
  VEC_free (cgraph_edge_p, heap, next_edge_clone);
  cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
  ipa_free_all_structures_after_ipa_cp ();
  if (dump_file)
    fprintf (dump_file, "\nIPA constant propagation end\n");
  return 0;
}

/* Initialization and computation of IPCP data structures.  This is the initial
   intraprocedural analysis of functions, which gathers information to be
   propagated later on.  */

static void
ipcp_generate_summary (void)
{
  struct cgraph_node *node;

  if (dump_file)
    fprintf (dump_file, "\nIPA constant propagation start:\n");
  ipa_register_cgraph_hooks ();

  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    {
      /* Unreachable nodes should have been eliminated before ipcp.  */
      gcc_assert (node->needed || node->reachable);
      node->local.versionable = tree_versionable_function_p (node->decl);
      ipa_analyze_node (node);
    }
}

/* Write ipcp summary for nodes in SET.  */

static void
ipcp_write_summary (cgraph_node_set set,
                    varpool_node_set vset ATTRIBUTE_UNUSED)
{
  ipa_prop_write_jump_functions (set);
}

/* Read ipcp summary.  */

static void
ipcp_read_summary (void)
{
  ipa_prop_read_jump_functions ();
}

/* Gate for IPCP optimization.  */

static bool
cgraph_gate_cp (void)
{
  /* FIXME: We should remove the optimize check after we ensure we never run
     IPA passes when not optimizing.  */
  return flag_ipa_cp && optimize;
}

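/* Pass descriptor for IPA-CP.  The generate/write/read summary hooks below
   stream the per-function analysis results (the jump functions) so that they
   are available across the LTO compile/link boundary.  */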
struct ipa_opt_pass_d pass_ipa_cp =
{
 {
  IPA_PASS,
  "cp",                                 /* name */
  cgraph_gate_cp,                       /* gate */
  ipcp_driver,                          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IPA_CONSTANT_PROP,                 /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_cgraph |
  TODO_remove_functions | TODO_ggc_collect /* todo_flags_finish */
 },
 ipcp_generate_summary,                 /* generate_summary */
 ipcp_write_summary,                    /* write_summary */
 ipcp_read_summary,                     /* read_summary */
 NULL,                                  /* write_optimization_summary */
 NULL,                                  /* read_optimization_summary */
 NULL,                                  /* stmt_fixup */
 0,                                     /* TODOs */
 NULL,                                  /* function_transform */
 NULL,                                  /* variable_transform */