1 /* Interprocedural constant propagation
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
5 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
6 <mjambor@suse.cz>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 /* Interprocedural constant propagation (IPA-CP).
26 The goal of this transformation is to
28 1) discover functions which are always invoked with the same known constant
29 values of some arguments and modify the functions so that the
30 subsequent optimizations can take advantage of this knowledge, and
32 2) partial specialization - create specialized versions of functions
33 transformed in this way if some parameters are known constants only in
34 certain contexts but the estimated tradeoff between speedup and size cost
35 is deemed good.
37 The algorithm also propagates types and attempts to perform type based
38 devirtualization. Types are propagated much like constants.
40 The algorithm basically consists of three stages. In the first, functions
41 are analyzed one at a time and jump functions are constructed for all known
42 call-sites. In the second phase, the pass propagates information from the
43 jump functions across the call to reveal what values are available at what
44 call sites, performs estimations of effects of known values on functions and
45 their callees, and finally decides what specialized extra versions should be
46 created. In the third, the special versions materialize and appropriate
47 calls are redirected.
49 The algorithm used is to a certain extent based on "Interprocedural Constant
50 Propagation" by David Callahan, Keith D. Cooper, Ken Kennedy and Linda
51 Torczon, Comp86, pp. 152-161, and "A Methodology for Procedure Cloning" by
52 Keith D. Cooper, Mary W. Hall, and Ken Kennedy.
55 First stage - intraprocedural analysis
56 =======================================
58 This phase computes jump_function and modification flags.
60 A jump function for a call-site represents the values passed as actual
61 arguments at that call-site. In principle, there are three types of
62 values:
64 Pass through - the caller's formal parameter is passed as an actual
65 argument, possibly with an operation performed on it.
66 Constant - a constant is passed as an actual argument.
67 Unknown - neither of the above.
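   Purely as an illustration (the caller below is hypothetical, not part of
   GCC): in

     void caller (int i) { callee (i + 1, 7, *global_ptr); }

   the first argument would be described by a pass-through jump function
   (the formal parameter I combined with the operation PLUS_EXPR 1), the
   second by a constant jump function and the third by an unknown one.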
69 All jump function types are described in detail in ipa-prop.h, together with
70 the data structures that represent them and methods of accessing them.
72 ipcp_generate_summary() is the main function of the first stage.
74 Second stage - interprocedural analysis
75 ========================================
77 This stage is itself divided into two phases. In the first, we propagate
78 known values over the call graph, in the second, we make cloning decisions.
79 It uses a different algorithm than the one in the original paper by Callahan et al.
81 First, we traverse the functions topologically from callers to callees and,
82 for each strongly connected component (SCC), we propagate constants
83 according to previously computed jump functions. We also record what known
84 values depend on other known values and estimate local effects. Finally, we
85 propagate cumulative information about these effects from dependent values
86 to those on which they depend.
88 Second, we again traverse the call graph in the same topological order and
89 make clones for functions which we know are called with the same values in
90 all contexts and decide about extra specialized clones of functions just for
91 some contexts - these decisions are based on both local estimates and
92 cumulative estimates propagated from callees.
94 ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
95 second stage.
97 Third phase - materialization of clones, call statement updates.
98 ============================================
100 This stage is currently performed by call graph code (mainly in cgraphunit.c
101 and tree-inline.c) according to instructions inserted to the call graph by
102 the second stage. */
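/* As a purely illustrative example (the functions below are hypothetical and
   not part of GCC): given

     static int scale (int x, int factor) { return x * factor; }
     int use1 (int x) { return scale (x, 4); }
     int use2 (int x) { return scale (x, 4); }

   every call site passes the constant 4 as FACTOR, so after the propagation
   and decision stages the pass can replace FACTOR with 4 in a "constprop"
   clone of scale (possibly dropping the parameter altogether) and redirect
   both calls to that clone.  */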
104 #include "config.h"
105 #include "system.h"
106 #include "coretypes.h"
107 #include "tree.h"
108 #include "target.h"
109 #include "gimple.h"
110 #include "cgraph.h"
111 #include "ipa-prop.h"
112 #include "tree-flow.h"
113 #include "tree-pass.h"
114 #include "flags.h"
115 #include "timevar.h"
116 #include "diagnostic.h"
117 #include "tree-pretty-print.h"
118 #include "tree-dump.h"
119 #include "tree-inline.h"
120 #include "fibheap.h"
121 #include "params.h"
122 #include "ipa-inline.h"
123 #include "ipa-utils.h"
125 struct ipcp_value;
127 /* Describes a particular source for an IPA-CP value. */
129 struct ipcp_value_source
131 /* The incoming edge that brought the value. */
132 struct cgraph_edge *cs;
133 /* If the jump function that resulted in this value was a pass-through or an
134 ancestor, this is the ipcp_value of the caller from which the described
135 value has been derived. Otherwise it is NULL. */
136 struct ipcp_value *val;
137 /* Next pointer in a linked list of sources of a value. */
138 struct ipcp_value_source *next;
139 /* If the jump function that resulted in this value was a pass-through or an
140 ancestor, this is the index of the parameter of the caller the jump
141 function references. */
142 int index;
145 /* Describes one particular value stored in struct ipcp_lattice. */
147 struct ipcp_value
149 /* The actual value for the given parameter. This is either an IPA invariant
150 or a TREE_BINFO describing a type that can be used for
151 devirtualization. */
152 tree value;
153 /* The list of sources from which this value originates. */
154 struct ipcp_value_source *sources;
155 /* Next pointers in a linked list of all values in a lattice. */
156 struct ipcp_value *next;
157 /* Next pointers in a linked list of values in a strongly connected component
158 of values. */
159 struct ipcp_value *scc_next;
160 /* Next pointers in a linked list of SCCs of values sorted topologically
161 according their sources. */
162 struct ipcp_value *topo_next;
163 /* A specialized node created for this value, NULL if none has been (so far)
164 created. */
165 struct cgraph_node *spec_node;
166 /* Depth first search number and low link for topological sorting of
167 values. */
168 int dfs, low_link;
169 /* Time benefit and size cost that specializing the function for this value
170 would bring about in this function alone. */
171 int local_time_benefit, local_size_cost;
172 /* Time benefit and size cost that specializing the function for this value
173 can bring about in its callees (transitively). */
174 int prop_time_benefit, prop_size_cost;
175 /* True if this value is currently on the topo-sort stack. */
176 bool on_stack;
179 /* Allocation pools for values and their sources in ipa-cp. */
181 alloc_pool ipcp_values_pool;
182 alloc_pool ipcp_sources_pool;
184 /* Lattice describing potential values of a formal parameter of a function and
185 some of their other properties. TOP is represented by a lattice with zero
186 values and with contains_variable and bottom flags cleared. BOTTOM is
187 represented by a lattice with the bottom flag set. In that case, values and
188 contains_variable flag should be disregarded. */
190 struct ipcp_lattice
192 /* The list of known values and types in this lattice. Note that values are
193 not deallocated if a lattice is set to bottom because there may be value
194 sources referencing them. */
195 struct ipcp_value *values;
196 /* Number of known values and types in this lattice. */
197 int values_count;
198 /* The lattice contains a variable component (in addition to values). */
199 bool contains_variable;
200 /* The value of the lattice is bottom (i.e. variable and unusable for any
201 propagation). */
202 bool bottom;
203 /* There is a virtual call based on this parameter. */
204 bool virt_call;
207 /* Maximal count found in program. */
209 static gcov_type max_count;
211 /* Original overall size of the program. */
213 static long overall_size, max_new_size;
215 /* Head of the linked list of topologically sorted values. */
217 static struct ipcp_value *values_topo;
219 /* Return the lattice corresponding to the Ith formal parameter of the function
220 described by INFO. */
221 static inline struct ipcp_lattice *
222 ipa_get_lattice (struct ipa_node_params *info, int i)
224 gcc_assert (i >= 0 && i < ipa_get_param_count (info));
225 gcc_checking_assert (!info->ipcp_orig_node);
226 gcc_checking_assert (info->lattices);
227 return &(info->lattices[i]);
230 /* Return whether LAT is a lattice with a single constant and without an
231 undefined value. */
233 static inline bool
234 ipa_lat_is_single_const (struct ipcp_lattice *lat)
236 if (lat->bottom
237 || lat->contains_variable
238 || lat->values_count != 1)
239 return false;
240 else
241 return true;
244 /* Return true iff the CS is an edge within a strongly connected component as
245 computed by ipa_reduced_postorder. */
247 static inline bool
248 edge_within_scc (struct cgraph_edge *cs)
250 struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->symbol.aux;
251 struct ipa_dfs_info *callee_dfs;
252 struct cgraph_node *callee = cgraph_function_node (cs->callee, NULL);
254 callee_dfs = (struct ipa_dfs_info *) callee->symbol.aux;
255 return (caller_dfs
256 && callee_dfs
257 && caller_dfs->scc_no == callee_dfs->scc_no);
260 /* Print V, which is extracted from a value in a lattice, to F. */
262 static void
263 print_ipcp_constant_value (FILE * f, tree v)
265 if (TREE_CODE (v) == TREE_BINFO)
267 fprintf (f, "BINFO ");
268 print_generic_expr (f, BINFO_TYPE (v), 0);
270 else if (TREE_CODE (v) == ADDR_EXPR
271 && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
273 fprintf (f, "& ");
274 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)), 0);
276 else
277 print_generic_expr (f, v, 0);
280 /* Print all ipcp_lattices of all functions to F. */
282 static void
283 print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
285 struct cgraph_node *node;
286 int i, count;
288 fprintf (f, "\nLattices:\n");
289 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
291 struct ipa_node_params *info;
293 info = IPA_NODE_REF (node);
294 fprintf (f, " Node: %s/%i:\n", cgraph_node_name (node), node->uid);
295 count = ipa_get_param_count (info);
296 for (i = 0; i < count; i++)
298 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
299 struct ipcp_value *val;
300 bool prev = false;
302 fprintf (f, " param [%d]: ", i);
303 if (lat->bottom)
305 fprintf (f, "BOTTOM\n");
306 continue;
309 if (!lat->values_count && !lat->contains_variable)
311 fprintf (f, "TOP\n");
312 continue;
315 if (lat->contains_variable)
317 fprintf (f, "VARIABLE");
318 prev = true;
319 if (dump_benefits)
320 fprintf (f, "\n");
323 for (val = lat->values; val; val = val->next)
325 if (dump_benefits && prev)
326 fprintf (f, " ");
327 else if (!dump_benefits && prev)
328 fprintf (f, ", ");
329 else
330 prev = true;
332 print_ipcp_constant_value (f, val->value);
334 if (dump_sources)
336 struct ipcp_value_source *s;
338 fprintf (f, " [from:");
339 for (s = val->sources; s; s = s->next)
340 fprintf (f, " %i(%i)", s->cs->caller->uid,s->cs->frequency);
341 fprintf (f, "]");
344 if (dump_benefits)
345 fprintf (f, " [loc_time: %i, loc_size: %i, "
346 "prop_time: %i, prop_size: %i]\n",
347 val->local_time_benefit, val->local_size_cost,
348 val->prop_time_benefit, val->prop_size_cost);
350 if (!dump_benefits)
351 fprintf (f, "\n");
356 /* Determine whether it is at all technically possible to create clones of NODE
357 and store this information in the ipa_node_params structure associated
358 with NODE. */
360 static void
361 determine_versionability (struct cgraph_node *node)
363 const char *reason = NULL;
365 /* There are a number of generic reasons functions cannot be versioned. We
366 also cannot remove parameters if there are type attributes such as fnspec
367 present. */
368 if (node->alias || node->thunk.thunk_p)
369 reason = "alias or thunk";
370 else if (!node->local.versionable)
371 reason = "not a tree_versionable_function";
372 else if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
373 reason = "insufficient body availability";
375 if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
376 fprintf (dump_file, "Function %s/%i is not versionable, reason: %s.\n",
377 cgraph_node_name (node), node->uid, reason);
379 node->local.versionable = (reason == NULL);
382 /* Return true if it is at all technically possible to create clones of a
383 NODE. */
385 static bool
386 ipcp_versionable_function_p (struct cgraph_node *node)
388 return node->local.versionable;
391 /* Structure holding accumulated information about callers of a node. */
393 struct caller_statistics
395 gcov_type count_sum;
396 int n_calls, n_hot_calls, freq_sum;
399 /* Initialize fields of STAT to zeroes. */
401 static inline void
402 init_caller_stats (struct caller_statistics *stats)
404 stats->count_sum = 0;
405 stats->n_calls = 0;
406 stats->n_hot_calls = 0;
407 stats->freq_sum = 0;
410 /* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
411 non-thunk incoming edges to NODE. */
413 static bool
414 gather_caller_stats (struct cgraph_node *node, void *data)
416 struct caller_statistics *stats = (struct caller_statistics *) data;
417 struct cgraph_edge *cs;
419 for (cs = node->callers; cs; cs = cs->next_caller)
420 if (cs->caller->thunk.thunk_p)
421 cgraph_for_node_and_aliases (cs->caller, gather_caller_stats,
422 stats, false);
423 else
425 stats->count_sum += cs->count;
426 stats->freq_sum += cs->frequency;
427 stats->n_calls++;
428 if (cgraph_maybe_hot_edge_p (cs))
429 stats->n_hot_calls ++;
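  /* Returning false means cgraph_for_node_and_aliases will continue walking
     the remaining aliases of NODE (a true return value would stop the
     traversal).  */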
431 return false;
435 /* Return true if this NODE is a viable candidate for cloning. */
437 static bool
438 ipcp_cloning_candidate_p (struct cgraph_node *node)
440 struct caller_statistics stats;
442 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
444 if (!flag_ipa_cp_clone)
446 if (dump_file)
447 fprintf (dump_file, "Not considering %s for cloning; "
448 "-fipa-cp-clone disabled.\n",
449 cgraph_node_name (node));
450 return false;
453 if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->symbol.decl)))
455 if (dump_file)
456 fprintf (dump_file, "Not considering %s for cloning; "
457 "optimizing it for size.\n",
458 cgraph_node_name (node));
459 return false;
462 init_caller_stats (&stats);
463 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
465 if (inline_summary (node)->self_size < stats.n_calls)
467 if (dump_file)
468 fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
469 cgraph_node_name (node));
470 return true;
473 /* When a profile is available and the function is hot, propagate into it even
474 if the calls seem cold; constant propagation can improve the function's
475 speed significantly. */
476 if (max_count)
478 if (stats.count_sum > node->count * 90 / 100)
480 if (dump_file)
481 fprintf (dump_file, "Considering %s for cloning; "
482 "usually called directly.\n",
483 cgraph_node_name (node));
484 return true;
487 if (!stats.n_hot_calls)
489 if (dump_file)
490 fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
491 cgraph_node_name (node));
492 return false;
494 if (dump_file)
495 fprintf (dump_file, "Considering %s for cloning.\n",
496 cgraph_node_name (node));
497 return true;
500 /* Arrays representing a topological ordering of call graph nodes and a stack
501 of nodes used during constant propagation. */
503 struct topo_info
505 struct cgraph_node **order;
506 struct cgraph_node **stack;
507 int nnodes, stack_top;
510 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
512 static void
513 build_toporder_info (struct topo_info *topo)
515 topo->order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
516 topo->stack = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
517 topo->stack_top = 0;
518 topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
521 /* Free information about strongly connected components and the arrays in
522 TOPO. */
524 static void
525 free_toporder_info (struct topo_info *topo)
527 ipa_free_postorder_info ();
528 free (topo->order);
529 free (topo->stack);
532 /* Add NODE to the stack in TOPO, unless it is already there. */
534 static inline void
535 push_node_to_stack (struct topo_info *topo, struct cgraph_node *node)
537 struct ipa_node_params *info = IPA_NODE_REF (node);
538 if (info->node_enqueued)
539 return;
540 info->node_enqueued = 1;
541 topo->stack[topo->stack_top++] = node;
544 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
545 is empty. */
547 static struct cgraph_node *
548 pop_node_from_stack (struct topo_info *topo)
550 if (topo->stack_top)
552 struct cgraph_node *node;
553 topo->stack_top--;
554 node = topo->stack[topo->stack_top];
555 IPA_NODE_REF (node)->node_enqueued = 0;
556 return node;
558 else
559 return NULL;
562 /* Set lattice LAT to bottom and return true if it previously was not set as
563 such. */
565 static inline bool
566 set_lattice_to_bottom (struct ipcp_lattice *lat)
568 bool ret = !lat->bottom;
569 lat->bottom = true;
570 return ret;
573 /* Mark lattice as containing an unknown value and return true if it previously
574 was not marked as such. */
576 static inline bool
577 set_lattice_contains_variable (struct ipcp_lattice *lat)
579 bool ret = !lat->contains_variable;
580 lat->contains_variable = true;
581 return ret;
584 /* Initialize ipcp_lattices. */
586 static void
587 initialize_node_lattices (struct cgraph_node *node)
589 struct ipa_node_params *info = IPA_NODE_REF (node);
590 struct cgraph_edge *ie;
591 bool disable = false, variable = false;
592 int i;
594 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
595 if (!node->local.local)
597 /* When cloning is allowed, we can assume that externally visible
598 functions are not called. We will compensate for this by cloning
599 later. */
600 if (ipcp_versionable_function_p (node)
601 && ipcp_cloning_candidate_p (node))
602 variable = true;
603 else
604 disable = true;
607 if (disable || variable)
609 for (i = 0; i < ipa_get_param_count (info) ; i++)
611 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
612 if (disable)
613 set_lattice_to_bottom (lat);
614 else
615 set_lattice_contains_variable (lat);
617 if (dump_file && (dump_flags & TDF_DETAILS)
618 && !node->alias && !node->thunk.thunk_p)
619 fprintf (dump_file, "Marking all lattices of %s/%i as %s\n",
620 cgraph_node_name (node), node->uid,
621 disable ? "BOTTOM" : "VARIABLE");
624 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
625 if (ie->indirect_info->polymorphic)
627 gcc_checking_assert (ie->indirect_info->param_index >= 0);
628 ipa_get_lattice (info, ie->indirect_info->param_index)->virt_call = 1;
632 /* Return the result of a (possibly arithmetic) pass-through jump function
633 JFUNC on the constant value INPUT. Return NULL_TREE if the result cannot be
634 determined or cannot itself be considered an interprocedural invariant. */
636 static tree
637 ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
639 tree restype, res;
641 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
642 return input;
643 else if (TREE_CODE (input) == TREE_BINFO)
644 return NULL_TREE;
646 gcc_checking_assert (is_gimple_ip_invariant (input));
647 if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc))
648 == tcc_comparison)
649 restype = boolean_type_node;
650 else
651 restype = TREE_TYPE (input);
652 res = fold_binary (ipa_get_jf_pass_through_operation (jfunc), restype,
653 input, ipa_get_jf_pass_through_operand (jfunc));
655 if (res && !is_gimple_ip_invariant (res))
656 return NULL_TREE;
658 return res;
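/* For instance (an illustration only, not an exhaustive specification): if
   JFUNC describes the operation PLUS_EXPR with the operand 1 and INPUT is the
   integer constant 7, the folding above yields the integer constant 8; for a
   comparison operation the result would instead have boolean type.  */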
661 /* Return the result of an ancestor jump function JFUNC on the constant value
662 INPUT. Return NULL_TREE if that cannot be determined. */
664 static tree
665 ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
667 if (TREE_CODE (input) == TREE_BINFO)
668 return get_binfo_at_offset (input,
669 ipa_get_jf_ancestor_offset (jfunc),
670 ipa_get_jf_ancestor_type (jfunc));
671 else if (TREE_CODE (input) == ADDR_EXPR)
673 tree t = TREE_OPERAND (input, 0);
674 t = build_ref_for_offset (EXPR_LOCATION (t), t,
675 ipa_get_jf_ancestor_offset (jfunc),
676 ipa_get_jf_ancestor_type (jfunc), NULL, false);
677 return build_fold_addr_expr (t);
679 else
680 return NULL_TREE;
683 /* Extract the actual BINFO being described by JFUNC, which must be a known type
684 jump function. */
686 static tree
687 ipa_value_from_known_type_jfunc (struct ipa_jump_func *jfunc)
689 tree base_binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
690 if (!base_binfo)
691 return NULL_TREE;
692 return get_binfo_at_offset (base_binfo,
693 ipa_get_jf_known_type_offset (jfunc),
694 ipa_get_jf_known_type_component_type (jfunc));
697 /* Determine whether JFUNC evaluates to a known value (that is either a
698 constant or a binfo) and if so, return it. Otherwise return NULL_TREE. INFO
699 describes the caller node so that pass-through jump functions can be
700 evaluated. */
702 tree
703 ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
705 if (jfunc->type == IPA_JF_CONST)
706 return ipa_get_jf_constant (jfunc);
707 else if (jfunc->type == IPA_JF_KNOWN_TYPE)
708 return ipa_value_from_known_type_jfunc (jfunc);
709 else if (jfunc->type == IPA_JF_PASS_THROUGH
710 || jfunc->type == IPA_JF_ANCESTOR)
712 tree input;
713 int idx;
715 if (jfunc->type == IPA_JF_PASS_THROUGH)
716 idx = ipa_get_jf_pass_through_formal_id (jfunc);
717 else
718 idx = ipa_get_jf_ancestor_formal_id (jfunc);
720 if (info->ipcp_orig_node)
721 input = VEC_index (tree, info->known_vals, idx);
722 else
724 struct ipcp_lattice *lat;
726 if (!info->lattices)
728 gcc_checking_assert (!flag_ipa_cp);
729 return NULL_TREE;
731 lat = ipa_get_lattice (info, idx);
732 if (!ipa_lat_is_single_const (lat))
733 return NULL_TREE;
734 input = lat->values->value;
737 if (!input)
738 return NULL_TREE;
740 if (jfunc->type == IPA_JF_PASS_THROUGH)
741 return ipa_get_jf_pass_through_result (jfunc, input);
742 else
743 return ipa_get_jf_ancestor_result (jfunc, input);
745 else
746 return NULL_TREE;
750 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
751 bottom, not containing a variable component and without any known value at
752 the same time. */
754 DEBUG_FUNCTION void
755 ipcp_verify_propagated_values (void)
757 struct cgraph_node *node;
759 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
761 struct ipa_node_params *info = IPA_NODE_REF (node);
762 int i, count = ipa_get_param_count (info);
764 for (i = 0; i < count; i++)
766 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
768 if (!lat->bottom
769 && !lat->contains_variable
770 && lat->values_count == 0)
772 if (dump_file)
774 fprintf (dump_file, "\nIPA lattices after constant "
775 "propagation:\n");
776 print_all_lattices (dump_file, true, false);
779 gcc_unreachable ();
785 /* Return true iff X and Y should be considered equal values by IPA-CP. */
787 static bool
788 values_equal_for_ipcp_p (tree x, tree y)
790 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
792 if (x == y)
793 return true;
795 if (TREE_CODE (x) == TREE_BINFO || TREE_CODE (y) == TREE_BINFO)
796 return false;
798 if (TREE_CODE (x) == ADDR_EXPR
799 && TREE_CODE (y) == ADDR_EXPR
800 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
801 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
802 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
803 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
804 else
805 return operand_equal_p (x, y, 0);
808 /* Add a new value source to VAL, marking that a value comes from edge CS and
809 (if the underlying jump function is a pass-through or an ancestor one) from
810 a caller value SRC_VAL of a caller parameter described by SRC_INDEX. */
812 static void
813 add_value_source (struct ipcp_value *val, struct cgraph_edge *cs,
814 struct ipcp_value *src_val, int src_idx)
816 struct ipcp_value_source *src;
818 src = (struct ipcp_value_source *) pool_alloc (ipcp_sources_pool);
819 src->cs = cs;
820 src->val = src_val;
821 src->index = src_idx;
823 src->next = val->sources;
824 val->sources = src;
828 /* Try to add NEWVAL to LAT, potentially creating a new struct ipcp_value for
829 it. CS, SRC_VAL and SRC_INDEX are meant for add_value_source and have the
830 same meaning. */
832 static bool
833 add_value_to_lattice (struct ipcp_lattice *lat, tree newval,
834 struct cgraph_edge *cs, struct ipcp_value *src_val,
835 int src_idx)
837 struct ipcp_value *val;
839 if (lat->bottom)
840 return false;
843 for (val = lat->values; val; val = val->next)
844 if (values_equal_for_ipcp_p (val->value, newval))
846 if (edge_within_scc (cs))
848 struct ipcp_value_source *s;
849 for (s = val->sources; s ; s = s->next)
850 if (s->cs == cs)
851 break;
852 if (s)
853 return false;
856 add_value_source (val, cs, src_val, src_idx);
857 return false;
860 if (lat->values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
862 /* We can only free sources, not the values themselves, because sources
863 of other values in this SCC might point to them. */
864 for (val = lat->values; val; val = val->next)
866 while (val->sources)
868 struct ipcp_value_source *src = val->sources;
869 val->sources = src->next;
870 pool_free (ipcp_sources_pool, src);
874 lat->values = NULL;
875 return set_lattice_to_bottom (lat);
878 lat->values_count++;
879 val = (struct ipcp_value *) pool_alloc (ipcp_values_pool);
880 memset (val, 0, sizeof (*val));
882 add_value_source (val, cs, src_val, src_idx);
883 val->value = newval;
884 val->next = lat->values;
885 lat->values = val;
886 return true;
889 /* Propagate values through a pass-through jump function JFUNC associated with
890 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
891 is the index of the source parameter. */
893 static bool
894 propagate_vals_accross_pass_through (struct cgraph_edge *cs,
895 struct ipa_jump_func *jfunc,
896 struct ipcp_lattice *src_lat,
897 struct ipcp_lattice *dest_lat,
898 int src_idx)
900 struct ipcp_value *src_val;
901 bool ret = false;
903 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
904 for (src_val = src_lat->values; src_val; src_val = src_val->next)
905 ret |= add_value_to_lattice (dest_lat, src_val->value, cs,
906 src_val, src_idx);
907 /* Do not create new values when propagating within an SCC because if there
908 are arithmetic functions with circular dependencies, there is an infinite
909 number of them and we would just make the lattices bottom. */
910 else if (edge_within_scc (cs))
911 ret = set_lattice_contains_variable (dest_lat);
912 else
913 for (src_val = src_lat->values; src_val; src_val = src_val->next)
915 tree cstval = src_val->value;
917 if (TREE_CODE (cstval) == TREE_BINFO)
919 ret |= set_lattice_contains_variable (dest_lat);
920 continue;
922 cstval = ipa_get_jf_pass_through_result (jfunc, cstval);
924 if (cstval)
925 ret |= add_value_to_lattice (dest_lat, cstval, cs, src_val, src_idx);
926 else
927 ret |= set_lattice_contains_variable (dest_lat);
930 return ret;
933 /* Propagate values through an ancestor jump function JFUNC associated with
934 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
935 is the index of the source parameter. */
937 static bool
938 propagate_vals_accross_ancestor (struct cgraph_edge *cs,
939 struct ipa_jump_func *jfunc,
940 struct ipcp_lattice *src_lat,
941 struct ipcp_lattice *dest_lat,
942 int src_idx)
944 struct ipcp_value *src_val;
945 bool ret = false;
947 if (edge_within_scc (cs))
948 return set_lattice_contains_variable (dest_lat);
950 for (src_val = src_lat->values; src_val; src_val = src_val->next)
952 tree t = ipa_get_jf_ancestor_result (jfunc, src_val->value);
954 if (t)
955 ret |= add_value_to_lattice (dest_lat, t, cs, src_val, src_idx);
956 else
957 ret |= set_lattice_contains_variable (dest_lat);
960 return ret;
963 /* Propagate values across jump function JFUNC that is associated with edge CS
964 and put the values into DEST_LAT. */
966 static bool
967 propagate_accross_jump_function (struct cgraph_edge *cs,
968 struct ipa_jump_func *jfunc,
969 struct ipcp_lattice *dest_lat)
971 if (dest_lat->bottom)
972 return false;
974 if (jfunc->type == IPA_JF_CONST
975 || jfunc->type == IPA_JF_KNOWN_TYPE)
977 tree val;
979 if (jfunc->type == IPA_JF_KNOWN_TYPE)
981 val = ipa_value_from_known_type_jfunc (jfunc);
982 if (!val)
983 return set_lattice_contains_variable (dest_lat);
985 else
986 val = ipa_get_jf_constant (jfunc);
987 return add_value_to_lattice (dest_lat, val, cs, NULL, 0);
989 else if (jfunc->type == IPA_JF_PASS_THROUGH
990 || jfunc->type == IPA_JF_ANCESTOR)
992 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
993 struct ipcp_lattice *src_lat;
994 int src_idx;
995 bool ret;
997 if (jfunc->type == IPA_JF_PASS_THROUGH)
998 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
999 else
1000 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1002 src_lat = ipa_get_lattice (caller_info, src_idx);
1003 if (src_lat->bottom)
1004 return set_lattice_contains_variable (dest_lat);
1006 /* If we would need to clone the caller and cannot, do not propagate. */
1007 if (!ipcp_versionable_function_p (cs->caller)
1008 && (src_lat->contains_variable
1009 || (src_lat->values_count > 1)))
1010 return set_lattice_contains_variable (dest_lat);
1012 if (jfunc->type == IPA_JF_PASS_THROUGH)
1013 ret = propagate_vals_accross_pass_through (cs, jfunc, src_lat,
1014 dest_lat, src_idx);
1015 else
1016 ret = propagate_vals_accross_ancestor (cs, jfunc, src_lat, dest_lat,
1017 src_idx);
1019 if (src_lat->contains_variable)
1020 ret |= set_lattice_contains_variable (dest_lat);
1022 return ret;
1025 /* TODO: We currently do not handle member method pointers in IPA-CP (we only
1026 use it for indirect inlining); we should propagate them too. */
1027 return set_lattice_contains_variable (dest_lat);
1030 /* Propagate constants from the caller to the callee of CS. Return true if
1031 any lattice of the callee changed. */
1033 static bool
1034 propagate_constants_accross_call (struct cgraph_edge *cs)
1036 struct ipa_node_params *callee_info;
1037 enum availability availability;
1038 struct cgraph_node *callee, *alias_or_thunk;
1039 struct ipa_edge_args *args;
1040 bool ret = false;
1041 int i, args_count, parms_count;
1043 callee = cgraph_function_node (cs->callee, &availability);
1044 if (!callee->analyzed)
1045 return false;
1046 gcc_checking_assert (cgraph_function_with_gimple_body_p (callee));
1047 callee_info = IPA_NODE_REF (callee);
1049 args = IPA_EDGE_REF (cs);
1050 args_count = ipa_get_cs_argument_count (args);
1051 parms_count = ipa_get_param_count (callee_info);
1053 /* If this call goes through a thunk we must not propagate to the first (0th)
1054 parameter. However, we might need to uncover a thunk from below a series
1055 of aliases first. */
1056 alias_or_thunk = cs->callee;
1057 while (alias_or_thunk->alias)
1058 alias_or_thunk = cgraph_alias_aliased_node (alias_or_thunk);
1059 if (alias_or_thunk->thunk.thunk_p)
1061 ret |= set_lattice_contains_variable (ipa_get_lattice (callee_info, 0));
1062 i = 1;
1064 else
1065 i = 0;
1067 for (; (i < args_count) && (i < parms_count); i++)
1069 struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
1070 struct ipcp_lattice *dest_lat = ipa_get_lattice (callee_info, i);
1072 if (availability == AVAIL_OVERWRITABLE)
1073 ret |= set_lattice_contains_variable (dest_lat);
1074 else
1075 ret |= propagate_accross_jump_function (cs, jump_func, dest_lat);
1077 for (; i < parms_count; i++)
1078 ret |= set_lattice_contains_variable (ipa_get_lattice (callee_info, i));
1080 return ret;
1083 /* If an indirect edge IE can be turned into a direct one based on KNOWN_VALS
1084 (which can contain both constants and binfos) or KNOWN_BINFOS (which can be
1085 NULL), return the destination. Otherwise return NULL_TREE. */
1087 tree
1088 ipa_get_indirect_edge_target (struct cgraph_edge *ie,
1089 VEC (tree, heap) *known_vals,
1090 VEC (tree, heap) *known_binfos)
1092 int param_index = ie->indirect_info->param_index;
1093 HOST_WIDE_INT token, anc_offset;
1094 tree otr_type;
1095 tree t;
1097 if (param_index == -1)
1098 return NULL_TREE;
1100 if (!ie->indirect_info->polymorphic)
1102 tree t = (VEC_length (tree, known_vals) > (unsigned int) param_index
1103 ? VEC_index (tree, known_vals, param_index) : NULL);
1104 if (t &&
1105 TREE_CODE (t) == ADDR_EXPR
1106 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
1107 return TREE_OPERAND (t, 0);
1108 else
1109 return NULL_TREE;
1112 token = ie->indirect_info->otr_token;
1113 anc_offset = ie->indirect_info->anc_offset;
1114 otr_type = ie->indirect_info->otr_type;
1116 t = VEC_index (tree, known_vals, param_index);
1117 if (!t && known_binfos
1118 && VEC_length (tree, known_binfos) > (unsigned int) param_index)
1119 t = VEC_index (tree, known_binfos, param_index);
1120 if (!t)
1121 return NULL_TREE;
1123 if (TREE_CODE (t) != TREE_BINFO)
1125 tree binfo;
1126 binfo = gimple_extract_devirt_binfo_from_cst (t);
1127 if (!binfo)
1128 return NULL_TREE;
1129 binfo = get_binfo_at_offset (binfo, anc_offset, otr_type);
1130 if (!binfo)
1131 return NULL_TREE;
1132 return gimple_get_virt_method_for_binfo (token, binfo);
1134 else
1136 tree binfo;
1138 binfo = get_binfo_at_offset (t, anc_offset, otr_type);
1139 if (!binfo)
1140 return NULL_TREE;
1141 return gimple_get_virt_method_for_binfo (token, binfo);
1145 /* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
1146 and KNOWN_BINFOS. */
1148 static int
1149 devirtualization_time_bonus (struct cgraph_node *node,
1150 VEC (tree, heap) *known_csts,
1151 VEC (tree, heap) *known_binfos)
1153 struct cgraph_edge *ie;
1154 int res = 0;
1156 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1158 struct cgraph_node *callee;
1159 struct inline_summary *isummary;
1160 tree target;
1162 target = ipa_get_indirect_edge_target (ie, known_csts, known_binfos);
1163 if (!target)
1164 continue;
1166 /* Only bare minimum benefit for clearly un-inlineable targets. */
1167 res += 1;
1168 callee = cgraph_get_node (target);
1169 if (!callee || !callee->analyzed)
1170 continue;
1171 isummary = inline_summary (callee);
1172 if (!isummary->inlinable)
1173 continue;
1175 /* FIXME: The values below need re-considering and perhaps also
1176 integrating into the cost metrics, at least in some very basic way. */
1177 if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
1178 res += 31;
1179 else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
1180 res += 15;
1181 else if (isummary->size <= MAX_INLINE_INSNS_AUTO
1182 || DECL_DECLARED_INLINE_P (callee->symbol.decl))
1183 res += 7;
1186 return res;
1189 /* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
1190 and SIZE_COST, the sum of frequencies of incoming edges to the potential new
1191 clone in FREQ_SUM and the sum of their profile counts in COUNT_SUM. */
1193 static bool
1194 good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
1195 int freq_sum, gcov_type count_sum, int size_cost)
1197 if (time_benefit == 0
1198 || !flag_ipa_cp_clone
1199 || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->symbol.decl)))
1200 return false;
1202 gcc_assert (size_cost > 0);
1204 if (max_count)
1206 int factor = (count_sum * 1000) / max_count;
1207 HOST_WIDEST_INT evaluation = (((HOST_WIDEST_INT) time_benefit * factor)
1208 / size_cost);
1210 if (dump_file && (dump_flags & TDF_DETAILS))
1211 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1212 "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
1213 ") -> evaluation: " HOST_WIDEST_INT_PRINT_DEC
1214 ", threshold: %i\n",
1215 time_benefit, size_cost, (HOST_WIDE_INT) count_sum,
1216 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
1218 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
1220 else
1222 HOST_WIDEST_INT evaluation = (((HOST_WIDEST_INT) time_benefit * freq_sum)
1223 / size_cost);
1225 if (dump_file && (dump_flags & TDF_DETAILS))
1226 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1227 "size: %i, freq_sum: %i) -> evaluation: "
1228 HOST_WIDEST_INT_PRINT_DEC ", threshold: %i\n",
1229 time_benefit, size_cost, freq_sum, evaluation,
1230 PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
1232 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
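/* Restated informally (an approximation of the code above, not a separate
   specification): with profile feedback the pass computes

     evaluation = time_benefit * (count_sum * 1000 / max_count) / size_cost

   and without profile feedback

     evaluation = time_benefit * freq_sum / size_cost

   and considers cloning worthwhile only when the result reaches
   PARAM_IPA_CP_EVAL_THRESHOLD.  */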
1237 /* Allocate KNOWN_CSTS and KNOWN_BINFOS and populate them with values of
1238 parameters that are known independent of the context. INFO describes the
1239 function. If REMOVABLE_PARAMS_COST is non-NULL, the movement cost of all
1240 removable parameters will be stored in it. */
1242 static bool
1243 gather_context_independent_values (struct ipa_node_params *info,
1244 VEC (tree, heap) **known_csts,
1245 VEC (tree, heap) **known_binfos,
1246 int *removable_params_cost)
1248 int i, count = ipa_get_param_count (info);
1249 bool ret = false;
1251 *known_csts = NULL;
1252 *known_binfos = NULL;
1253 VEC_safe_grow_cleared (tree, heap, *known_csts, count);
1254 VEC_safe_grow_cleared (tree, heap, *known_binfos, count);
1256 if (removable_params_cost)
1257 *removable_params_cost = 0;
1259 for (i = 0; i < count ; i++)
1261 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
1263 if (ipa_lat_is_single_const (lat))
1265 struct ipcp_value *val = lat->values;
1266 if (TREE_CODE (val->value) != TREE_BINFO)
1268 VEC_replace (tree, *known_csts, i, val->value);
1269 if (removable_params_cost)
1270 *removable_params_cost
1271 += estimate_move_cost (TREE_TYPE (val->value));
1272 ret = true;
1274 else if (lat->virt_call)
1276 VEC_replace (tree, *known_binfos, i, val->value);
1277 ret = true;
1279 else if (removable_params_cost
1280 && !ipa_is_param_used (info, i))
1281 *removable_params_cost
1282 += estimate_move_cost (TREE_TYPE (ipa_get_param (info, i)));
1284 else if (removable_params_cost
1285 && !ipa_is_param_used (info, i))
1286 *removable_params_cost
1287 += estimate_move_cost (TREE_TYPE (ipa_get_param (info, i)));
1290 return ret;
1293 /* Iterate over known values of parameters of NODE and estimate the local
1294 effects they have in terms of time and size. */
1296 static void
1297 estimate_local_effects (struct cgraph_node *node)
1299 struct ipa_node_params *info = IPA_NODE_REF (node);
1300 int i, count = ipa_get_param_count (info);
1301 VEC (tree, heap) *known_csts, *known_binfos;
1302 bool always_const;
1303 int base_time = inline_summary (node)->time;
1304 int removable_params_cost;
1306 if (!count || !ipcp_versionable_function_p (node))
1307 return;
1309 if (dump_file && (dump_flags & TDF_DETAILS))
1310 fprintf (dump_file, "\nEstimating effects for %s/%i, base_time: %i.\n",
1311 cgraph_node_name (node), node->uid, base_time);
1313 always_const = gather_context_independent_values (info, &known_csts,
1314 &known_binfos,
1315 &removable_params_cost);
1316 if (always_const)
1318 struct caller_statistics stats;
1319 int time, size;
1321 init_caller_stats (&stats);
1322 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
1323 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1324 &size, &time);
1325 time -= devirtualization_time_bonus (node, known_csts, known_binfos);
1326 time -= removable_params_cost;
1327 size -= stats.n_calls * removable_params_cost;
1329 if (dump_file)
1330 fprintf (dump_file, " - context independent values, size: %i, "
1331 "time_benefit: %i\n", size, base_time - time);
1333 if (size <= 0
1334 || cgraph_will_be_removed_from_program_if_no_direct_calls (node))
1336 info->clone_for_all_contexts = true;
1337 base_time = time;
1339 if (dump_file)
1340 fprintf (dump_file, " Decided to specialize for all "
1341 "known contexts, code not going to grow.\n");
1343 else if (good_cloning_opportunity_p (node, base_time - time,
1344 stats.freq_sum, stats.count_sum,
1345 size))
1347 if (size + overall_size <= max_new_size)
1349 info->clone_for_all_contexts = true;
1350 base_time = time;
1351 overall_size += size;
1353 if (dump_file)
1354 fprintf (dump_file, " Decided to specialize for all "
1355 "known contexts, growth deemed beneficial.\n");
1357 else if (dump_file && (dump_flags & TDF_DETAILS))
1358 fprintf (dump_file, " Not cloning for all contexts because "
1359 "max_new_size would be reached with %li.\n",
1360 size + overall_size);
1364 for (i = 0; i < count ; i++)
1366 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
1367 struct ipcp_value *val;
1368 int emc;
1370 if (lat->bottom
1371 || !lat->values
1372 || VEC_index (tree, known_csts, i)
1373 || VEC_index (tree, known_binfos, i))
1374 continue;
1376 for (val = lat->values; val; val = val->next)
1378 int time, size, time_benefit;
1380 if (TREE_CODE (val->value) != TREE_BINFO)
1382 VEC_replace (tree, known_csts, i, val->value);
1383 VEC_replace (tree, known_binfos, i, NULL_TREE);
1384 emc = estimate_move_cost (TREE_TYPE (val->value));
1386 else if (lat->virt_call)
1388 VEC_replace (tree, known_csts, i, NULL_TREE);
1389 VEC_replace (tree, known_binfos, i, val->value);
1390 emc = 0;
1392 else
1393 continue;
1395 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1396 &size, &time);
1397 time_benefit = base_time - time
1398 + devirtualization_time_bonus (node, known_csts, known_binfos)
1399 + removable_params_cost + emc;
1401 gcc_checking_assert (size >= 0);
1402 /* The inliner-heuristics based estimates may think that in certain
1403 contexts some functions do not have any size at all but we want
1404 all specializations to have at least a tiny cost, if for no other reason
1405 than to avoid dividing by zero. */
1406 if (size == 0)
1407 size = 1;
1409 if (dump_file && (dump_flags & TDF_DETAILS))
1411 fprintf (dump_file, " - estimates for value ");
1412 print_ipcp_constant_value (dump_file, val->value);
1413 fprintf (dump_file, " for parameter ");
1414 print_generic_expr (dump_file, ipa_get_param (info, i), 0);
1415 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
1416 time_benefit, size);
1419 val->local_time_benefit = time_benefit;
1420 val->local_size_cost = size;
1424 VEC_free (tree, heap, known_csts);
1425 VEC_free (tree, heap, known_binfos);
1429 /* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
1430 topological sort of values. */
1432 static void
1433 add_val_to_toposort (struct ipcp_value *cur_val)
1435 static int dfs_counter = 0;
1436 static struct ipcp_value *stack;
1437 struct ipcp_value_source *src;
1439 if (cur_val->dfs)
1440 return;
1442 dfs_counter++;
1443 cur_val->dfs = dfs_counter;
1444 cur_val->low_link = dfs_counter;
1446 cur_val->topo_next = stack;
1447 stack = cur_val;
1448 cur_val->on_stack = true;
1450 for (src = cur_val->sources; src; src = src->next)
1451 if (src->val)
1453 if (src->val->dfs == 0)
1455 add_val_to_toposort (src->val);
1456 if (src->val->low_link < cur_val->low_link)
1457 cur_val->low_link = src->val->low_link;
1459 else if (src->val->on_stack
1460 && src->val->dfs < cur_val->low_link)
1461 cur_val->low_link = src->val->dfs;
1464 if (cur_val->dfs == cur_val->low_link)
1466 struct ipcp_value *v, *scc_list = NULL;
1470 v = stack;
1471 stack = v->topo_next;
1472 v->on_stack = false;
1474 v->scc_next = scc_list;
1475 scc_list = v;
1477 while (v != cur_val);
1479 cur_val->topo_next = values_topo;
1480 values_topo = cur_val;
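/* The recursion above is essentially Tarjan's strongly connected components
   algorithm applied to the dependency graph of values induced by pass-through
   and ancestor jump functions.  Each completed SCC is chained through scc_next
   and its representative is prepended to values_topo, so the resulting list
   ends up topologically sorted.  */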
1484 /* Add all values in lattices associated with NODE to the topological sort if
1485 they are not there yet. */
1487 static void
1488 add_all_node_vals_to_toposort (struct cgraph_node *node)
1490 struct ipa_node_params *info = IPA_NODE_REF (node);
1491 int i, count = ipa_get_param_count (info);
1493 for (i = 0; i < count ; i++)
1495 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
1496 struct ipcp_value *val;
1498 if (lat->bottom || !lat->values)
1499 continue;
1500 for (val = lat->values; val; val = val->next)
1501 add_val_to_toposort (val);
1505 /* One pass of constant propagation along the call graph edges, from callers
1506 to callees (requires topological ordering in TOPO), iterating over strongly
1507 connected components. */
1509 static void
1510 propagate_constants_topo (struct topo_info *topo)
1512 int i;
1514 for (i = topo->nnodes - 1; i >= 0; i--)
1516 struct cgraph_node *v, *node = topo->order[i];
1517 struct ipa_dfs_info *node_dfs_info;
1519 if (!cgraph_function_with_gimple_body_p (node))
1520 continue;
1522 node_dfs_info = (struct ipa_dfs_info *) node->symbol.aux;
1523 /* First, iteratively propagate within the strongly connected component
1524 until all lattices stabilize. */
1525 v = node_dfs_info->next_cycle;
1526 while (v)
1528 push_node_to_stack (topo, v);
1529 v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle;
1532 v = node;
1533 while (v)
1535 struct cgraph_edge *cs;
1537 for (cs = v->callees; cs; cs = cs->next_callee)
1538 if (edge_within_scc (cs)
1539 && propagate_constants_accross_call (cs))
1540 push_node_to_stack (topo, cs->callee);
1541 v = pop_node_from_stack (topo);
1544 /* Afterwards, propagate along edges leading out of the SCC, calculate
1545 the local effects of the discovered constants and add all valid values to
1546 the topological sort. */
1547 v = node;
1548 while (v)
1550 struct cgraph_edge *cs;
1552 estimate_local_effects (v);
1553 add_all_node_vals_to_toposort (v);
1554 for (cs = v->callees; cs; cs = cs->next_callee)
1555 if (!edge_within_scc (cs))
1556 propagate_constants_accross_call (cs);
1558 v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle;
1564 /* Return the sum of A and B if neither of them is bigger than INT_MAX/2,
1565 otherwise return the bigger of the two. */
1567 static int
1568 safe_add (int a, int b)
1570 if (a > INT_MAX/2 || b > INT_MAX/2)
1571 return a > b ? a : b;
1572 else
1573 return a + b;
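/* For instance, safe_add (INT_MAX, 10) yields INT_MAX rather than an
   overflowed sum because the first operand exceeds INT_MAX/2.  */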
1577 /* Propagate the estimated effects of individual values along the topological
1578 order from the dependent values to those they depend on. */
1580 static void
1581 propagate_effects (void)
1583 struct ipcp_value *base;
1585 for (base = values_topo; base; base = base->topo_next)
1587 struct ipcp_value_source *src;
1588 struct ipcp_value *val;
1589 int time = 0, size = 0;
1591 for (val = base; val; val = val->scc_next)
1593 time = safe_add (time,
1594 val->local_time_benefit + val->prop_time_benefit);
1595 size = safe_add (size, val->local_size_cost + val->prop_size_cost);
1598 for (val = base; val; val = val->scc_next)
1599 for (src = val->sources; src; src = src->next)
1600 if (src->val
1601 && cgraph_maybe_hot_edge_p (src->cs))
1603 src->val->prop_time_benefit = safe_add (time,
1604 src->val->prop_time_benefit);
1605 src->val->prop_size_cost = safe_add (size,
1606 src->val->prop_size_cost);
1612 /* Propagate constants, binfos and their effects from the summaries
1613 interprocedurally. */
1615 static void
1616 ipcp_propagate_stage (struct topo_info *topo)
1618 struct cgraph_node *node;
1620 if (dump_file)
1621 fprintf (dump_file, "\n Propagating constants:\n\n");
1623 if (in_lto_p)
1624 ipa_update_after_lto_read ();
1627 FOR_EACH_DEFINED_FUNCTION (node)
1629 struct ipa_node_params *info = IPA_NODE_REF (node);
1631 determine_versionability (node);
1632 if (cgraph_function_with_gimple_body_p (node))
1634 info->lattices = XCNEWVEC (struct ipcp_lattice,
1635 ipa_get_param_count (info));
1636 initialize_node_lattices (node);
1638 if (node->count > max_count)
1639 max_count = node->count;
1640 overall_size += inline_summary (node)->self_size;
1643 max_new_size = overall_size;
1644 if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
1645 max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
1646 max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;
1648 if (dump_file)
1649 fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
1650 overall_size, max_new_size);
1652 propagate_constants_topo (topo);
1653 #ifdef ENABLE_CHECKING
1654 ipcp_verify_propagated_values ();
1655 #endif
1656 propagate_effects ();
1658 if (dump_file)
1660 fprintf (dump_file, "\nIPA lattices after all propagation:\n");
1661 print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
1665 /* Discover indirect outgoing edges from NODE, which is a new clone with known
1666 KNOWN_VALS, whose targets can now be determined, and make them direct. */
1668 static void
1669 ipcp_discover_new_direct_edges (struct cgraph_node *node,
1670 VEC (tree, heap) *known_vals)
1672 struct cgraph_edge *ie, *next_ie;
1674 for (ie = node->indirect_calls; ie; ie = next_ie)
1676 tree target;
1678 next_ie = ie->next_callee;
1679 target = ipa_get_indirect_edge_target (ie, known_vals, NULL);
1680 if (target)
1681 ipa_make_edge_direct_to_target (ie, target);
1685 /* Vector of pointers which form linked lists of clones of an original cgraph
1686 edge. */
1688 static VEC (cgraph_edge_p, heap) *next_edge_clone;
1690 static inline void
1691 grow_next_edge_clone_vector (void)
1693 if (VEC_length (cgraph_edge_p, next_edge_clone)
1694 <= (unsigned) cgraph_edge_max_uid)
1695 VEC_safe_grow_cleared (cgraph_edge_p, heap, next_edge_clone,
1696 cgraph_edge_max_uid + 1);
1699 /* Edge duplication hook to grow the appropriate linked list in
1700 next_edge_clone. */
1702 static void
1703 ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
1704 __attribute__((unused)) void *data)
1706 grow_next_edge_clone_vector ();
1707 VEC_replace (cgraph_edge_p, next_edge_clone, dst->uid,
1708 VEC_index (cgraph_edge_p, next_edge_clone, src->uid));
1709 VEC_replace (cgraph_edge_p, next_edge_clone, src->uid, dst);
1712 /* Get the next clone in the linked list of clones of an edge. */
1714 static inline struct cgraph_edge *
1715 get_next_cgraph_edge_clone (struct cgraph_edge *cs)
1717 return VEC_index (cgraph_edge_p, next_edge_clone, cs->uid);
1720 /* Return true if edge CS does bring about the value described by SRC. */
1722 static bool
1723 cgraph_edge_brings_value_p (struct cgraph_edge *cs,
1724 struct ipcp_value_source *src)
1726 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1728 if (IPA_NODE_REF (cs->callee)->ipcp_orig_node
1729 || caller_info->node_dead)
1730 return false;
1731 if (!src->val)
1732 return true;
1734 if (caller_info->ipcp_orig_node)
1736 tree t = VEC_index (tree, caller_info->known_vals, src->index);
1737 return (t != NULL_TREE
1738 && values_equal_for_ipcp_p (src->val->value, t));
1740 else
1742 struct ipcp_lattice *lat = ipa_get_lattice (caller_info, src->index);
1743 if (ipa_lat_is_single_const (lat)
1744 && values_equal_for_ipcp_p (src->val->value, lat->values->value))
1745 return true;
1746 else
1747 return false;
1751 /* Given VAL, iterate over all its sources and if they still hold, add their
1752 edge frequencies, profile counts and their number into *FREQ_SUM, *COUNT_SUM
1753 and *CALLER_COUNT respectively, and return true if any of them is hot. */
1755 static bool
1756 get_info_about_necessary_edges (struct ipcp_value *val, int *freq_sum,
1757 gcov_type *count_sum, int *caller_count)
1759 struct ipcp_value_source *src;
1760 int freq = 0, count = 0;
1761 gcov_type cnt = 0;
1762 bool hot = false;
1764 for (src = val->sources; src; src = src->next)
1766 struct cgraph_edge *cs = src->cs;
1767 while (cs)
1769 if (cgraph_edge_brings_value_p (cs, src))
1771 count++;
1772 freq += cs->frequency;
1773 cnt += cs->count;
1774 hot |= cgraph_maybe_hot_edge_p (cs);
1776 cs = get_next_cgraph_edge_clone (cs);
1780 *freq_sum = freq;
1781 *count_sum = cnt;
1782 *caller_count = count;
1783 return hot;
1786 /* Return a vector of incoming edges that do bring value VAL. It is assumed
1787 their number is known and equal to CALLER_COUNT. */
1789 static VEC (cgraph_edge_p,heap) *
1790 gather_edges_for_value (struct ipcp_value *val, int caller_count)
1792 struct ipcp_value_source *src;
1793 VEC (cgraph_edge_p,heap) *ret;
1795 ret = VEC_alloc (cgraph_edge_p, heap, caller_count);
1796 for (src = val->sources; src; src = src->next)
1798 struct cgraph_edge *cs = src->cs;
1799 while (cs)
1801 if (cgraph_edge_brings_value_p (cs, src))
1802 VEC_quick_push (cgraph_edge_p, ret, cs);
1803 cs = get_next_cgraph_edge_clone (cs);
1807 return ret;
1810 /* Construct a replacement map for a known VALUE for a formal parameter PARM.
1811 Return it or NULL if for some reason it cannot be created. */
1813 static struct ipa_replace_map *
1814 get_replacement_map (tree value, tree parm)
1816 tree req_type = TREE_TYPE (parm);
1817 struct ipa_replace_map *replace_map;
1819 if (!useless_type_conversion_p (req_type, TREE_TYPE (value)))
1821 if (fold_convertible_p (req_type, value))
1822 value = fold_build1 (NOP_EXPR, req_type, value);
1823 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (value)))
1824 value = fold_build1 (VIEW_CONVERT_EXPR, req_type, value);
1825 else
1827 if (dump_file)
1829 fprintf (dump_file, " const ");
1830 print_generic_expr (dump_file, value, 0);
1831 fprintf (dump_file, " can't be converted to param ");
1832 print_generic_expr (dump_file, parm, 0);
1833 fprintf (dump_file, "\n");
1835 return NULL;
1839 replace_map = ggc_alloc_ipa_replace_map ();
1840 if (dump_file)
1842 fprintf (dump_file, " replacing param ");
1843 print_generic_expr (dump_file, parm, 0);
1844 fprintf (dump_file, " with const ");
1845 print_generic_expr (dump_file, value, 0);
1846 fprintf (dump_file, "\n");
1848 replace_map->old_tree = parm;
1849 replace_map->new_tree = value;
1850 replace_map->replace_p = true;
1851 replace_map->ref_p = false;
1853 return replace_map;
1856 /* Dump new profiling counts. */
1858 static void
1859 dump_profile_updates (struct cgraph_node *orig_node,
1860 struct cgraph_node *new_node)
1862 struct cgraph_edge *cs;
1864 fprintf (dump_file, " setting count of the specialized node to "
1865 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) new_node->count);
1866 for (cs = new_node->callees; cs ; cs = cs->next_callee)
1867 fprintf (dump_file, " edge to %s has count "
1868 HOST_WIDE_INT_PRINT_DEC "\n",
1869 cgraph_node_name (cs->callee), (HOST_WIDE_INT) cs->count);
1871 fprintf (dump_file, " setting count of the original node to "
1872 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) orig_node->count);
1873 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
1874 fprintf (dump_file, " edge to %s is left with "
1875 HOST_WIDE_INT_PRINT_DEC "\n",
1876 cgraph_node_name (cs->callee), (HOST_WIDE_INT) cs->count);
1879 /* After a specialized NEW_NODE version of ORIG_NODE has been created, update
1880 their profile information to reflect this. */
1882 static void
1883 update_profiling_info (struct cgraph_node *orig_node,
1884 struct cgraph_node *new_node)
1886 struct cgraph_edge *cs;
1887 struct caller_statistics stats;
1888 gcov_type new_sum, orig_sum;
1889 gcov_type remainder, orig_node_count = orig_node->count;
1891 if (orig_node_count == 0)
1892 return;
1894 init_caller_stats (&stats);
1895 cgraph_for_node_and_aliases (orig_node, gather_caller_stats, &stats, false);
1896 orig_sum = stats.count_sum;
1897 init_caller_stats (&stats);
1898 cgraph_for_node_and_aliases (new_node, gather_caller_stats, &stats, false);
1899 new_sum = stats.count_sum;
1901 if (orig_node_count < orig_sum + new_sum)
1903 if (dump_file)
1904 fprintf (dump_file, " Problem: node %s/%i has too low count "
1905 HOST_WIDE_INT_PRINT_DEC " while the sum of incoming "
1906 "counts is " HOST_WIDE_INT_PRINT_DEC "\n",
1907 cgraph_node_name (orig_node), orig_node->uid,
1908 (HOST_WIDE_INT) orig_node_count,
1909 (HOST_WIDE_INT) (orig_sum + new_sum));
1911 orig_node_count = (orig_sum + new_sum) * 12 / 10;
1912 if (dump_file)
1913 fprintf (dump_file, " proceeding by pretending it was "
1914 HOST_WIDE_INT_PRINT_DEC "\n",
1915 (HOST_WIDE_INT) orig_node_count);
1918 new_node->count = new_sum;
1919 remainder = orig_node_count - new_sum;
1920 orig_node->count = remainder;
1922 for (cs = new_node->callees; cs ; cs = cs->next_callee)
1923 if (cs->frequency)
1924 cs->count = cs->count * (new_sum * REG_BR_PROB_BASE
1925 / orig_node_count) / REG_BR_PROB_BASE;
1926 else
1927 cs->count = 0;
1929 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
1930 cs->count = cs->count * (remainder * REG_BR_PROB_BASE
1931 / orig_node_count) / REG_BR_PROB_BASE;
1933 if (dump_file)
1934 dump_profile_updates (orig_node, new_node);
1937 /* Update the respective profile of specialized NEW_NODE and the original
1938 ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
1939 have been redirected to the specialized version. */
1941 static void
1942 update_specialized_profile (struct cgraph_node *new_node,
1943 struct cgraph_node *orig_node,
1944 gcov_type redirected_sum)
1946 struct cgraph_edge *cs;
1947 gcov_type new_node_count, orig_node_count = orig_node->count;
1949 if (dump_file)
1950 fprintf (dump_file, " the sum of counts of redirected edges is "
1951 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) redirected_sum);
1952 if (orig_node_count == 0)
1953 return;
1955 gcc_assert (orig_node_count >= redirected_sum);
1957 new_node_count = new_node->count;
1958 new_node->count += redirected_sum;
1959 orig_node->count -= redirected_sum;
1961 for (cs = new_node->callees; cs ; cs = cs->next_callee)
1962 if (cs->frequency)
1963 cs->count += cs->count * redirected_sum / new_node_count;
1964 else
1965 cs->count = 0;
1967 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
1969 gcov_type dec = cs->count * (redirected_sum * REG_BR_PROB_BASE
1970 / orig_node_count) / REG_BR_PROB_BASE;
1971 if (dec < cs->count)
1972 cs->count -= dec;
1973 else
1974 cs->count = 0;
1977 if (dump_file)
1978 dump_profile_updates (orig_node, new_node);
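/* Worked example (made-up numbers): if the new node's count was 500 before
   the update and redirected_sum is 250, one of its callee edges with count
   100 grows to 100 + 100 * 250 / 500 == 150.  With orig_node_count == 1000,
   an edge of the original node with count 200 is decreased by
   200 * (250 * REG_BR_PROB_BASE / 1000) / REG_BR_PROB_BASE == 50, and the
   `dec < cs->count' guard merely keeps counts from going negative.  */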
1981 /* Create a specialized version of NODE with known constants and types of
1982 parameters in KNOWN_VALS and redirect all edges in CALLERS to it. */
1984 static struct cgraph_node *
1985 create_specialized_node (struct cgraph_node *node,
1986 VEC (tree, heap) *known_vals,
1987 VEC (cgraph_edge_p,heap) *callers)
1989 struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
1990 VEC (ipa_replace_map_p,gc)* replace_trees = NULL;
1991 struct cgraph_node *new_node;
1992 int i, count = ipa_get_param_count (info);
1993 bitmap args_to_skip;
1995 gcc_assert (!info->ipcp_orig_node);
1997 if (node->local.can_change_signature)
1999 args_to_skip = BITMAP_GGC_ALLOC ();
2000 for (i = 0; i < count; i++)
2002 tree t = VEC_index (tree, known_vals, i);
2004 if ((t && TREE_CODE (t) != TREE_BINFO)
2005 || !ipa_is_param_used (info, i))
2006 bitmap_set_bit (args_to_skip, i);
2009 else
2011 args_to_skip = NULL;
2012 if (dump_file && (dump_flags & TDF_DETAILS))
2013 fprintf (dump_file, " cannot change function signature\n");
2016 for (i = 0; i < count ; i++)
2018 tree t = VEC_index (tree, known_vals, i);
2019 if (t && TREE_CODE (t) != TREE_BINFO)
2021 struct ipa_replace_map *replace_map;
2023 replace_map = get_replacement_map (t, ipa_get_param (info, i));
2024 if (replace_map)
2025 VEC_safe_push (ipa_replace_map_p, gc, replace_trees, replace_map);
2029 new_node = cgraph_create_virtual_clone (node, callers, replace_trees,
2030 args_to_skip, "constprop");
2031 if (dump_file && (dump_flags & TDF_DETAILS))
2032 fprintf (dump_file, " the new node is %s/%i.\n",
2033 cgraph_node_name (new_node), new_node->uid);
2034 gcc_checking_assert (ipa_node_params_vector
2035 && (VEC_length (ipa_node_params_t,
2036 ipa_node_params_vector)
2037 > (unsigned) cgraph_max_uid));
2038 update_profiling_info (node, new_node);
2039 new_info = IPA_NODE_REF (new_node);
2040 new_info->ipcp_orig_node = node;
2041 new_info->known_vals = known_vals;
2043 ipcp_discover_new_direct_edges (new_node, known_vals);
2045 VEC_free (cgraph_edge_p, heap, callers);
2046 return new_node;
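/* Illustration (hypothetical function, editorial sketch): if every edge in
   CALLERS passes the constant 8 for the second parameter of

     static int f (int a, int b) { return a / b; }

   then KNOWN_VALS holds 8 at index 1.  When the signature may change, bit 1
   of args_to_skip is set, so the virtual clone created above takes only A
   (its name roughly carries a ".constprop" suffix), a replace map substitutes
   8 for B inside its body, and the edges in CALLERS are redirected to it.  */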
2049 /* Given a NODE and a subset of its CALLERS, try to populate blank slots in
2050 KNOWN_VALS with constants and types that are also known for all of the
2051 CALLERS. */
2053 static void
2054 find_more_values_for_callers_subset (struct cgraph_node *node,
2055 VEC (tree, heap) *known_vals,
2056 VEC (cgraph_edge_p,heap) *callers)
2058 struct ipa_node_params *info = IPA_NODE_REF (node);
2059 int i, count = ipa_get_param_count (info);
2061 for (i = 0; i < count ; i++)
2063 struct cgraph_edge *cs;
2064 tree newval = NULL_TREE;
2065 int j;
2067 if (ipa_get_lattice (info, i)->bottom
2068 || VEC_index (tree, known_vals, i))
2069 continue;
2071 FOR_EACH_VEC_ELT (cgraph_edge_p, callers, j, cs)
2073 struct ipa_jump_func *jump_func;
2074 tree t;
2076 if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
2078 newval = NULL_TREE;
2079 break;
2081 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
2082 t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func);
2083 if (!t
2084 || (newval
2085 && !values_equal_for_ipcp_p (t, newval)))
2087 newval = NULL_TREE;
2088 break;
2090 else
2091 newval = t;
2094 if (newval)
2096 if (dump_file && (dump_flags & TDF_DETAILS))
2098 fprintf (dump_file, " adding an extra known value ");
2099 print_ipcp_constant_value (dump_file, newval);
2100 fprintf (dump_file, " for parameter ");
2101 print_generic_expr (dump_file, ipa_get_param (info, i), 0);
2102 fprintf (dump_file, "\n");
2105 VEC_replace (tree, known_vals, i, newval);
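/* Illustration (hypothetical values): suppose parameter 1 of NODE has no
   context-independent constant, but the jump function of each edge in
   CALLERS evaluates through ipa_value_from_jfunc to the same constant 7.
   The loop above then stores 7 into KNOWN_VALS, so the clone about to be
   created for exactly these callers may assume that value even though it
   does not hold for every caller of NODE.  */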
2110 /* Given an original NODE and a VAL for which we have already created a
2111 specialized clone, look whether there are incoming edges that still lead
2112 into the old node but now also bring the requested value and also conform to
2113 all other criteria such that they can be redirected to the specialized node.
2114 This function can therefore redirect the final edge in a SCC. */
2116 static void
2117 perhaps_add_new_callers (struct cgraph_node *node, struct ipcp_value *val)
2119 struct ipa_node_params *dest_info = IPA_NODE_REF (val->spec_node);
2120 struct ipcp_value_source *src;
2121 int count = ipa_get_param_count (dest_info);
2122 gcov_type redirected_sum = 0;
2124 for (src = val->sources; src; src = src->next)
2126 struct cgraph_edge *cs = src->cs;
2127 while (cs)
2129 enum availability availability;
2130 bool insufficient = false;
2132 if (cgraph_function_node (cs->callee, &availability) == node
2133 && availability > AVAIL_OVERWRITABLE
2134 && cgraph_edge_brings_value_p (cs, src))
2136 struct ipa_node_params *caller_info;
2137 struct ipa_edge_args *args;
2138 int i;
2140 caller_info = IPA_NODE_REF (cs->caller);
2141 args = IPA_EDGE_REF (cs);
2142 for (i = 0; i < count; i++)
2144 struct ipa_jump_func *jump_func;
2145 tree val, t;
2147 val = VEC_index (tree, dest_info->known_vals, i);
2148 if (!val)
2149 continue;
2151 if (i >= ipa_get_cs_argument_count (args))
2153 insufficient = true;
2154 break;
2156 jump_func = ipa_get_ith_jump_func (args, i);
2157 t = ipa_value_from_jfunc (caller_info, jump_func);
2158 if (!t || !values_equal_for_ipcp_p (val, t))
2160 insufficient = true;
2161 break;
2165 if (!insufficient)
2167 if (dump_file)
2168 fprintf (dump_file, " - adding an extra caller %s/%i"
2169 " of %s/%i\n",
2170 xstrdup (cgraph_node_name (cs->caller)),
2171 cs->caller->uid,
2172 xstrdup (cgraph_node_name (val->spec_node)),
2173 val->spec_node->uid);
2175 cgraph_redirect_edge_callee (cs, val->spec_node);
2176 redirected_sum += cs->count;
2179 cs = get_next_cgraph_edge_clone (cs);
2183 if (redirected_sum)
2184 update_specialized_profile (val->spec_node, node, redirected_sum);
2188 /* Copy KNOWN_BINFOS to KNOWN_VALS. */
2190 static void
2191 move_binfos_to_values (VEC (tree, heap) *known_vals,
2192 VEC (tree, heap) *known_binfos)
2194 tree t;
2195 int i;
2197 for (i = 0; VEC_iterate (tree, known_binfos, i, t); i++)
2198 if (t)
2199 VEC_replace (tree, known_vals, i, t);
2203 /* Decide whether and what specialized clones of NODE should be created. */
2205 static bool
2206 decide_whether_version_node (struct cgraph_node *node)
2208 struct ipa_node_params *info = IPA_NODE_REF (node);
2209 int i, count = ipa_get_param_count (info);
2210 VEC (tree, heap) *known_csts, *known_binfos;
2211 bool ret = false;
2213 if (count == 0)
2214 return false;
2216 if (dump_file && (dump_flags & TDF_DETAILS))
2217 fprintf (dump_file, "\nEvaluating opportunities for %s/%i.\n",
2218 cgraph_node_name (node), node->uid);
2220 gather_context_independent_values (info, &known_csts, &known_binfos,
2221 NULL);
2223 for (i = 0; i < count ; i++)
2225 struct ipcp_lattice *lat = ipa_get_lattice (info, i);
2226 struct ipcp_value *val;
2228 if (lat->bottom
2229 || VEC_index (tree, known_csts, i)
2230 || VEC_index (tree, known_binfos, i))
2231 continue;
2233 for (val = lat->values; val; val = val->next)
2235 int freq_sum, caller_count;
2236 gcov_type count_sum;
2237 VEC (cgraph_edge_p, heap) *callers;
2238 VEC (tree, heap) *kv;
2240 if (val->spec_node)
2242 perhaps_add_new_callers (node, val);
2243 continue;
2245 else if (val->local_size_cost + overall_size > max_new_size)
2247 if (dump_file && (dump_flags & TDF_DETAILS))
2248 fprintf (dump_file, " Ignoring candidate value because "
2249 "max_new_size would be reached with %li.\n",
2250 val->local_size_cost + overall_size);
2251 continue;
2253 else if (!get_info_about_necessary_edges (val, &freq_sum, &count_sum,
2254 &caller_count))
2255 continue;
2257 if (dump_file && (dump_flags & TDF_DETAILS))
2259 fprintf (dump_file, " - considering value ");
2260 print_ipcp_constant_value (dump_file, val->value);
2261 fprintf (dump_file, " for parameter ");
2262 print_generic_expr (dump_file, ipa_get_param (info, i), 0);
2263 fprintf (dump_file, " (caller_count: %i)\n", caller_count);
2267 if (!good_cloning_opportunity_p (node, val->local_time_benefit,
2268 freq_sum, count_sum,
2269 val->local_size_cost)
2270 && !good_cloning_opportunity_p (node,
2271 val->local_time_benefit
2272 + val->prop_time_benefit,
2273 freq_sum, count_sum,
2274 val->local_size_cost
2275 + val->prop_size_cost))
2276 continue;
2278 if (dump_file)
2279 fprintf (dump_file, " Creating a specialized node of %s/%i.\n",
2280 cgraph_node_name (node), node->uid);
2282 callers = gather_edges_for_value (val, caller_count);
2283 kv = VEC_copy (tree, heap, known_csts);
2284 move_binfos_to_values (kv, known_binfos);
2285 VEC_replace (tree, kv, i, val->value);
2286 find_more_values_for_callers_subset (node, kv, callers);
2287 val->spec_node = create_specialized_node (node, kv, callers);
2288 overall_size += val->local_size_cost;
2289 info = IPA_NODE_REF (node);
2291 /* TODO: If for some lattice there is only one other known value
2292 left, make a special node for it too. */
2293 ret = true;
2295 VEC_replace (tree, kv, i, val->value);
2299 if (info->clone_for_all_contexts)
2301 VEC (cgraph_edge_p, heap) *callers;
2303 if (dump_file)
2304 fprintf (dump_file, " - Creating a specialized node of %s/%i "
2305 "for all known contexts.\n", cgraph_node_name (node),
2306 node->uid);
2308 callers = collect_callers_of_node (node);
2309 move_binfos_to_values (known_csts, known_binfos);
2310 create_specialized_node (node, known_csts, callers);
2311 info = IPA_NODE_REF (node);
2312 info->clone_for_all_contexts = false;
2313 ret = true;
2315 else
2316 VEC_free (tree, heap, known_csts);
2318 VEC_free (tree, heap, known_binfos);
2319 return ret;
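/* Illustration of the size guard above (made-up numbers): with
   overall_size == 900 and max_new_size == 1000, a candidate value whose
   local_size_cost is 150 is skipped before its profitability is even
   evaluated, because 900 + 150 exceeds the limit; a candidate costing 80
   would still be weighed by good_cloning_opportunity_p.  */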
2322 /* Transitively mark all callees of NODE within the same SCC as not dead. */
2324 static void
2325 spread_undeadness (struct cgraph_node *node)
2327 struct cgraph_edge *cs;
2329 for (cs = node->callees; cs; cs = cs->next_callee)
2330 if (edge_within_scc (cs))
2332 struct cgraph_node *callee;
2333 struct ipa_node_params *info;
2335 callee = cgraph_function_node (cs->callee, NULL);
2336 info = IPA_NODE_REF (callee);
2338 if (info->node_dead)
2340 info->node_dead = 0;
2341 spread_undeadness (callee);
2346 /* Return true if NODE has a caller from outside of its SCC that is not
2347 dead. Worker callback for cgraph_for_node_and_aliases. */
2349 static bool
2350 has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
2351 void *data ATTRIBUTE_UNUSED)
2353 struct cgraph_edge *cs;
2355 for (cs = node->callers; cs; cs = cs->next_caller)
2356 if (cs->caller->thunk.thunk_p
2357 && cgraph_for_node_and_aliases (cs->caller,
2358 has_undead_caller_from_outside_scc_p,
2359 NULL, true))
2360 return true;
2361 else if (!edge_within_scc (cs)
2362 && !IPA_NODE_REF (cs->caller)->node_dead)
2363 return true;
2364 return false;
2368 /* Identify nodes within the same SCC as NODE which are no longer needed
2369 because of new clones and will be removed as unreachable. */
2371 static void
2372 identify_dead_nodes (struct cgraph_node *node)
2374 struct cgraph_node *v;
2375 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
2376 if (cgraph_will_be_removed_from_program_if_no_direct_calls (v)
2377 && !cgraph_for_node_and_aliases (v,
2378 has_undead_caller_from_outside_scc_p,
2379 NULL, true))
2380 IPA_NODE_REF (v)->node_dead = 1;
2382 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
2383 if (!IPA_NODE_REF (v)->node_dead)
2384 spread_undeadness (v);
2386 if (dump_file && (dump_flags & TDF_DETAILS))
2388 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
2389 if (IPA_NODE_REF (v)->node_dead)
2390 fprintf (dump_file, " Marking node as dead: %s/%i.\n",
2391 cgraph_node_name (v), v->uid);
2395 /* The decision stage. Iterate over the topological order of call graph nodes
2396 TOPO and make specialized clones if deemed beneficial. */
2398 static void
2399 ipcp_decision_stage (struct topo_info *topo)
2401 int i;
2403 if (dump_file)
2404 fprintf (dump_file, "\nIPA decision stage:\n\n");
2406 for (i = topo->nnodes - 1; i >= 0; i--)
2408 struct cgraph_node *node = topo->order[i];
2409 bool change = false, iterate = true;
2411 while (iterate)
2413 struct cgraph_node *v;
2414 iterate = false;
2415 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
2416 if (cgraph_function_with_gimple_body_p (v)
2417 && ipcp_versionable_function_p (v))
2418 iterate |= decide_whether_version_node (v);
2420 change |= iterate;
2422 if (change)
2423 identify_dead_nodes (node);
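/* Example of why the inner loop iterates (editorial note): for two mutually
   recursive functions in one cycle, the pass that clones the first one may
   only afterwards allow perhaps_add_new_callers to redirect the remaining
   in-cycle edge to the other function's clone, so the cycle is re-examined
   until a full pass over it creates no further clone.  */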
2427 /* The IPCP driver. */
2429 static unsigned int
2430 ipcp_driver (void)
2432 struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
2433 struct topo_info topo;
2435 ipa_check_create_node_params ();
2436 ipa_check_create_edge_args ();
2437 grow_next_edge_clone_vector ();
2438 edge_duplication_hook_holder =
2439 cgraph_add_edge_duplication_hook (&ipcp_edge_duplication_hook, NULL);
2440 ipcp_values_pool = create_alloc_pool ("IPA-CP values",
2441 sizeof (struct ipcp_value), 32);
2442 ipcp_sources_pool = create_alloc_pool ("IPA-CP value sources",
2443 sizeof (struct ipcp_value_source), 64);
2444 if (dump_file)
2446 fprintf (dump_file, "\nIPA structures before propagation:\n");
2447 if (dump_flags & TDF_DETAILS)
2448 ipa_print_all_params (dump_file);
2449 ipa_print_all_jump_functions (dump_file);
2452 /* Topological sort. */
2453 build_toporder_info (&topo);
2454 /* Do the interprocedural propagation. */
2455 ipcp_propagate_stage (&topo);
2456 /* Decide what constant propagation and cloning should be performed. */
2457 ipcp_decision_stage (&topo);
2459 /* Free all IPCP structures. */
2460 free_toporder_info (&topo);
2461 VEC_free (cgraph_edge_p, heap, next_edge_clone);
2462 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2463 ipa_free_all_structures_after_ipa_cp ();
2464 if (dump_file)
2465 fprintf (dump_file, "\nIPA constant propagation end\n");
2466 return 0;
2469 /* Initialization and computation of IPCP data structures. This is the initial
2470 intraprocedural analysis of functions, which gathers information to be
2471 propagated later on. */
2473 static void
2474 ipcp_generate_summary (void)
2476 struct cgraph_node *node;
2478 if (dump_file)
2479 fprintf (dump_file, "\nIPA constant propagation start:\n");
2480 ipa_register_cgraph_hooks ();
2482 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
2484 node->local.versionable
2485 = tree_versionable_function_p (node->symbol.decl);
2486 ipa_analyze_node (node);
2490 /* Write ipcp summary for nodes in SET. */
2492 static void
2493 ipcp_write_summary (cgraph_node_set set,
2494 varpool_node_set vset ATTRIBUTE_UNUSED)
2496 ipa_prop_write_jump_functions (set);
2499 /* Read ipcp summary. */
2501 static void
2502 ipcp_read_summary (void)
2504 ipa_prop_read_jump_functions ();
2507 /* Gate for IPCP optimization. */
2509 static bool
2510 cgraph_gate_cp (void)
2512 /* FIXME: We should remove the optimize check after we ensure we never run
2513 IPA passes when not optimizing. */
2514 return flag_ipa_cp && optimize;
2517 struct ipa_opt_pass_d pass_ipa_cp =
2520 IPA_PASS,
2521 "cp", /* name */
2522 cgraph_gate_cp, /* gate */
2523 ipcp_driver, /* execute */
2524 NULL, /* sub */
2525 NULL, /* next */
2526 0, /* static_pass_number */
2527 TV_IPA_CONSTANT_PROP, /* tv_id */
2528 0, /* properties_required */
2529 0, /* properties_provided */
2530 0, /* properties_destroyed */
2531 0, /* todo_flags_start */
2532 TODO_dump_symtab |
2533 TODO_remove_functions | TODO_ggc_collect /* todo_flags_finish */
2535 ipcp_generate_summary, /* generate_summary */
2536 ipcp_write_summary, /* write_summary */
2537 ipcp_read_summary, /* read_summary */
2538 NULL, /* write_optimization_summary */
2539 NULL, /* read_optimization_summary */
2540 NULL, /* stmt_fixup */
2541 0, /* TODOs */
2542 NULL, /* function_transform */
2543 NULL, /* variable_transform */