2014-01-24 Ed Smith-Rowland <3dw4rd@verizon.net>
[official-gcc.git] / gcc / ipa-cp.c
blob10fa4b6c23679973c9fbd2ffe6bbaa3452060c0d
1 /* Interprocedural constant propagation
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
4 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
5 <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Interprocedural constant propagation (IPA-CP).
25 The goal of this transformation is to
27 1) discover functions which are always invoked with some arguments with the
28 same known constant values and modify the functions so that the
29 subsequent optimizations can take advantage of the knowledge, and
31 2) partial specialization - create specialized versions of functions
32 transformed in this way if some parameters are known constants only in
33 certain contexts but the estimated tradeoff between speedup and cost size
34 is deemed good.
36 The algorithm also propagates types and attempts to perform type based
37 devirtualization. Types are propagated much like constants.
39 The algorithm basically consists of three stages. In the first, functions
40 are analyzed one at a time and jump functions are constructed for all known
41 call-sites. In the second phase, the pass propagates information from the
42 jump functions across the call to reveal what values are available at what
43 call sites, performs estimations of effects of known values on functions and
44 their callees, and finally decides what specialized extra versions should be
45 created. In the third, the special versions materialize and appropriate
46 calls are redirected.
48 The algorithm used is to a certain extent based on "Interprocedural Constant
49 Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
50 Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
51 Cooper, Mary W. Hall, and Ken Kennedy.
54 First stage - intraprocedural analysis
55 =======================================
57 This phase computes jump_function and modification flags.
59 A jump function for a call-site represents the values passed as an actual
60 arguments of a given call-site. In principle, there are three types of
61 values:
63 Pass through - the caller's formal parameter is passed as an actual
64 argument, plus an operation on it can be performed.
65 Constant - a constant is passed as an actual argument.
66 Unknown - neither of the above.
68 All jump function types are described in detail in ipa-prop.h, together with
69 the data structures that represent them and methods of accessing them.
71 ipcp_generate_summary() is the main function of the first stage.
73 Second stage - interprocedural analysis
74 ========================================
76 This stage is itself divided into two phases. In the first, we propagate
77 known values over the call graph, in the second, we make cloning decisions.
78 It uses a different algorithm than the original Callahan's paper.
80 First, we traverse the functions topologically from callers to callees and,
81 for each strongly connected component (SCC), we propagate constants
82 according to previously computed jump functions. We also record what known
83 values depend on other known values and estimate local effects. Finally, we
84 propagate cumulative information about these effects from dependent values
85 to those on which they depend.
87 Second, we again traverse the call graph in the same topological order and
88 make clones for functions which we know are called with the same values in
89 all contexts and decide about extra specialized clones of functions just for
90 some contexts - these decisions are based on both local estimates and
91 cumulative estimates propagated from callees.
93    ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
94    second stage.
96 Third phase - materialization of clones, call statement updates.
97 ============================================
99 This stage is currently performed by call graph code (mainly in cgraphunit.c
100 and tree-inline.c) according to instructions inserted to the call graph by
101 the second stage. */
103 #include "config.h"
104 #include "system.h"
105 #include "coretypes.h"
106 #include "tree.h"
107 #include "gimple-fold.h"
108 #include "gimple-expr.h"
109 #include "target.h"
110 #include "ipa-prop.h"
111 #include "bitmap.h"
112 #include "tree-pass.h"
113 #include "flags.h"
114 #include "diagnostic.h"
115 #include "tree-pretty-print.h"
116 #include "tree-inline.h"
117 #include "params.h"
118 #include "ipa-inline.h"
119 #include "ipa-utils.h"
121 struct ipcp_value;
123 /* Describes a particular source for an IPA-CP value. */
125 struct ipcp_value_source
127 /* Aggregate offset of the source, negative if the source is scalar value of
128 the argument itself. */
129 HOST_WIDE_INT offset;
130 /* The incoming edge that brought the value. */
131 struct cgraph_edge *cs;
132 /* If the jump function that resulted into his value was a pass-through or an
133 ancestor, this is the ipcp_value of the caller from which the described
134 value has been derived. Otherwise it is NULL. */
135 struct ipcp_value *val;
136 /* Next pointer in a linked list of sources of a value. */
137 struct ipcp_value_source *next;
138 /* If the jump function that resulted into his value was a pass-through or an
139 ancestor, this is the index of the parameter of the caller the jump
140 function references. */
141 int index;
144 /* Describes one particular value stored in struct ipcp_lattice. */
146 struct ipcp_value
148 /* The actual value for the given parameter. This is either an IPA invariant
149 or a TREE_BINFO describing a type that can be used for
150 devirtualization. */
151 tree value;
152 /* The list of sources from which this value originates. */
153 struct ipcp_value_source *sources;
154 /* Next pointers in a linked list of all values in a lattice. */
155 struct ipcp_value *next;
156 /* Next pointers in a linked list of values in a strongly connected component
157 of values. */
158 struct ipcp_value *scc_next;
159 /* Next pointers in a linked list of SCCs of values sorted topologically
160 according their sources. */
161 struct ipcp_value *topo_next;
162 /* A specialized node created for this value, NULL if none has been (so far)
163 created. */
164 struct cgraph_node *spec_node;
165 /* Depth first search number and low link for topological sorting of
166 values. */
167 int dfs, low_link;
168 /* Time benefit and size cost that specializing the function for this value
169 would bring about in this function alone. */
170 int local_time_benefit, local_size_cost;
171 /* Time benefit and size cost that specializing the function for this value
172 can bring about in it's callees (transitively). */
173 int prop_time_benefit, prop_size_cost;
174 /* True if this valye is currently on the topo-sort stack. */
175 bool on_stack;
/* Lattice describing potential values of a formal parameter of a function, or
   a part of an aggregate.  TOP is represented by a lattice with zero values
   and with contains_variable and bottom flags cleared.  BOTTOM is represented
   by a lattice with the bottom flag set.  In that case, values and
   contains_variable flag should be disregarded.  */

struct ipcp_lattice
{
  /* The list of known values and types in this lattice.  Note that values are
     not deallocated if a lattice is set to bottom because there may be value
     sources referencing them.  */
  struct ipcp_value *values;
  /* Number of known values and types in this lattice.  */
  int values_count;
  /* The lattice contains a variable component (in addition to values).  */
  bool contains_variable;
  /* The value of the lattice is bottom (i.e. variable and unusable for any
     propagation).  */
  bool bottom;
};
199 /* Lattice with an offset to describe a part of an aggregate. */
201 struct ipcp_agg_lattice : public ipcp_lattice
203 /* Offset that is being described by this lattice. */
204 HOST_WIDE_INT offset;
205 /* Size so that we don't have to re-compute it every time we traverse the
206 list. Must correspond to TYPE_SIZE of all lat values. */
207 HOST_WIDE_INT size;
208 /* Next element of the linked list. */
209 struct ipcp_agg_lattice *next;
212 /* Structure containing lattices for a parameter itself and for pieces of
213 aggregates that are passed in the parameter or by a reference in a parameter
214 plus some other useful flags. */
216 struct ipcp_param_lattices
218 /* Lattice describing the value of the parameter itself. */
219 struct ipcp_lattice itself;
220 /* Lattices describing aggregate parts. */
221 struct ipcp_agg_lattice *aggs;
222 /* Number of aggregate lattices */
223 int aggs_count;
224 /* True if aggregate data were passed by reference (as opposed to by
225 value). */
226 bool aggs_by_ref;
227 /* All aggregate lattices contain a variable component (in addition to
228 values). */
229 bool aggs_contain_variable;
230 /* The value of all aggregate lattices is bottom (i.e. variable and unusable
231 for any propagation). */
232 bool aggs_bottom;
234 /* There is a virtual call based on this parameter. */
235 bool virt_call;
238 /* Allocation pools for values and their sources in ipa-cp. */
240 alloc_pool ipcp_values_pool;
241 alloc_pool ipcp_sources_pool;
242 alloc_pool ipcp_agg_lattice_pool;
244 /* Maximal count found in program. */
246 static gcov_type max_count;
248 /* Original overall size of the program. */
250 static long overall_size, max_new_size;
252 /* Head of the linked list of topologically sorted values. */
254 static struct ipcp_value *values_topo;
256 /* Return the param lattices structure corresponding to the Ith formal
257 parameter of the function described by INFO. */
258 static inline struct ipcp_param_lattices *
259 ipa_get_parm_lattices (struct ipa_node_params *info, int i)
261 gcc_assert (i >= 0 && i < ipa_get_param_count (info));
262 gcc_checking_assert (!info->ipcp_orig_node);
263 gcc_checking_assert (info->lattices);
264 return &(info->lattices[i]);
267 /* Return the lattice corresponding to the scalar value of the Ith formal
268 parameter of the function described by INFO. */
269 static inline struct ipcp_lattice *
270 ipa_get_scalar_lat (struct ipa_node_params *info, int i)
272 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
273 return &plats->itself;
276 /* Return whether LAT is a lattice with a single constant and without an
277 undefined value. */
279 static inline bool
280 ipa_lat_is_single_const (struct ipcp_lattice *lat)
282 if (lat->bottom
283 || lat->contains_variable
284 || lat->values_count != 1)
285 return false;
286 else
287 return true;
290 /* Print V which is extracted from a value in a lattice to F. */
292 static void
293 print_ipcp_constant_value (FILE * f, tree v)
295 if (TREE_CODE (v) == TREE_BINFO)
297 fprintf (f, "BINFO ");
298 print_generic_expr (f, BINFO_TYPE (v), 0);
300 else if (TREE_CODE (v) == ADDR_EXPR
301 && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
303 fprintf (f, "& ");
304 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)), 0);
306 else
307 print_generic_expr (f, v, 0);
310 /* Print a lattice LAT to F. */
312 static void
313 print_lattice (FILE * f, struct ipcp_lattice *lat,
314 bool dump_sources, bool dump_benefits)
316 struct ipcp_value *val;
317 bool prev = false;
319 if (lat->bottom)
321 fprintf (f, "BOTTOM\n");
322 return;
325 if (!lat->values_count && !lat->contains_variable)
327 fprintf (f, "TOP\n");
328 return;
331 if (lat->contains_variable)
333 fprintf (f, "VARIABLE");
334 prev = true;
335 if (dump_benefits)
336 fprintf (f, "\n");
339 for (val = lat->values; val; val = val->next)
341 if (dump_benefits && prev)
342 fprintf (f, " ");
343 else if (!dump_benefits && prev)
344 fprintf (f, ", ");
345 else
346 prev = true;
348 print_ipcp_constant_value (f, val->value);
350 if (dump_sources)
352 struct ipcp_value_source *s;
354 fprintf (f, " [from:");
355 for (s = val->sources; s; s = s->next)
356 fprintf (f, " %i(%i)", s->cs->caller->order,
357 s->cs->frequency);
358 fprintf (f, "]");
361 if (dump_benefits)
362 fprintf (f, " [loc_time: %i, loc_size: %i, "
363 "prop_time: %i, prop_size: %i]\n",
364 val->local_time_benefit, val->local_size_cost,
365 val->prop_time_benefit, val->prop_size_cost);
367 if (!dump_benefits)
368 fprintf (f, "\n");
371 /* Print all ipcp_lattices of all functions to F. */
373 static void
374 print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
376 struct cgraph_node *node;
377 int i, count;
379 fprintf (f, "\nLattices:\n");
380 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
382 struct ipa_node_params *info;
384 info = IPA_NODE_REF (node);
385 fprintf (f, " Node: %s/%i:\n", node->name (),
386 node->order);
387 count = ipa_get_param_count (info);
388 for (i = 0; i < count; i++)
390 struct ipcp_agg_lattice *aglat;
391 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
392 fprintf (f, " param [%d]: ", i);
393 print_lattice (f, &plats->itself, dump_sources, dump_benefits);
395 if (plats->virt_call)
396 fprintf (f, " virt_call flag set\n");
398 if (plats->aggs_bottom)
400 fprintf (f, " AGGS BOTTOM\n");
401 continue;
403 if (plats->aggs_contain_variable)
404 fprintf (f, " AGGS VARIABLE\n");
405 for (aglat = plats->aggs; aglat; aglat = aglat->next)
407 fprintf (f, " %soffset " HOST_WIDE_INT_PRINT_DEC ": ",
408 plats->aggs_by_ref ? "ref " : "", aglat->offset);
409 print_lattice (f, aglat, dump_sources, dump_benefits);
415 /* Determine whether it is at all technically possible to create clones of NODE
416 and store this information in the ipa_node_params structure associated
417 with NODE. */
419 static void
420 determine_versionability (struct cgraph_node *node)
422 const char *reason = NULL;
424 /* There are a number of generic reasons functions cannot be versioned. We
425 also cannot remove parameters if there are type attributes such as fnspec
426 present. */
427 if (node->alias || node->thunk.thunk_p)
428 reason = "alias or thunk";
429 else if (!node->local.versionable)
430 reason = "not a tree_versionable_function";
431 else if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
432 reason = "insufficient body availability";
433 else if (lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (node->decl)))
435 /* Ideally we should clone the SIMD clones themselves and create
436 vector copies of them, so IPA-cp and SIMD clones can happily
437 coexist, but that may not be worth the effort. */
438 reason = "function has SIMD clones";
440 /* Don't clone decls local to a comdat group; it breaks and for C++
441 decloned constructors, inlining is always better anyway. */
442 else if (symtab_comdat_local_p (node))
443 reason = "comdat-local function";
445 if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
446 fprintf (dump_file, "Function %s/%i is not versionable, reason: %s.\n",
447 node->name (), node->order, reason);
449 node->local.versionable = (reason == NULL);
452 /* Return true if it is at all technically possible to create clones of a
453 NODE. */
455 static bool
456 ipcp_versionable_function_p (struct cgraph_node *node)
458 return node->local.versionable;
461 /* Structure holding accumulated information about callers of a node. */
463 struct caller_statistics
465 gcov_type count_sum;
466 int n_calls, n_hot_calls, freq_sum;
469 /* Initialize fields of STAT to zeroes. */
471 static inline void
472 init_caller_stats (struct caller_statistics *stats)
474 stats->count_sum = 0;
475 stats->n_calls = 0;
476 stats->n_hot_calls = 0;
477 stats->freq_sum = 0;
480 /* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
481 non-thunk incoming edges to NODE. */
483 static bool
484 gather_caller_stats (struct cgraph_node *node, void *data)
486 struct caller_statistics *stats = (struct caller_statistics *) data;
487 struct cgraph_edge *cs;
489 for (cs = node->callers; cs; cs = cs->next_caller)
490 if (cs->caller->thunk.thunk_p)
491 cgraph_for_node_and_aliases (cs->caller, gather_caller_stats,
492 stats, false);
493 else
495 stats->count_sum += cs->count;
496 stats->freq_sum += cs->frequency;
497 stats->n_calls++;
498 if (cgraph_maybe_hot_edge_p (cs))
499 stats->n_hot_calls ++;
501 return false;
505 /* Return true if this NODE is viable candidate for cloning. */
507 static bool
508 ipcp_cloning_candidate_p (struct cgraph_node *node)
510 struct caller_statistics stats;
512 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
514 if (!flag_ipa_cp_clone)
516 if (dump_file)
517 fprintf (dump_file, "Not considering %s for cloning; "
518 "-fipa-cp-clone disabled.\n",
519 node->name ());
520 return false;
523 if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
525 if (dump_file)
526 fprintf (dump_file, "Not considering %s for cloning; "
527 "optimizing it for size.\n",
528 node->name ());
529 return false;
532 init_caller_stats (&stats);
533 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
535 if (inline_summary (node)->self_size < stats.n_calls)
537 if (dump_file)
538 fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
539 node->name ());
540 return true;
543 /* When profile is available and function is hot, propagate into it even if
544 calls seems cold; constant propagation can improve function's speed
545 significantly. */
546 if (max_count)
548 if (stats.count_sum > node->count * 90 / 100)
550 if (dump_file)
551 fprintf (dump_file, "Considering %s for cloning; "
552 "usually called directly.\n",
553 node->name ());
554 return true;
557 if (!stats.n_hot_calls)
559 if (dump_file)
560 fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
561 node->name ());
562 return false;
564 if (dump_file)
565 fprintf (dump_file, "Considering %s for cloning.\n",
566 node->name ());
567 return true;
/* Arrays representing a topological ordering of call graph nodes and a stack
   of nodes used during constant propagation.  */

struct topo_info
{
  /* Topological order of all call graph nodes.  */
  struct cgraph_node **order;
  /* Work stack used while propagating.  */
  struct cgraph_node **stack;
  /* Number of nodes in ORDER and current top of STACK.  */
  int nnodes, stack_top;
};
580 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
582 static void
583 build_toporder_info (struct topo_info *topo)
585 topo->order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
586 topo->stack = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
587 topo->stack_top = 0;
588 topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
591 /* Free information about strongly connected components and the arrays in
592 TOPO. */
594 static void
595 free_toporder_info (struct topo_info *topo)
597 ipa_free_postorder_info ();
598 free (topo->order);
599 free (topo->stack);
602 /* Add NODE to the stack in TOPO, unless it is already there. */
604 static inline void
605 push_node_to_stack (struct topo_info *topo, struct cgraph_node *node)
607 struct ipa_node_params *info = IPA_NODE_REF (node);
608 if (info->node_enqueued)
609 return;
610 info->node_enqueued = 1;
611 topo->stack[topo->stack_top++] = node;
614 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
615 is empty. */
617 static struct cgraph_node *
618 pop_node_from_stack (struct topo_info *topo)
620 if (topo->stack_top)
622 struct cgraph_node *node;
623 topo->stack_top--;
624 node = topo->stack[topo->stack_top];
625 IPA_NODE_REF (node)->node_enqueued = 0;
626 return node;
628 else
629 return NULL;
632 /* Set lattice LAT to bottom and return true if it previously was not set as
633 such. */
635 static inline bool
636 set_lattice_to_bottom (struct ipcp_lattice *lat)
638 bool ret = !lat->bottom;
639 lat->bottom = true;
640 return ret;
643 /* Mark lattice as containing an unknown value and return true if it previously
644 was not marked as such. */
646 static inline bool
647 set_lattice_contains_variable (struct ipcp_lattice *lat)
649 bool ret = !lat->contains_variable;
650 lat->contains_variable = true;
651 return ret;
654 /* Set all aggegate lattices in PLATS to bottom and return true if they were
655 not previously set as such. */
657 static inline bool
658 set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
660 bool ret = !plats->aggs_bottom;
661 plats->aggs_bottom = true;
662 return ret;
665 /* Mark all aggegate lattices in PLATS as containing an unknown value and
666 return true if they were not previously marked as such. */
668 static inline bool
669 set_agg_lats_contain_variable (struct ipcp_param_lattices *plats)
671 bool ret = !plats->aggs_contain_variable;
672 plats->aggs_contain_variable = true;
673 return ret;
676 /* Mark bot aggregate and scalar lattices as containing an unknown variable,
677 return true is any of them has not been marked as such so far. */
679 static inline bool
680 set_all_contains_variable (struct ipcp_param_lattices *plats)
682 bool ret = !plats->itself.contains_variable || !plats->aggs_contain_variable;
683 plats->itself.contains_variable = true;
684 plats->aggs_contain_variable = true;
685 return ret;
688 /* Initialize ipcp_lattices. */
690 static void
691 initialize_node_lattices (struct cgraph_node *node)
693 struct ipa_node_params *info = IPA_NODE_REF (node);
694 struct cgraph_edge *ie;
695 bool disable = false, variable = false;
696 int i;
698 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
699 if (!node->local.local)
701 /* When cloning is allowed, we can assume that externally visible
702 functions are not called. We will compensate this by cloning
703 later. */
704 if (ipcp_versionable_function_p (node)
705 && ipcp_cloning_candidate_p (node))
706 variable = true;
707 else
708 disable = true;
711 if (disable || variable)
713 for (i = 0; i < ipa_get_param_count (info) ; i++)
715 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
716 if (disable)
718 set_lattice_to_bottom (&plats->itself);
719 set_agg_lats_to_bottom (plats);
721 else
722 set_all_contains_variable (plats);
724 if (dump_file && (dump_flags & TDF_DETAILS)
725 && !node->alias && !node->thunk.thunk_p)
726 fprintf (dump_file, "Marking all lattices of %s/%i as %s\n",
727 node->name (), node->order,
728 disable ? "BOTTOM" : "VARIABLE");
731 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
732 if (ie->indirect_info->polymorphic
733 && ie->indirect_info->param_index >= 0)
735 gcc_checking_assert (ie->indirect_info->param_index >= 0);
736 ipa_get_parm_lattices (info,
737 ie->indirect_info->param_index)->virt_call = 1;
741 /* Return the result of a (possibly arithmetic) pass through jump function
742 JFUNC on the constant value INPUT. Return NULL_TREE if that cannot be
743 determined or be considered an interprocedural invariant. */
745 static tree
746 ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
748 tree restype, res;
750 if (TREE_CODE (input) == TREE_BINFO)
752 if (ipa_get_jf_pass_through_type_preserved (jfunc))
754 gcc_checking_assert (ipa_get_jf_pass_through_operation (jfunc)
755 == NOP_EXPR);
756 return input;
758 return NULL_TREE;
761 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
762 return input;
764 gcc_checking_assert (is_gimple_ip_invariant (input));
765 if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc))
766 == tcc_comparison)
767 restype = boolean_type_node;
768 else
769 restype = TREE_TYPE (input);
770 res = fold_binary (ipa_get_jf_pass_through_operation (jfunc), restype,
771 input, ipa_get_jf_pass_through_operand (jfunc));
773 if (res && !is_gimple_ip_invariant (res))
774 return NULL_TREE;
776 return res;
779 /* Return the result of an ancestor jump function JFUNC on the constant value
780 INPUT. Return NULL_TREE if that cannot be determined. */
782 static tree
783 ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
785 if (TREE_CODE (input) == TREE_BINFO)
787 if (!ipa_get_jf_ancestor_type_preserved (jfunc))
788 return NULL;
789 return get_binfo_at_offset (input,
790 ipa_get_jf_ancestor_offset (jfunc),
791 ipa_get_jf_ancestor_type (jfunc));
793 else if (TREE_CODE (input) == ADDR_EXPR)
795 tree t = TREE_OPERAND (input, 0);
796 t = build_ref_for_offset (EXPR_LOCATION (t), t,
797 ipa_get_jf_ancestor_offset (jfunc),
798 ipa_get_jf_ancestor_type (jfunc), NULL, false);
799 return build_fold_addr_expr (t);
801 else
802 return NULL_TREE;
805 /* Determine whether JFUNC evaluates to a known value (that is either a
806 constant or a binfo) and if so, return it. Otherwise return NULL. INFO
807 describes the caller node so that pass-through jump functions can be
808 evaluated. */
810 tree
811 ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
813 if (jfunc->type == IPA_JF_CONST)
814 return ipa_get_jf_constant (jfunc);
815 else if (jfunc->type == IPA_JF_KNOWN_TYPE)
816 return ipa_binfo_from_known_type_jfunc (jfunc);
817 else if (jfunc->type == IPA_JF_PASS_THROUGH
818 || jfunc->type == IPA_JF_ANCESTOR)
820 tree input;
821 int idx;
823 if (jfunc->type == IPA_JF_PASS_THROUGH)
824 idx = ipa_get_jf_pass_through_formal_id (jfunc);
825 else
826 idx = ipa_get_jf_ancestor_formal_id (jfunc);
828 if (info->ipcp_orig_node)
829 input = info->known_vals[idx];
830 else
832 struct ipcp_lattice *lat;
834 if (!info->lattices)
836 gcc_checking_assert (!flag_ipa_cp);
837 return NULL_TREE;
839 lat = ipa_get_scalar_lat (info, idx);
840 if (!ipa_lat_is_single_const (lat))
841 return NULL_TREE;
842 input = lat->values->value;
845 if (!input)
846 return NULL_TREE;
848 if (jfunc->type == IPA_JF_PASS_THROUGH)
849 return ipa_get_jf_pass_through_result (jfunc, input);
850 else
851 return ipa_get_jf_ancestor_result (jfunc, input);
853 else
854 return NULL_TREE;
858 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
859 bottom, not containing a variable component and without any known value at
860 the same time. */
862 DEBUG_FUNCTION void
863 ipcp_verify_propagated_values (void)
865 struct cgraph_node *node;
867 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
869 struct ipa_node_params *info = IPA_NODE_REF (node);
870 int i, count = ipa_get_param_count (info);
872 for (i = 0; i < count; i++)
874 struct ipcp_lattice *lat = ipa_get_scalar_lat (info, i);
876 if (!lat->bottom
877 && !lat->contains_variable
878 && lat->values_count == 0)
880 if (dump_file)
882 fprintf (dump_file, "\nIPA lattices after constant "
883 "propagation:\n");
884 print_all_lattices (dump_file, true, false);
887 gcc_unreachable ();
893 /* Return true iff X and Y should be considered equal values by IPA-CP. */
895 static bool
896 values_equal_for_ipcp_p (tree x, tree y)
898 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
900 if (x == y)
901 return true;
903 if (TREE_CODE (x) == TREE_BINFO || TREE_CODE (y) == TREE_BINFO)
904 return false;
906 if (TREE_CODE (x) == ADDR_EXPR
907 && TREE_CODE (y) == ADDR_EXPR
908 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
909 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
910 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
911 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
912 else
913 return operand_equal_p (x, y, 0);
916 /* Add a new value source to VAL, marking that a value comes from edge CS and
917 (if the underlying jump function is a pass-through or an ancestor one) from
918 a caller value SRC_VAL of a caller parameter described by SRC_INDEX. OFFSET
919 is negative if the source was the scalar value of the parameter itself or
920 the offset within an aggregate. */
922 static void
923 add_value_source (struct ipcp_value *val, struct cgraph_edge *cs,
924 struct ipcp_value *src_val, int src_idx, HOST_WIDE_INT offset)
926 struct ipcp_value_source *src;
928 src = (struct ipcp_value_source *) pool_alloc (ipcp_sources_pool);
929 src->offset = offset;
930 src->cs = cs;
931 src->val = src_val;
932 src->index = src_idx;
934 src->next = val->sources;
935 val->sources = src;
938 /* Try to add NEWVAL to LAT, potentially creating a new struct ipcp_value for
939 it. CS, SRC_VAL SRC_INDEX and OFFSET are meant for add_value_source and
940 have the same meaning. */
942 static bool
943 add_value_to_lattice (struct ipcp_lattice *lat, tree newval,
944 struct cgraph_edge *cs, struct ipcp_value *src_val,
945 int src_idx, HOST_WIDE_INT offset)
947 struct ipcp_value *val;
949 if (lat->bottom)
950 return false;
952 for (val = lat->values; val; val = val->next)
953 if (values_equal_for_ipcp_p (val->value, newval))
955 if (ipa_edge_within_scc (cs))
957 struct ipcp_value_source *s;
958 for (s = val->sources; s ; s = s->next)
959 if (s->cs == cs)
960 break;
961 if (s)
962 return false;
965 add_value_source (val, cs, src_val, src_idx, offset);
966 return false;
969 if (lat->values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
971 /* We can only free sources, not the values themselves, because sources
972 of other values in this this SCC might point to them. */
973 for (val = lat->values; val; val = val->next)
975 while (val->sources)
977 struct ipcp_value_source *src = val->sources;
978 val->sources = src->next;
979 pool_free (ipcp_sources_pool, src);
983 lat->values = NULL;
984 return set_lattice_to_bottom (lat);
987 lat->values_count++;
988 val = (struct ipcp_value *) pool_alloc (ipcp_values_pool);
989 memset (val, 0, sizeof (*val));
991 add_value_source (val, cs, src_val, src_idx, offset);
992 val->value = newval;
993 val->next = lat->values;
994 lat->values = val;
995 return true;
998 /* Like above but passes a special value of offset to distinguish that the
999 origin is the scalar value of the parameter rather than a part of an
1000 aggregate. */
1002 static inline bool
1003 add_scalar_value_to_lattice (struct ipcp_lattice *lat, tree newval,
1004 struct cgraph_edge *cs,
1005 struct ipcp_value *src_val, int src_idx)
1007 return add_value_to_lattice (lat, newval, cs, src_val, src_idx, -1);
1010 /* Propagate values through a pass-through jump function JFUNC associated with
1011 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1012 is the index of the source parameter. */
1014 static bool
1015 propagate_vals_accross_pass_through (struct cgraph_edge *cs,
1016 struct ipa_jump_func *jfunc,
1017 struct ipcp_lattice *src_lat,
1018 struct ipcp_lattice *dest_lat,
1019 int src_idx)
1021 struct ipcp_value *src_val;
1022 bool ret = false;
1024 /* Do not create new values when propagating within an SCC because if there
1025 are arithmetic functions with circular dependencies, there is infinite
1026 number of them and we would just make lattices bottom. */
1027 if ((ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1028 && ipa_edge_within_scc (cs))
1029 ret = set_lattice_contains_variable (dest_lat);
1030 else
1031 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1033 tree cstval = ipa_get_jf_pass_through_result (jfunc, src_val->value);
1035 if (cstval)
1036 ret |= add_scalar_value_to_lattice (dest_lat, cstval, cs, src_val,
1037 src_idx);
1038 else
1039 ret |= set_lattice_contains_variable (dest_lat);
1042 return ret;
1045 /* Propagate values through an ancestor jump function JFUNC associated with
1046 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1047 is the index of the source parameter. */
1049 static bool
1050 propagate_vals_accross_ancestor (struct cgraph_edge *cs,
1051 struct ipa_jump_func *jfunc,
1052 struct ipcp_lattice *src_lat,
1053 struct ipcp_lattice *dest_lat,
1054 int src_idx)
1056 struct ipcp_value *src_val;
1057 bool ret = false;
1059 if (ipa_edge_within_scc (cs))
1060 return set_lattice_contains_variable (dest_lat);
1062 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1064 tree t = ipa_get_jf_ancestor_result (jfunc, src_val->value);
1066 if (t)
1067 ret |= add_scalar_value_to_lattice (dest_lat, t, cs, src_val, src_idx);
1068 else
1069 ret |= set_lattice_contains_variable (dest_lat);
1072 return ret;
/* Propagate scalar values across jump function JFUNC that is associated with
   edge CS and put the values into DEST_LAT.  Return true if DEST_LAT changed
   in any way.  */

static bool
propagate_scalar_accross_jump_function (struct cgraph_edge *cs,
					struct ipa_jump_func *jfunc,
					struct ipcp_lattice *dest_lat)
{
  /* A bottom lattice can never change again.  */
  if (dest_lat->bottom)
    return false;

  if (jfunc->type == IPA_JF_CONST
      || jfunc->type == IPA_JF_KNOWN_TYPE)
    {
      tree val;

      if (jfunc->type == IPA_JF_KNOWN_TYPE)
	{
	  /* A known-type jump function may fail to yield a BINFO; then all
	     we know is that the value is not a usable constant.  */
	  val = ipa_binfo_from_known_type_jfunc (jfunc);
	  if (!val)
	    return set_lattice_contains_variable (dest_lat);
	}
      else
	val = ipa_get_jf_constant (jfunc);
      return add_scalar_value_to_lattice (dest_lat, val, cs, NULL, 0);
    }
  else if (jfunc->type == IPA_JF_PASS_THROUGH
	   || jfunc->type == IPA_JF_ANCESTOR)
    {
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      struct ipcp_lattice *src_lat;
      int src_idx;
      bool ret;

      if (jfunc->type == IPA_JF_PASS_THROUGH)
	src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
      else
	src_idx = ipa_get_jf_ancestor_formal_id (jfunc);

      /* Values flow from the corresponding lattice of the caller.  */
      src_lat = ipa_get_scalar_lat (caller_info, src_idx);
      if (src_lat->bottom)
	return set_lattice_contains_variable (dest_lat);

      /* If we would need to clone the caller and cannot, do not propagate.  */
      if (!ipcp_versionable_function_p (cs->caller)
	  && (src_lat->contains_variable
	      || (src_lat->values_count > 1)))
	return set_lattice_contains_variable (dest_lat);

      if (jfunc->type == IPA_JF_PASS_THROUGH)
	ret = propagate_vals_accross_pass_through (cs, jfunc, src_lat,
						   dest_lat, src_idx);
      else
	ret = propagate_vals_accross_ancestor (cs, jfunc, src_lat, dest_lat,
					       src_idx);

      /* Variability in the source always implies variability in the
	 destination.  */
      if (src_lat->contains_variable)
	ret |= set_lattice_contains_variable (dest_lat);

      return ret;
    }

  /* TODO: We currently do not handle member method pointers in IPA-CP (we only
     use it for indirect inlining), we should propagate them too.  */
  return set_lattice_contains_variable (dest_lat);
}
1142 /* If DEST_PLATS already has aggregate items, check that aggs_by_ref matches
1143 NEW_AGGS_BY_REF and if not, mark all aggs as bottoms and return true (in all
1144 other cases, return false). If there are no aggregate items, set
1145 aggs_by_ref to NEW_AGGS_BY_REF. */
1147 static bool
1148 set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
1149 bool new_aggs_by_ref)
1151 if (dest_plats->aggs)
1153 if (dest_plats->aggs_by_ref != new_aggs_by_ref)
1155 set_agg_lats_to_bottom (dest_plats);
1156 return true;
1159 else
1160 dest_plats->aggs_by_ref = new_aggs_by_ref;
1161 return false;
/* Walk aggregate lattices in DEST_PLATS from ***AGLAT on, until ***aglat is an
   already existing lattice for the given OFFSET and SIZE, marking all skipped
   lattices as containing variable and checking for overlaps.  If there is no
   already existing lattice for the OFFSET and VAL_SIZE, create one, initialize
   it with offset, size and contains_variable to PRE_EXISTING, and return true,
   unless there are too many already.  If there are too many, return false.  If
   there are overlaps turn whole DEST_PLATS to bottom and return false.  If any
   skipped lattices were newly marked as containing variable, set *CHANGE to
   true.  */

static bool
merge_agg_lats_step (struct ipcp_param_lattices *dest_plats,
		     HOST_WIDE_INT offset, HOST_WIDE_INT val_size,
		     struct ipcp_agg_lattice ***aglat,
		     bool pre_existing, bool *change)
{
  gcc_checking_assert (offset >= 0);

  /* Skip lattices entirely below OFFSET; they get no new value, so they
     become variable.  A partial overlap poisons everything.  */
  while (**aglat && (**aglat)->offset < offset)
    {
      if ((**aglat)->offset + (**aglat)->size > offset)
	{
	  set_agg_lats_to_bottom (dest_plats);
	  return false;
	}
      *change |= set_lattice_contains_variable (**aglat);
      *aglat = &(**aglat)->next;
    }

  if (**aglat && (**aglat)->offset == offset)
    {
      /* An existing lattice at exactly OFFSET must match in size and must
	 not overlap its successor, otherwise all bets are off.  */
      if ((**aglat)->size != val_size
          || ((**aglat)->next
              && (**aglat)->next->offset < offset + val_size))
	{
	  set_agg_lats_to_bottom (dest_plats);
	  return false;
	}
      gcc_checking_assert (!(**aglat)->next
			   || (**aglat)->next->offset >= offset + val_size);
      return true;
    }
  else
    {
      struct ipcp_agg_lattice *new_al;

      /* A new lattice would overlap the following one.  */
      if (**aglat && (**aglat)->offset < offset + val_size)
	{
	  set_agg_lats_to_bottom (dest_plats);
	  return false;
	}
      /* Respect the per-parameter cap on tracked aggregate items.  */
      if (dest_plats->aggs_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	return false;
      dest_plats->aggs_count++;
      new_al = (struct ipcp_agg_lattice *) pool_alloc (ipcp_agg_lattice_pool);
      memset (new_al, 0, sizeof (*new_al));

      new_al->offset = offset;
      new_al->size = val_size;
      /* If lattices already existed, a freshly created one missed earlier
	 merges and therefore starts as containing variable.  */
      new_al->contains_variable = pre_existing;

      /* Link the new lattice in front of **aglat, keeping offset order.  */
      new_al->next = **aglat;
      **aglat = new_al;
      return true;
    }
}
1231 /* Set all AGLAT and all other aggregate lattices reachable by next pointers as
1232 containing an unknown value. */
1234 static bool
1235 set_chain_of_aglats_contains_variable (struct ipcp_agg_lattice *aglat)
1237 bool ret = false;
1238 while (aglat)
1240 ret |= set_lattice_contains_variable (aglat);
1241 aglat = aglat->next;
1243 return ret;
/* Merge existing aggregate lattices in SRC_PLATS to DEST_PLATS, subtracting
   OFFSET_DELTA.  CS is the call graph edge and SRC_IDX the index of the source
   parameter used for lattice value sources.  Return true if DEST_PLATS changed
   in any way.  */

static bool
merge_aggregate_lattices (struct cgraph_edge *cs,
			  struct ipcp_param_lattices *dest_plats,
			  struct ipcp_param_lattices *src_plats,
			  int src_idx, HOST_WIDE_INT offset_delta)
{
  bool pre_existing = dest_plats->aggs != NULL;
  struct ipcp_agg_lattice **dst_aglat;
  bool ret = false;

  /* By-ref mismatch already sent everything to bottom; report the change.  */
  if (set_check_aggs_by_ref (dest_plats, src_plats->aggs_by_ref))
    return true;
  if (src_plats->aggs_bottom)
    return set_agg_lats_contain_variable (dest_plats);
  if (src_plats->aggs_contain_variable)
    ret |= set_agg_lats_contain_variable (dest_plats);
  dst_aglat = &dest_plats->aggs;

  for (struct ipcp_agg_lattice *src_aglat = src_plats->aggs;
       src_aglat;
       src_aglat = src_aglat->next)
    {
      HOST_WIDE_INT new_offset = src_aglat->offset - offset_delta;

      /* Parts below the ancestor offset do not exist in the destination.  */
      if (new_offset < 0)
	continue;
      if (merge_agg_lats_step (dest_plats, new_offset, src_aglat->size,
			       &dst_aglat, pre_existing, &ret))
	{
	  struct ipcp_agg_lattice *new_al = *dst_aglat;

	  dst_aglat = &(*dst_aglat)->next;
	  if (src_aglat->bottom)
	    {
	      ret |= set_lattice_contains_variable (new_al);
	      continue;
	    }
	  if (src_aglat->contains_variable)
	    ret |= set_lattice_contains_variable (new_al);
	  /* Copy each known value, recording its origin for the later
	     topological propagation of effects.  */
	  for (struct ipcp_value *val = src_aglat->values;
	       val;
	       val = val->next)
	    ret |= add_value_to_lattice (new_al, val->value, cs, val, src_idx,
					 src_aglat->offset);
	}
      else if (dest_plats->aggs_bottom)
	return true;
    }
  /* Destination lattices past the last merged source item become variable.  */
  ret |= set_chain_of_aglats_contains_variable (*dst_aglat);
  return ret;
}
1303 /* Determine whether there is anything to propagate FROM SRC_PLATS through a
1304 pass-through JFUNC and if so, whether it has conform and conforms to the
1305 rules about propagating values passed by reference. */
1307 static bool
1308 agg_pass_through_permissible_p (struct ipcp_param_lattices *src_plats,
1309 struct ipa_jump_func *jfunc)
1311 return src_plats->aggs
1312 && (!src_plats->aggs_by_ref
1313 || ipa_get_jf_pass_through_agg_preserved (jfunc));
/* Propagate aggregate values across jump function JFUNC that is associated
   with edge CS and put the values into DEST_PLATS.  Return true if DEST_PLATS
   changed in any way.  */

static bool
propagate_aggs_accross_jump_function (struct cgraph_edge *cs,
				      struct ipa_jump_func *jfunc,
				      struct ipcp_param_lattices *dest_plats)
{
  bool ret = false;

  /* A bottom aggregate lattice can never change again.  */
  if (dest_plats->aggs_bottom)
    return false;

  if (jfunc->type == IPA_JF_PASS_THROUGH
      && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
    {
      /* Simple pass-through: merge the caller's aggregate lattices of the
	 corresponding parameter with no offset shift.  */
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
      struct ipcp_param_lattices *src_plats;

      src_plats = ipa_get_parm_lattices (caller_info, src_idx);
      if (agg_pass_through_permissible_p (src_plats, jfunc))
	{
	  /* Currently we do not produce clobber aggregate jump
	     functions, replace with merging when we do.  */
	  gcc_assert (!jfunc->agg.items);
	  ret |= merge_aggregate_lattices (cs, dest_plats, src_plats,
					   src_idx, 0);
	}
      else
	ret |= set_agg_lats_contain_variable (dest_plats);
    }
  else if (jfunc->type == IPA_JF_ANCESTOR
	   && ipa_get_jf_ancestor_agg_preserved (jfunc))
    {
      /* Ancestor: merge the caller's lattices shifted by the ancestor
	 offset; only meaningful for by-reference aggregates.  */
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
      struct ipcp_param_lattices *src_plats;

      src_plats = ipa_get_parm_lattices (caller_info, src_idx);
      if (src_plats->aggs && src_plats->aggs_by_ref)
	{
	  /* Currently we do not produce clobber aggregate jump
	     functions, replace with merging when we do.  */
	  gcc_assert (!jfunc->agg.items);
	  ret |= merge_aggregate_lattices (cs, dest_plats, src_plats, src_idx,
					   ipa_get_jf_ancestor_offset (jfunc));
	}
      else if (!src_plats->aggs_by_ref)
	ret |= set_agg_lats_to_bottom (dest_plats);
      else
	ret |= set_agg_lats_contain_variable (dest_plats);
    }
  else if (jfunc->agg.items)
    {
      /* Constant aggregate items recorded directly in the jump function.  */
      bool pre_existing = dest_plats->aggs != NULL;
      struct ipcp_agg_lattice **aglat = &dest_plats->aggs;
      struct ipa_agg_jf_item *item;
      int i;

      if (set_check_aggs_by_ref (dest_plats, jfunc->agg.by_ref))
	return true;

      FOR_EACH_VEC_ELT (*jfunc->agg.items, i, item)
	{
	  HOST_WIDE_INT val_size;

	  if (item->offset < 0)
	    continue;
	  gcc_checking_assert (is_gimple_ip_invariant (item->value));
	  val_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (item->value)));

	  if (merge_agg_lats_step (dest_plats, item->offset, val_size,
				   &aglat, pre_existing, &ret))
	    {
	      ret |= add_value_to_lattice (*aglat, item->value, cs, NULL, 0, 0);
	      aglat = &(*aglat)->next;
	    }
	  else if (dest_plats->aggs_bottom)
	    return true;
	}

      /* Lattices past the last constant item become variable.  */
      ret |= set_chain_of_aglats_contains_variable (*aglat);
    }
  else
    ret |= set_agg_lats_contain_variable (dest_plats);
  return ret;
}
/* Propagate constants from the caller to the callee of CS.  Return true if
   any lattice of the callee changed.  */

static bool
propagate_constants_accross_call (struct cgraph_edge *cs)
{
  struct ipa_node_params *callee_info;
  enum availability availability;
  struct cgraph_node *callee, *alias_or_thunk;
  struct ipa_edge_args *args;
  bool ret = false;
  int i, args_count, parms_count;

  /* Resolve aliases to the node that actually has a body.  */
  callee = cgraph_function_node (cs->callee, &availability);
  if (!callee->definition)
    return false;
  gcc_checking_assert (cgraph_function_with_gimple_body_p (callee));
  callee_info = IPA_NODE_REF (callee);

  args = IPA_EDGE_REF (cs);
  args_count = ipa_get_cs_argument_count (args);
  parms_count = ipa_get_param_count (callee_info);

  /* If this call goes through a thunk we must not propagate to the first (0th)
     parameter.  However, we might need to uncover a thunk from below a series
     of aliases first.  */
  alias_or_thunk = cs->callee;
  while (alias_or_thunk->alias)
    alias_or_thunk = cgraph_alias_target (alias_or_thunk);
  if (alias_or_thunk->thunk.thunk_p)
    {
      ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
							       0));
      i = 1;
    }
  else
    i = 0;

  /* Propagate each actual-argument jump function into the corresponding
     formal-parameter lattices.  */
  for (; (i < args_count) && (i < parms_count); i++)
    {
      struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
      struct ipcp_param_lattices *dest_plats;

      dest_plats = ipa_get_parm_lattices (callee_info, i);
      /* An overwritable callee might be replaced by a different body at link
	 time, so nothing can be assumed about its parameters.  */
      if (availability == AVAIL_OVERWRITABLE)
	ret |= set_all_contains_variable (dest_plats);
      else
	{
	  ret |= propagate_scalar_accross_jump_function (cs, jump_func,
							 &dest_plats->itself);
	  ret |= propagate_aggs_accross_jump_function (cs, jump_func,
						       dest_plats);
	}
    }
  /* Parameters with no corresponding argument are unknown.  */
  for (; i < parms_count; i++)
    ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info, i));

  return ret;
}
/* If an indirect edge IE can be turned into a direct one based on KNOWN_VALS
   (which can contain both constants and binfos), KNOWN_BINFOS, KNOWN_AGGS or
   AGG_REPS return the destination.  The latter three can be NULL.  If AGG_REPS
   is not NULL, KNOWN_AGGS is ignored.  */

static tree
ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
				vec<tree> known_vals,
				vec<tree> known_binfos,
				vec<ipa_agg_jump_function_p> known_aggs,
				struct ipa_agg_replacement_value *agg_reps)
{
  int param_index = ie->indirect_info->param_index;
  HOST_WIDE_INT token, anc_offset;
  tree otr_type;
  tree t;
  tree target;

  /* Without a known value for the parameter the call goes through, nothing
     can be resolved.  */
  if (param_index == -1
      || known_vals.length () <= (unsigned int) param_index)
    return NULL_TREE;

  if (!ie->indirect_info->polymorphic)
    {
      tree t;

      if (ie->indirect_info->agg_contents)
	{
	  /* The function pointer is loaded from an aggregate; look it up
	     either in the replacement values or the known aggregates.  */
	  if (agg_reps)
	    {
	      t = NULL;
	      while (agg_reps)
		{
		  if (agg_reps->index == param_index
		      && agg_reps->offset == ie->indirect_info->offset
		      && agg_reps->by_ref == ie->indirect_info->by_ref)
		    {
		      t = agg_reps->value;
		      break;
		    }
		  agg_reps = agg_reps->next;
		}
	    }
	  else if (known_aggs.length () > (unsigned int) param_index)
	    {
	      struct ipa_agg_jump_function *agg;
	      agg = known_aggs[param_index];
	      t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
					      ie->indirect_info->by_ref);
	    }
	  else
	    t = NULL;
	}
      else
	t = known_vals[param_index];

      /* Only the address of a function gives us a direct call target.  */
      if (t &&
	  TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
	return TREE_OPERAND (t, 0);
      else
	return NULL_TREE;
    }

  /* Polymorphic (virtual) call: attempt devirtualization via BINFOs.  */
  gcc_assert (!ie->indirect_info->agg_contents);
  token = ie->indirect_info->otr_token;
  anc_offset = ie->indirect_info->offset;
  otr_type = ie->indirect_info->otr_type;

  t = known_vals[param_index];
  if (!t && known_binfos.length () > (unsigned int) param_index)
    t = known_binfos[param_index];
  if (!t)
    return NULL_TREE;

  if (TREE_CODE (t) != TREE_BINFO)
    {
      /* A constant: extract the type of the object it points to first.  */
      tree binfo;
      binfo = gimple_extract_devirt_binfo_from_cst
		 (t, ie->indirect_info->otr_type);
      if (!binfo)
	return NULL_TREE;
      binfo = get_binfo_at_offset (binfo, anc_offset, otr_type);
      if (!binfo)
	return NULL_TREE;
      target = gimple_get_virt_method_for_binfo (token, binfo);
    }
  else
    {
      tree binfo;

      binfo = get_binfo_at_offset (t, anc_offset, otr_type);
      if (!binfo)
	return NULL_TREE;
      target = gimple_get_virt_method_for_binfo (token, binfo);
    }
#ifdef ENABLE_CHECKING
  if (target)
    gcc_assert (possible_polymorphic_call_target_p
		 (ie, cgraph_get_node (target)));
#endif

  return target;
}
1572 /* If an indirect edge IE can be turned into a direct one based on KNOWN_VALS
1573 (which can contain both constants and binfos), KNOWN_BINFOS (which can be
1574 NULL) or KNOWN_AGGS (which also can be NULL) return the destination. */
1576 tree
1577 ipa_get_indirect_edge_target (struct cgraph_edge *ie,
1578 vec<tree> known_vals,
1579 vec<tree> known_binfos,
1580 vec<ipa_agg_jump_function_p> known_aggs)
1582 return ipa_get_indirect_edge_target_1 (ie, known_vals, known_binfos,
1583 known_aggs, NULL);
/* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
   and KNOWN_BINFOS.  The bonus grows with each indirect call that the known
   values would allow to be turned into a direct (and possibly inlined)
   one.  */

static int
devirtualization_time_bonus (struct cgraph_node *node,
			     vec<tree> known_csts,
			     vec<tree> known_binfos,
			     vec<ipa_agg_jump_function_p> known_aggs)
{
  struct cgraph_edge *ie;
  int res = 0;

  for (ie = node->indirect_calls; ie; ie = ie->next_callee)
    {
      struct cgraph_node *callee;
      struct inline_summary *isummary;
      tree target;

      target = ipa_get_indirect_edge_target (ie, known_csts, known_binfos,
					     known_aggs);
      if (!target)
	continue;

      /* Only bare minimum benefit for clearly un-inlineable targets.  */
      res += 1;
      callee = cgraph_get_node (target);
      if (!callee || !callee->definition)
	continue;
      isummary = inline_summary (callee);
      if (!isummary->inlinable)
	continue;

      /* FIXME: The values below need re-considering and perhaps also
	 integrating into the cost metrics, at least in some very basic way.  */
      /* Heuristic bonuses: the smaller the resolved callee relative to
	 MAX_INLINE_INSNS_AUTO, the more likely inlining is and the larger
	 the assumed speedup.  */
      if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
	res += 31;
      else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
	res += 15;
      else if (isummary->size <= MAX_INLINE_INSNS_AUTO
	       || DECL_DECLARED_INLINE_P (callee->decl))
	res += 7;
    }

  return res;
}
1632 /* Return time bonus incurred because of HINTS. */
1634 static int
1635 hint_time_bonus (inline_hints hints)
1637 int result = 0;
1638 if (hints & (INLINE_HINT_loop_iterations | INLINE_HINT_loop_stride))
1639 result += PARAM_VALUE (PARAM_IPA_CP_LOOP_HINT_BONUS);
1640 if (hints & INLINE_HINT_array_index)
1641 result += PARAM_VALUE (PARAM_IPA_CP_ARRAY_INDEX_HINT_BONUS);
1642 return result;
/* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
   and SIZE_COST and with the sum of frequencies of incoming edges to the
   potential new clone in FREQ_SUM (and of profile counts in COUNT_SUM).  */

static bool
good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
			    int freq_sum, gcov_type count_sum, int size_cost)
{
  /* Cloning must be enabled, bring some benefit, and the function must be
     optimized for speed for it to be worthwhile.  */
  if (time_benefit == 0
      || !flag_ipa_cp_clone
      || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
    return false;

  gcc_assert (size_cost > 0);

  if (max_count)
    {
      /* With profile feedback, weight the benefit by how hot the incoming
	 edges are relative to the hottest node (scaled by 1000).  Widen to
	 HOST_WIDEST_INT so the multiplication cannot overflow.  */
      int factor = (count_sum * 1000) / max_count;
      HOST_WIDEST_INT evaluation = (((HOST_WIDEST_INT) time_benefit * factor)
				    / size_cost);

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
		 "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
		 ") -> evaluation: " HOST_WIDEST_INT_PRINT_DEC
		 ", threshold: %i\n",
		 time_benefit, size_cost, (HOST_WIDE_INT) count_sum,
		 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));

      return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
    }
  else
    {
      /* Without profile feedback, use static edge frequencies instead.  */
      HOST_WIDEST_INT evaluation = (((HOST_WIDEST_INT) time_benefit * freq_sum)
				    / size_cost);

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
		 "size: %i, freq_sum: %i) -> evaluation: "
		 HOST_WIDEST_INT_PRINT_DEC ", threshold: %i\n",
		 time_benefit, size_cost, freq_sum, evaluation,
		 PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));

      return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
    }
}
1692 /* Return all context independent values from aggregate lattices in PLATS in a
1693 vector. Return NULL if there are none. */
1695 static vec<ipa_agg_jf_item, va_gc> *
1696 context_independent_aggregate_values (struct ipcp_param_lattices *plats)
1698 vec<ipa_agg_jf_item, va_gc> *res = NULL;
1700 if (plats->aggs_bottom
1701 || plats->aggs_contain_variable
1702 || plats->aggs_count == 0)
1703 return NULL;
1705 for (struct ipcp_agg_lattice *aglat = plats->aggs;
1706 aglat;
1707 aglat = aglat->next)
1708 if (ipa_lat_is_single_const (aglat))
1710 struct ipa_agg_jf_item item;
1711 item.offset = aglat->offset;
1712 item.value = aglat->values->value;
1713 vec_safe_push (res, item);
1715 return res;
/* Allocate KNOWN_CSTS, KNOWN_BINFOS and, if non-NULL, KNOWN_AGGS and populate
   them with values of parameters that are known independent of the context.
   INFO describes the function.  If REMOVABLE_PARAMS_COST is non-NULL, the
   movement cost of all removable parameters will be stored in it.  Return
   true if any context-independent value was found.  */

static bool
gather_context_independent_values (struct ipa_node_params *info,
				   vec<tree> *known_csts,
				   vec<tree> *known_binfos,
				   vec<ipa_agg_jump_function> *known_aggs,
				   int *removable_params_cost)
{
  int i, count = ipa_get_param_count (info);
  bool ret = false;

  /* One (possibly empty) slot per formal parameter in each vector.  */
  known_csts->create (0);
  known_binfos->create (0);
  known_csts->safe_grow_cleared (count);
  known_binfos->safe_grow_cleared (count);
  if (known_aggs)
    {
      known_aggs->create (0);
      known_aggs->safe_grow_cleared (count);
    }

  if (removable_params_cost)
    *removable_params_cost = 0;

  for (i = 0; i < count ; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      struct ipcp_lattice *lat = &plats->itself;

      if (ipa_lat_is_single_const (lat))
	{
	  struct ipcp_value *val = lat->values;
	  if (TREE_CODE (val->value) != TREE_BINFO)
	    {
	      /* A single known constant: the parameter could be removed
		 entirely, saving its move cost.  */
	      (*known_csts)[i] = val->value;
	      if (removable_params_cost)
		*removable_params_cost
		  += estimate_move_cost (TREE_TYPE (val->value));
	      ret = true;
	    }
	  else if (plats->virt_call)
	    {
	      /* A BINFO is only useful if the parameter actually feeds a
		 virtual call.  */
	      (*known_binfos)[i] = val->value;
	      ret = true;
	    }
	  else if (removable_params_cost
		   && !ipa_is_param_used (info, i))
	    *removable_params_cost += ipa_get_param_move_cost (info, i);
	}
      else if (removable_params_cost
	       && !ipa_is_param_used (info, i))
	/* No single constant, but an unused parameter is removable too.  */
	*removable_params_cost
	  += ipa_get_param_move_cost (info, i);

      if (known_aggs)
	{
	  vec<ipa_agg_jf_item, va_gc> *agg_items;
	  struct ipa_agg_jump_function *ajf;

	  agg_items = context_independent_aggregate_values (plats);
	  ajf = &(*known_aggs)[i];
	  ajf->items = agg_items;
	  ajf->by_ref = plats->aggs_by_ref;
	  ret |= agg_items != NULL;
	}
    }

  return ret;
}
1792 /* The current interface in ipa-inline-analysis requires a pointer vector.
1793 Create it.
1795 FIXME: That interface should be re-worked, this is slightly silly. Still,
1796 I'd like to discuss how to change it first and this demonstrates the
1797 issue. */
1799 static vec<ipa_agg_jump_function_p>
1800 agg_jmp_p_vec_for_t_vec (vec<ipa_agg_jump_function> known_aggs)
1802 vec<ipa_agg_jump_function_p> ret;
1803 struct ipa_agg_jump_function *ajf;
1804 int i;
1806 ret.create (known_aggs.length ());
1807 FOR_EACH_VEC_ELT (known_aggs, i, ajf)
1808 ret.quick_push (ajf);
1809 return ret;
/* Iterate over known values of parameters of NODE and estimate the local
   effects in terms of time and size they have.  The results are stored in
   the local_time_benefit and local_size_cost fields of the individual
   values.  */

static void
estimate_local_effects (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);
  vec<tree> known_csts, known_binfos;
  vec<ipa_agg_jump_function> known_aggs;
  vec<ipa_agg_jump_function_p> known_aggs_ptrs;
  bool always_const;
  int base_time = inline_summary (node)->time;
  int removable_params_cost;

  /* Nothing to do for functions with no parameters or that cannot be
     cloned.  */
  if (!count || !ipcp_versionable_function_p (node))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEstimating effects for %s/%i, base_time: %i.\n",
	     node->name (), node->order, base_time);

  always_const = gather_context_independent_values (info, &known_csts,
						    &known_binfos, &known_aggs,
						    &removable_params_cost);
  known_aggs_ptrs = agg_jmp_p_vec_for_t_vec (known_aggs);
  if (always_const)
    {
      /* Some values are known in all contexts; see whether specializing the
	 whole function for them pays off.  */
      struct caller_statistics stats;
      inline_hints hints;
      int time, size;

      init_caller_stats (&stats);
      cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
      estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
					 known_aggs_ptrs, &size, &time, &hints);
      time -= devirtualization_time_bonus (node, known_csts, known_binfos,
					   known_aggs_ptrs);
      time -= hint_time_bonus (hints);
      time -= removable_params_cost;
      size -= stats.n_calls * removable_params_cost;

      if (dump_file)
	fprintf (dump_file, " - context independent values, size: %i, "
		 "time_benefit: %i\n", size, base_time - time);

      if (size <= 0
	  || cgraph_will_be_removed_from_program_if_no_direct_calls (node))
	{
	  info->do_clone_for_all_contexts = true;
	  base_time = time;

	  if (dump_file)
	    fprintf (dump_file, "     Decided to specialize for all "
		     "known contexts, code not going to grow.\n");
	}
      else if (good_cloning_opportunity_p (node, base_time - time,
					   stats.freq_sum, stats.count_sum,
					   size))
	{
	  if (size + overall_size <= max_new_size)
	    {
	      info->do_clone_for_all_contexts = true;
	      base_time = time;
	      overall_size += size;

	      if (dump_file)
		fprintf (dump_file, "     Decided to specialize for all "
			 "known contexts, growth deemed beneficial.\n");
	    }
	  else if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "   Not cloning for all contexts because "
		     "max_new_size would be reached with %li.\n",
		     size + overall_size);
	}
    }

  /* Estimate effects of each individual scalar value by temporarily placing
     it into the known vectors and re-running the size/time estimation.  */
  for (i = 0; i < count ; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      struct ipcp_lattice *lat = &plats->itself;
      struct ipcp_value *val;
      int emc;

      /* Skip parameters whose value is already context-independent.  */
      if (lat->bottom
	  || !lat->values
	  || known_csts[i]
	  || known_binfos[i])
	continue;

      for (val = lat->values; val; val = val->next)
	{
	  int time, size, time_benefit;
	  inline_hints hints;

	  if (TREE_CODE (val->value) != TREE_BINFO)
	    {
	      known_csts[i] = val->value;
	      known_binfos[i] = NULL_TREE;
	      emc = estimate_move_cost (TREE_TYPE (val->value));
	    }
	  else if (plats->virt_call)
	    {
	      known_csts[i] = NULL_TREE;
	      known_binfos[i] = val->value;
	      emc = 0;
	    }
	  else
	    continue;

	  estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
					     known_aggs_ptrs, &size, &time,
					     &hints);
	  time_benefit = base_time - time
	    + devirtualization_time_bonus (node, known_csts, known_binfos,
					   known_aggs_ptrs)
	    + hint_time_bonus (hints)
	    + removable_params_cost + emc;

	  gcc_checking_assert (size >=0);
	  /* The inliner-heuristics based estimates may think that in certain
	     contexts some functions do not have any size at all but we want
	     all specializations to have at least a tiny cost, not least not to
	     divide by zero.  */
	  if (size == 0)
	    size = 1;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " - estimates for value ");
	      print_ipcp_constant_value (dump_file, val->value);
	      fprintf (dump_file, " for ");
	      ipa_dump_param (dump_file, info, i);
	      fprintf (dump_file, ": time_benefit: %i, size: %i\n",
		       time_benefit, size);
	    }

	  val->local_time_benefit = time_benefit;
	  val->local_size_cost = size;
	}
      /* Restore the vectors before moving on to the next parameter.  */
      known_binfos[i] = NULL_TREE;
      known_csts[i] = NULL_TREE;
    }

  /* Do the same for every value of every aggregate lattice, temporarily
     pushing the item into the known aggregate jump function.  */
  for (i = 0; i < count ; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      struct ipa_agg_jump_function *ajf;
      struct ipcp_agg_lattice *aglat;

      if (plats->aggs_bottom || !plats->aggs)
	continue;

      ajf = &known_aggs[i];
      for (aglat = plats->aggs; aglat; aglat = aglat->next)
	{
	  struct ipcp_value *val;
	  if (aglat->bottom || !aglat->values
	      /* If the following is true, the one value is in known_aggs.  */
	      || (!plats->aggs_contain_variable
		  && ipa_lat_is_single_const (aglat)))
	    continue;

	  for (val = aglat->values; val; val = val->next)
	    {
	      int time, size, time_benefit;
	      struct ipa_agg_jf_item item;
	      inline_hints hints;

	      item.offset = aglat->offset;
	      item.value = val->value;
	      vec_safe_push (ajf->items, item);

	      estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
						 known_aggs_ptrs, &size, &time,
						 &hints);
	      time_benefit = base_time - time
		+ devirtualization_time_bonus (node, known_csts, known_binfos,
					       known_aggs_ptrs)
		+ hint_time_bonus (hints);
	      gcc_checking_assert (size >=0);
	      if (size == 0)
		size = 1;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, " - estimates for value ");
		  print_ipcp_constant_value (dump_file, val->value);
		  fprintf (dump_file, " for ");
		  ipa_dump_param (dump_file, info, i);
		  fprintf (dump_file, "[%soffset: " HOST_WIDE_INT_PRINT_DEC
			   "]: time_benefit: %i, size: %i\n",
			   plats->aggs_by_ref ? "ref " : "",
			   aglat->offset, time_benefit, size);
		}

	      val->local_time_benefit = time_benefit;
	      val->local_size_cost = size;
	      /* Undo the temporary push above.  */
	      ajf->items->pop ();
	    }
	}
    }

  for (i = 0; i < count ; i++)
    vec_free (known_aggs[i].items);

  known_csts.release ();
  known_binfos.release ();
  known_aggs.release ();
  known_aggs_ptrs.release ();
}
/* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
   topological sort of values.  This is Tarjan's strongly connected components
   algorithm: dfs/low_link/on_stack live in the ipcp_value itself and the DFS
   counter and stack are kept in function-static variables across the
   recursive calls.  */

static void
add_val_to_toposort (struct ipcp_value *cur_val)
{
  static int dfs_counter = 0;
  static struct ipcp_value *stack;
  struct ipcp_value_source *src;

  /* A non-zero dfs number means the value has already been visited.  */
  if (cur_val->dfs)
    return;

  dfs_counter++;
  cur_val->dfs = dfs_counter;
  cur_val->low_link = dfs_counter;

  /* Push CUR_VAL onto the Tarjan stack (linked through topo_next).  */
  cur_val->topo_next = stack;
  stack = cur_val;
  cur_val->on_stack = true;

  for (src = cur_val->sources; src; src = src->next)
    if (src->val)
      {
	if (src->val->dfs == 0)
	  {
	    /* Unvisited dependency: recurse and absorb its low link.  */
	    add_val_to_toposort (src->val);
	    if (src->val->low_link < cur_val->low_link)
	      cur_val->low_link = src->val->low_link;
	  }
	else if (src->val->on_stack
		 && src->val->dfs < cur_val->low_link)
	  cur_val->low_link = src->val->dfs;
      }

  /* CUR_VAL is the root of an SCC; pop the whole component off the stack
     and chain it (via scc_next) into the global topological order.  */
  if (cur_val->dfs == cur_val->low_link)
    {
      struct ipcp_value *v, *scc_list = NULL;

      do
	{
	  v = stack;
	  stack = v->topo_next;
	  v->on_stack = false;

	  v->scc_next = scc_list;
	  scc_list = v;
	}
      while (v != cur_val);

      cur_val->topo_next = values_topo;
      values_topo = cur_val;
    }
}
2080 /* Add all values in lattices associated with NODE to the topological sort if
2081 they are not there yet. */
2083 static void
2084 add_all_node_vals_to_toposort (struct cgraph_node *node)
2086 struct ipa_node_params *info = IPA_NODE_REF (node);
2087 int i, count = ipa_get_param_count (info);
2089 for (i = 0; i < count ; i++)
2091 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2092 struct ipcp_lattice *lat = &plats->itself;
2093 struct ipcp_agg_lattice *aglat;
2094 struct ipcp_value *val;
2096 if (!lat->bottom)
2097 for (val = lat->values; val; val = val->next)
2098 add_val_to_toposort (val);
2100 if (!plats->aggs_bottom)
2101 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2102 if (!aglat->bottom)
2103 for (val = aglat->values; val; val = val->next)
2104 add_val_to_toposort (val);
/* One pass of constants propagation along the call graph edges, from callers
   to callees (requires topological ordering in TOPO), iterate over strongly
   connected components.  */

static void
propagate_constants_topo (struct topo_info *topo)
{
  int i;

  for (i = topo->nnodes - 1; i >= 0; i--)
    {
      unsigned j;
      struct cgraph_node *v, *node = topo->order[i];
      vec<cgraph_node_ptr> cycle_nodes = ipa_get_nodes_in_cycle (node);

      /* First, iteratively propagate within the strongly connected component
	 until all lattices stabilize.  */
      FOR_EACH_VEC_ELT (cycle_nodes, j, v)
	if (cgraph_function_with_gimple_body_p (v))
	  push_node_to_stack (topo, v);

      /* Worklist loop: whenever propagation changes a callee inside the SCC,
	 that callee is re-queued so the change is propagated further.  */
      v = pop_node_from_stack (topo);
      while (v)
	{
	  struct cgraph_edge *cs;

	  for (cs = v->callees; cs; cs = cs->next_callee)
	    if (ipa_edge_within_scc (cs)
		&& propagate_constants_accross_call (cs))
	      push_node_to_stack (topo, cs->callee);
	  v = pop_node_from_stack (topo);
	}

      /* Afterwards, propagate along edges leading out of the SCC, calculate
	 the local effects of the discovered constants and add all values to
	 the topological sort.  */
      FOR_EACH_VEC_ELT (cycle_nodes, j, v)
	if (cgraph_function_with_gimple_body_p (v))
	  {
	    struct cgraph_edge *cs;

	    estimate_local_effects (v);
	    add_all_node_vals_to_toposort (v);
	    for (cs = v->callees; cs; cs = cs->next_callee)
	      if (!ipa_edge_within_scc (cs))
		propagate_constants_accross_call (cs);
	  }
      cycle_nodes.release ();
    }
}
/* Return the sum of A and B if none of them is bigger than INT_MAX/2, return
   the bigger one if otherwise.  This is a saturating addition used for
   benefit/cost accumulation, where wrap-around would be worse than an
   imprecise large value.  */

static int
safe_add (int a, int b)
{
  /* Only add when both operands are small enough that the sum cannot
     overflow; otherwise saturate to the larger operand.  */
  if (a <= INT_MAX / 2 && b <= INT_MAX / 2)
    return a + b;
  return a > b ? a : b;
}
/* Propagate the estimated effects of individual values along the topological
   from the dependent values to those they depend on.  */

static void
propagate_effects (void)
{
  struct ipcp_value *base;

  /* BASE iterates over SCC representatives in topological order
     (values_topo); VAL then walks all values in the same SCC via
     scc_next.  */
  for (base = values_topo; base; base = base->topo_next)
    {
      struct ipcp_value_source *src;
      struct ipcp_value *val;
      int time = 0, size = 0;

      /* Accumulate local plus already-propagated effects of the whole SCC,
	 saturating via safe_add to avoid overflow.  */
      for (val = base; val; val = val->scc_next)
	{
	  time = safe_add (time,
			   val->local_time_benefit + val->prop_time_benefit);
	  size = safe_add (size, val->local_size_cost + val->prop_size_cost);
	}

      /* Push the SCC totals onto every source value we depend on, but only
	 through edges that are likely executed (maybe-hot).  */
      for (val = base; val; val = val->scc_next)
	for (src = val->sources; src; src = src->next)
	  if (src->val
	      && cgraph_maybe_hot_edge_p (src->cs))
	    {
	      src->val->prop_time_benefit = safe_add (time,
						src->val->prop_time_benefit);
	      src->val->prop_size_cost = safe_add (size,
						   src->val->prop_size_cost);
	    }
    }
}
/* Propagate constants, binfos and their effects from the summaries
   interprocedurally.  */

static void
ipcp_propagate_stage (struct topo_info *topo)
{
  struct cgraph_node *node;

  if (dump_file)
    fprintf (dump_file, "\n Propagating constants:\n\n");

  if (in_lto_p)
    ipa_update_after_lto_read ();

  /* Allocate and initialize lattices for every analyzable function, and
     gather the unit-wide size and profile-count maxima used to bound
     cloning later.  */
  FOR_EACH_DEFINED_FUNCTION (node)
  {
    struct ipa_node_params *info = IPA_NODE_REF (node);

    determine_versionability (node);
    if (cgraph_function_with_gimple_body_p (node))
      {
	info->lattices = XCNEWVEC (struct ipcp_param_lattices,
				   ipa_get_param_count (info));
	initialize_node_lattices (node);
      }
    if (node->definition && !node->alias)
      overall_size += inline_summary (node)->self_size;
    if (node->count > max_count)
      max_count = node->count;
  }

  /* Derive the unit growth budget: at least PARAM_LARGE_UNIT_INSNS, grown
     by PARAM_IPCP_UNIT_GROWTH percent (plus one so it is never zero).  */
  max_new_size = overall_size;
  if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
    max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
  max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;

  if (dump_file)
    fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
	     overall_size, max_new_size);

  propagate_constants_topo (topo);
#ifdef ENABLE_CHECKING
  ipcp_verify_propagated_values ();
#endif
  propagate_effects ();

  if (dump_file)
    {
      fprintf (dump_file, "\nIPA lattices after all propagation:\n");
      print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
    }
}
/* Discover newly direct outgoing edges from NODE which is a new clone with
   known KNOWN_VALS and make them direct.  */

static void
ipcp_discover_new_direct_edges (struct cgraph_node *node,
				vec<tree> known_vals,
				struct ipa_agg_replacement_value *aggvals)
{
  struct cgraph_edge *ie, *next_ie;
  bool found = false;

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      tree target;

      /* ipa_make_edge_direct_to_target may remove IE from the indirect
	 list, so remember the successor up front.  */
      next_ie = ie->next_callee;
      target = ipa_get_indirect_edge_target_1 (ie, known_vals, vNULL, vNULL,
					       aggvals);
      if (target)
	{
	  /* Save the indirect-call info before IE is turned into a direct
	     edge, which invalidates it.  */
	  bool agg_contents = ie->indirect_info->agg_contents;
	  bool polymorphic = ie->indirect_info->polymorphic;
	  int param_index = ie->indirect_info->param_index;
	  struct cgraph_edge *cs = ipa_make_edge_direct_to_target (ie, target);
	  found = true;

	  if (cs && !agg_contents && !polymorphic)
	    {
	      /* A plain scalar use of the parameter became a direct call:
		 decrement its controlled-uses counter, and once it hits
		 zero also drop the reference created for the clone.  */
	      struct ipa_node_params *info = IPA_NODE_REF (node);
	      int c = ipa_get_controlled_uses (info, param_index);
	      if (c != IPA_UNDESCRIBED_USE)
		{
		  struct ipa_ref *to_del;

		  c--;
		  ipa_set_controlled_uses (info, param_index, c);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, " controlled uses count of param "
			     "%i bumped down to %i\n", param_index, c);
		  if (c == 0
		      && (to_del = ipa_find_reference (node,
						       cs->callee,
						       NULL, 0)))
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file, " and even removing its "
				 "cloning-created reference\n");
		      ipa_remove_reference (to_del);
		    }
		}
	    }
	}
    }
  /* Turning calls to direct calls will improve overall summary.  */
  if (found)
    inline_update_overall_summary (node);
}
/* Vectors of edge pointers, indexed by edge UID, which hold doubly-linked
   lists of clones of an original cgraph edge.  */

static vec<cgraph_edge_p> next_edge_clone;
static vec<cgraph_edge_p> prev_edge_clone;
2326 static inline void
2327 grow_edge_clone_vectors (void)
2329 if (next_edge_clone.length ()
2330 <= (unsigned) cgraph_edge_max_uid)
2331 next_edge_clone.safe_grow_cleared (cgraph_edge_max_uid + 1);
2332 if (prev_edge_clone.length ()
2333 <= (unsigned) cgraph_edge_max_uid)
2334 prev_edge_clone.safe_grow_cleared (cgraph_edge_max_uid + 1);
/* Edge duplication hook to grow the appropriate linked list in
   next_edge_clone.  */

static void
ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			    void *)
{
  grow_edge_clone_vectors ();

  /* Splice DST into the doubly-linked clone list immediately after SRC:
     SRC <-> DST <-> OLD_NEXT.  Order of the assignments below matters.  */
  struct cgraph_edge *old_next = next_edge_clone[src->uid];
  if (old_next)
    prev_edge_clone[old_next->uid] = dst;
  prev_edge_clone[dst->uid] = src;

  next_edge_clone[dst->uid] = old_next;
  next_edge_clone[src->uid] = dst;
}
2355 /* Hook that is called by cgraph.c when an edge is removed. */
2357 static void
2358 ipcp_edge_removal_hook (struct cgraph_edge *cs, void *)
2360 grow_edge_clone_vectors ();
2362 struct cgraph_edge *prev = prev_edge_clone[cs->uid];
2363 struct cgraph_edge *next = next_edge_clone[cs->uid];
2364 if (prev)
2365 next_edge_clone[prev->uid] = next;
2366 if (next)
2367 prev_edge_clone[next->uid] = prev;
2370 /* See if NODE is a clone with a known aggregate value at a given OFFSET of a
2371 parameter with the given INDEX. */
2373 static tree
2374 get_clone_agg_value (struct cgraph_node *node, HOST_WIDEST_INT offset,
2375 int index)
2377 struct ipa_agg_replacement_value *aggval;
2379 aggval = ipa_get_agg_replacements_for_node (node);
2380 while (aggval)
2382 if (aggval->offset == offset
2383 && aggval->index == index)
2384 return aggval->value;
2385 aggval = aggval->next;
2387 return NULL_TREE;
/* Return true if edge CS does bring about the value described by SRC.  */

static bool
cgraph_edge_brings_value_p (struct cgraph_edge *cs,
			    struct ipcp_value_source *src)
{
  struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
  struct ipa_node_params *dst_info = IPA_NODE_REF (cs->callee);

  /* Edges into other (non-all-contexts) specialized clones, or from dead
     callers, do not count.  */
  if ((dst_info->ipcp_orig_node && !dst_info->is_all_contexts_clone)
      || caller_info->node_dead)
    return false;
  /* A NULL source value represents "any value" and is trivially brought.  */
  if (!src->val)
    return true;

  if (caller_info->ipcp_orig_node)
    {
      /* The caller is itself an IPA-CP clone: consult its recorded known
	 scalar values or aggregate replacements directly.  */
      tree t;
      if (src->offset == -1)
	t = caller_info->known_vals[src->index];
      else
	t = get_clone_agg_value (cs->caller, src->offset, src->index);
      return (t != NULL_TREE
	      && values_equal_for_ipcp_p (src->val->value, t));
    }
  else
    {
      /* Otherwise check the caller's lattices: the value is brought only
	 if the relevant lattice has stabilized to exactly this single
	 constant.  */
      struct ipcp_agg_lattice *aglat;
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
								 src->index);
      if (src->offset == -1)
	return (ipa_lat_is_single_const (&plats->itself)
		&& values_equal_for_ipcp_p (src->val->value,
					    plats->itself.values->value));
      else
	{
	  if (plats->aggs_bottom || plats->aggs_contain_variable)
	    return false;
	  for (aglat = plats->aggs; aglat; aglat = aglat->next)
	    if (aglat->offset == src->offset)
	      return (ipa_lat_is_single_const (aglat)
		      && values_equal_for_ipcp_p (src->val->value,
						  aglat->values->value));
	  return false;
	}
    }
}
/* Get the next clone in the linked list of clones of an edge, or NULL when
   CS is the last one.  */

static inline struct cgraph_edge *
get_next_cgraph_edge_clone (struct cgraph_edge *cs)
{
  return next_edge_clone[cs->uid];
}
/* Given VAL, iterate over all its sources and if they still hold, add their
   edge frequency and their number into *FREQ_SUM and *CALLER_COUNT
   respectively, and their execution counts into *COUNT_SUM.  Return true if
   at least one of the contributing edges is maybe-hot.  */

static bool
get_info_about_necessary_edges (struct ipcp_value *val, int *freq_sum,
				gcov_type *count_sum, int *caller_count)
{
  struct ipcp_value_source *src;
  int freq = 0, count = 0;
  gcov_type cnt = 0;
  bool hot = false;

  for (src = val->sources; src; src = src->next)
    {
      /* Walk the original edge and all its clones.  */
      struct cgraph_edge *cs = src->cs;
      while (cs)
	{
	  if (cgraph_edge_brings_value_p (cs, src))
	    {
	      count++;
	      freq += cs->frequency;
	      cnt += cs->count;
	      hot |= cgraph_maybe_hot_edge_p (cs);
	    }
	  cs = get_next_cgraph_edge_clone (cs);
	}
    }

  *freq_sum = freq;
  *count_sum = cnt;
  *caller_count = count;
  return hot;
}
2481 /* Return a vector of incoming edges that do bring value VAL. It is assumed
2482 their number is known and equal to CALLER_COUNT. */
2484 static vec<cgraph_edge_p>
2485 gather_edges_for_value (struct ipcp_value *val, int caller_count)
2487 struct ipcp_value_source *src;
2488 vec<cgraph_edge_p> ret;
2490 ret.create (caller_count);
2491 for (src = val->sources; src; src = src->next)
2493 struct cgraph_edge *cs = src->cs;
2494 while (cs)
2496 if (cgraph_edge_brings_value_p (cs, src))
2497 ret.quick_push (cs);
2498 cs = get_next_cgraph_edge_clone (cs);
2502 return ret;
/* Construct a replacement map for a known VALUE for a formal parameter
   PARM_NUM.  Return it or NULL if for some reason it cannot be created.  */

static struct ipa_replace_map *
get_replacement_map (struct ipa_node_params *info, tree value, int parm_num)
{
  struct ipa_replace_map *replace_map;

  replace_map = ggc_alloc_ipa_replace_map ();
  if (dump_file)
    {
      fprintf (dump_file, " replacing ");
      ipa_dump_param (dump_file, info, parm_num);

      fprintf (dump_file, " with const ");
      print_generic_expr (dump_file, value, 0);
      fprintf (dump_file, "\n");
    }
  /* old_tree is unused here; the replacement is identified by parm_num.  */
  replace_map->old_tree = NULL;
  replace_map->parm_num = parm_num;
  replace_map->new_tree = value;
  replace_map->replace_p = true;
  replace_map->ref_p = false;

  return replace_map;
}
/* Dump new profiling counts of ORIG_NODE, NEW_NODE and their callee edges
   to dump_file.  Caller must ensure dump_file is non-NULL.  */

static void
dump_profile_updates (struct cgraph_node *orig_node,
		      struct cgraph_node *new_node)
{
  struct cgraph_edge *cs;

  fprintf (dump_file, " setting count of the specialized node to "
	   HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) new_node->count);
  for (cs = new_node->callees; cs ; cs = cs->next_callee)
    fprintf (dump_file, " edge to %s has count "
	     HOST_WIDE_INT_PRINT_DEC "\n",
	     cs->callee->name (), (HOST_WIDE_INT) cs->count);

  fprintf (dump_file, " setting count of the original node to "
	   HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) orig_node->count);
  for (cs = orig_node->callees; cs ; cs = cs->next_callee)
    fprintf (dump_file, " edge to %s is left with "
	     HOST_WIDE_INT_PRINT_DEC "\n",
	     cs->callee->name (), (HOST_WIDE_INT) cs->count);
}
/* After a specialized NEW_NODE version of ORIG_NODE has been created, update
   their profile information to reflect this.  */

static void
update_profiling_info (struct cgraph_node *orig_node,
		       struct cgraph_node *new_node)
{
  struct cgraph_edge *cs;
  struct caller_statistics stats;
  gcov_type new_sum, orig_sum;
  gcov_type remainder, orig_node_count = orig_node->count;

  if (orig_node_count == 0)
    return;

  /* Sum the incoming counts of both versions (including aliases).  */
  init_caller_stats (&stats);
  cgraph_for_node_and_aliases (orig_node, gather_caller_stats, &stats, false);
  orig_sum = stats.count_sum;
  init_caller_stats (&stats);
  cgraph_for_node_and_aliases (new_node, gather_caller_stats, &stats, false);
  new_sum = stats.count_sum;

  if (orig_node_count < orig_sum + new_sum)
    {
      /* Inconsistent profile data: the node's count is smaller than the sum
	 of its callers'.  Pretend the count was 20% above the sum so the
	 scaling below stays sane.  */
      if (dump_file)
	fprintf (dump_file, " Problem: node %s/%i has too low count "
		 HOST_WIDE_INT_PRINT_DEC " while the sum of incoming "
		 "counts is " HOST_WIDE_INT_PRINT_DEC "\n",
		 orig_node->name (), orig_node->order,
		 (HOST_WIDE_INT) orig_node_count,
		 (HOST_WIDE_INT) (orig_sum + new_sum));

      orig_node_count = (orig_sum + new_sum) * 12 / 10;
      if (dump_file)
	fprintf (dump_file, " proceeding by pretending it was "
		 HOST_WIDE_INT_PRINT_DEC "\n",
		 (HOST_WIDE_INT) orig_node_count);
    }

  /* The clone gets the counts that flow into it; the original keeps the
     rest.  */
  new_node->count = new_sum;
  remainder = orig_node_count - new_sum;
  orig_node->count = remainder;

  /* Scale outgoing edge counts of both versions proportionally to the new
     node counts.  */
  for (cs = new_node->callees; cs ; cs = cs->next_callee)
    if (cs->frequency)
      cs->count = apply_probability (cs->count,
				     GCOV_COMPUTE_SCALE (new_sum,
							 orig_node_count));
    else
      cs->count = 0;

  for (cs = orig_node->callees; cs ; cs = cs->next_callee)
    cs->count = apply_probability (cs->count,
				   GCOV_COMPUTE_SCALE (remainder,
						       orig_node_count));

  if (dump_file)
    dump_profile_updates (orig_node, new_node);
}
/* Update the respective profile of specialized NEW_NODE and the original
   ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
   have been redirected to the specialized version.  */

static void
update_specialized_profile (struct cgraph_node *new_node,
			    struct cgraph_node *orig_node,
			    gcov_type redirected_sum)
{
  struct cgraph_edge *cs;
  gcov_type new_node_count, orig_node_count = orig_node->count;

  if (dump_file)
    fprintf (dump_file, " the sum of counts of redirected edges is "
	     HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) redirected_sum);
  if (orig_node_count == 0)
    return;

  gcc_assert (orig_node_count >= redirected_sum);

  /* Move the redirected count from the original to the clone.  */
  new_node_count = new_node->count;
  new_node->count += redirected_sum;
  orig_node->count -= redirected_sum;

  /* Scale the clone's outgoing edges up proportionally to the count it
     gained, relative to its previous count.  */
  for (cs = new_node->callees; cs ; cs = cs->next_callee)
    if (cs->frequency)
      cs->count += apply_probability (cs->count,
				      GCOV_COMPUTE_SCALE (redirected_sum,
							  new_node_count));
    else
      cs->count = 0;

  /* Scale the original's outgoing edges down by the count it lost, clamping
     at zero.  */
  for (cs = orig_node->callees; cs ; cs = cs->next_callee)
    {
      gcov_type dec = apply_probability (cs->count,
					 GCOV_COMPUTE_SCALE (redirected_sum,
							     orig_node_count));
      if (dec < cs->count)
	cs->count -= dec;
      else
	cs->count = 0;
    }

  if (dump_file)
    dump_profile_updates (orig_node, new_node);
}
/* Create a specialized version of NODE with known constants and types of
   parameters in KNOWN_VALS and redirect all edges in CALLERS to it.  Takes
   ownership of KNOWN_VALS (stored in the new node's info) and releases
   CALLERS.  Returns the new clone.  */

static struct cgraph_node *
create_specialized_node (struct cgraph_node *node,
			 vec<tree> known_vals,
			 struct ipa_agg_replacement_value *aggvals,
			 vec<cgraph_edge_p> callers)
{
  struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
  vec<ipa_replace_map_p, va_gc> *replace_trees = NULL;
  struct ipa_agg_replacement_value *av;
  struct cgraph_node *new_node;
  int i, count = ipa_get_param_count (info);
  bitmap args_to_skip;

  gcc_assert (!info->ipcp_orig_node);

  if (node->local.can_change_signature)
    {
      /* Drop parameters that are either replaced by a known constant
	 (non-BINFO) or unused altogether.  */
      args_to_skip = BITMAP_GGC_ALLOC ();
      for (i = 0; i < count; i++)
	{
	  tree t = known_vals[i];

	  if ((t && TREE_CODE (t) != TREE_BINFO)
	      || !ipa_is_param_used (info, i))
	    bitmap_set_bit (args_to_skip, i);
	}
    }
  else
    {
      args_to_skip = NULL;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " cannot change function signature\n");
    }

  /* Build replacement maps for every known scalar constant.  */
  for (i = 0; i < count ; i++)
    {
      tree t = known_vals[i];
      if (t && TREE_CODE (t) != TREE_BINFO)
	{
	  struct ipa_replace_map *replace_map;

	  replace_map = get_replacement_map (info, t, i);
	  if (replace_map)
	    vec_safe_push (replace_trees, replace_map);
	}
    }

  new_node = cgraph_create_virtual_clone (node, callers, replace_trees,
					  args_to_skip, "constprop");
  ipa_set_node_agg_value_chain (new_node, aggvals);
  /* Record references to symbols whose addresses appear among the aggregate
     replacement values so they are kept alive.  */
  for (av = aggvals; av; av = av->next)
    ipa_maybe_record_reference (new_node, av->value,
				IPA_REF_ADDR, NULL);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, " the new node is %s/%i.\n",
	       new_node->name (), new_node->order);
      if (aggvals)
	ipa_dump_agg_replacement_values (dump_file, aggvals);
    }
  gcc_checking_assert (ipa_node_params_vector.exists ()
		       && (ipa_node_params_vector.length ()
			   > (unsigned) cgraph_max_uid));
  update_profiling_info (node, new_node);
  new_info = IPA_NODE_REF (new_node);
  new_info->ipcp_orig_node = node;
  new_info->known_vals = known_vals;

  ipcp_discover_new_direct_edges (new_node, known_vals, aggvals);

  callers.release ();
  return new_node;
}
/* Given a NODE, and a subset of its CALLERS, try to populate blanks slots in
   KNOWN_VALS with constants and types that are also known for all of the
   CALLERS.  */

static void
find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
					    vec<tree> known_vals,
					    vec<cgraph_edge_p> callers)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);

  for (i = 0; i < count ; i++)
    {
      struct cgraph_edge *cs;
      tree newval = NULL_TREE;
      int j;

      /* Skip parameters already known or with a bottom lattice.  */
      if (ipa_get_scalar_lat (info, i)->bottom || known_vals[i])
	continue;

      /* NEWVAL becomes the common value of argument I across all CALLERS,
	 or NULL_TREE if any caller disagrees or lacks the argument.  */
      FOR_EACH_VEC_ELT (callers, j, cs)
	{
	  struct ipa_jump_func *jump_func;
	  tree t;

	  if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
	    {
	      newval = NULL_TREE;
	      break;
	    }
	  jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
	  t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func);
	  if (!t
	      || (newval
		  && !values_equal_for_ipcp_p (t, newval)))
	    {
	      newval = NULL_TREE;
	      break;
	    }
	  else
	    newval = t;
	}

      if (newval)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " adding an extra known scalar value ");
	      print_ipcp_constant_value (dump_file, newval);
	      fprintf (dump_file, " for ");
	      ipa_dump_param (dump_file, info, i);
	      fprintf (dump_file, "\n");
	    }

	  known_vals[i] = newval;
	}
    }
}
2801 /* Go through PLATS and create a vector of values consisting of values and
2802 offsets (minus OFFSET) of lattices that contain only a single value. */
2804 static vec<ipa_agg_jf_item>
2805 copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
2807 vec<ipa_agg_jf_item> res = vNULL;
2809 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
2810 return vNULL;
2812 for (struct ipcp_agg_lattice *aglat = plats->aggs; aglat; aglat = aglat->next)
2813 if (ipa_lat_is_single_const (aglat))
2815 struct ipa_agg_jf_item ti;
2816 ti.offset = aglat->offset - offset;
2817 ti.value = aglat->values->value;
2818 res.safe_push (ti);
2820 return res;
/* Intersect all values in INTER with single value lattices in PLATS (while
   subtracting OFFSET).  Items with no matching single-constant lattice get
   their value cleared to NULL_TREE; if the aggregates are unusable the whole
   vector is released.  */

static void
intersect_with_plats (struct ipcp_param_lattices *plats,
		      vec<ipa_agg_jf_item> *inter,
		      HOST_WIDE_INT offset)
{
  struct ipcp_agg_lattice *aglat;
  struct ipa_agg_jf_item *item;
  int k;

  if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
    {
      inter->release ();
      return;
    }

  /* AGLAT is deliberately not reset for each item: both the aggregate
     lattice list and INTER are ordered by offset, so the scan can resume
     where the previous item left off.  */
  aglat = plats->aggs;
  FOR_EACH_VEC_ELT (*inter, k, item)
    {
      bool found = false;
      if (!item->value)
	continue;
      while (aglat)
	{
	  if (aglat->offset - offset > item->offset)
	    break;
	  if (aglat->offset - offset == item->offset)
	    {
	      gcc_checking_assert (item->value);
	      if (values_equal_for_ipcp_p (item->value, aglat->values->value))
		found = true;
	      break;
	    }
	  aglat = aglat->next;
	}
      if (!found)
	item->value = NULL_TREE;
    }
}
2865 /* Copy agggregate replacement values of NODE (which is an IPA-CP clone) to the
2866 vector result while subtracting OFFSET from the individual value offsets. */
2868 static vec<ipa_agg_jf_item>
2869 agg_replacements_to_vector (struct cgraph_node *node, int index,
2870 HOST_WIDE_INT offset)
2872 struct ipa_agg_replacement_value *av;
2873 vec<ipa_agg_jf_item> res = vNULL;
2875 for (av = ipa_get_agg_replacements_for_node (node); av; av = av->next)
2876 if (av->index == index
2877 && (av->offset - offset) >= 0)
2879 struct ipa_agg_jf_item item;
2880 gcc_checking_assert (av->value);
2881 item.offset = av->offset - offset;
2882 item.value = av->value;
2883 res.safe_push (item);
2886 return res;
/* Intersect all values in INTER with those that we have already scheduled to
   be replaced in parameter number INDEX of NODE, which is an IPA-CP clone
   (while subtracting OFFSET).  Non-matching items get their value cleared;
   if NODE has no replacements at all the vector is released.  */

static void
intersect_with_agg_replacements (struct cgraph_node *node, int index,
				 vec<ipa_agg_jf_item> *inter,
				 HOST_WIDE_INT offset)
{
  struct ipa_agg_replacement_value *srcvals;
  struct ipa_agg_jf_item *item;
  int i;

  srcvals = ipa_get_agg_replacements_for_node (node);
  if (!srcvals)
    {
      inter->release ();
      return;
    }

  FOR_EACH_VEC_ELT (*inter, i, item)
    {
      struct ipa_agg_replacement_value *av;
      bool found = false;
      if (!item->value)
	continue;
      /* Linear scan of the replacement list for a same-index, same-offset
	 entry with an equal value.  */
      for (av = srcvals; av; av = av->next)
	{
	  gcc_checking_assert (av->value);
	  if (av->index == index
	      && av->offset - offset == item->offset)
	    {
	      if (values_equal_for_ipcp_p (item->value, av->value))
		found = true;
	      break;
	    }
	}
      if (!found)
	item->value = NULL_TREE;
    }
}
/* Intersect values in INTER with aggregate values that come along edge CS to
   parameter number INDEX and return it.  If INTER does not actually exist yet,
   copy all incoming values to it.  If we determine we ended up with no values
   whatsoever, return a released vector.  */

static vec<ipa_agg_jf_item>
intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
				vec<ipa_agg_jf_item> inter)
{
  struct ipa_jump_func *jfunc;
  jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), index);
  if (jfunc->type == IPA_JF_PASS_THROUGH
      && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
    {
      /* Simple pass-through: the aggregate contents of the caller's formal
	 SRC_IDX arrive unchanged.  */
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);

      if (caller_info->ipcp_orig_node)
	{
	  /* Caller is itself a clone: use its scheduled aggregate
	     replacements, validated against the original node's lattices.  */
	  struct cgraph_node *orig_node = caller_info->ipcp_orig_node;
	  struct ipcp_param_lattices *orig_plats;
	  orig_plats = ipa_get_parm_lattices (IPA_NODE_REF (orig_node),
					      src_idx);
	  if (agg_pass_through_permissible_p (orig_plats, jfunc))
	    {
	      if (!inter.exists ())
		inter = agg_replacements_to_vector (cs->caller, src_idx, 0);
	      else
		intersect_with_agg_replacements (cs->caller, src_idx,
						 &inter, 0);
	    }
	}
      else
	{
	  struct ipcp_param_lattices *src_plats;
	  src_plats = ipa_get_parm_lattices (caller_info, src_idx);
	  if (agg_pass_through_permissible_p (src_plats, jfunc))
	    {
	      /* Currently we do not produce clobber aggregate jump
		 functions, adjust when we do.  */
	      gcc_checking_assert (!jfunc->agg.items);
	      if (!inter.exists ())
		inter = copy_plats_to_inter (src_plats, 0);
	      else
		intersect_with_plats (src_plats, &inter, 0);
	    }
	}
    }
  else if (jfunc->type == IPA_JF_ANCESTOR
	   && ipa_get_jf_ancestor_agg_preserved (jfunc))
    {
      /* Ancestor jump function: like pass-through but the data is shifted
	 by the ancestor offset DELTA.  */
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
      struct ipcp_param_lattices *src_plats;
      HOST_WIDE_INT delta = ipa_get_jf_ancestor_offset (jfunc);

      if (caller_info->ipcp_orig_node)
	{
	  if (!inter.exists ())
	    inter = agg_replacements_to_vector (cs->caller, src_idx, delta);
	  else
	    intersect_with_agg_replacements (cs->caller, src_idx, &inter,
					     delta);
	}
      else
	{
	  src_plats = ipa_get_parm_lattices (caller_info, src_idx);;
	  /* Currently we do not produce clobber aggregate jump
	     functions, adjust when we do.  */
	  gcc_checking_assert (!src_plats->aggs || !jfunc->agg.items);
	  if (!inter.exists ())
	    inter = copy_plats_to_inter (src_plats, delta);
	  else
	    intersect_with_plats (src_plats, &inter, delta);
	}
    }
  else if (jfunc->agg.items)
    {
      /* Known aggregate items recorded directly on the jump function.  */
      struct ipa_agg_jf_item *item;
      int k;

      if (!inter.exists ())
	for (unsigned i = 0; i < jfunc->agg.items->length (); i++)
	  inter.safe_push ((*jfunc->agg.items)[i]);
      else
	FOR_EACH_VEC_ELT (inter, k, item)
	  {
	    int l = 0;
	    bool found = false;;

	    if (!item->value)
	      continue;

	    /* Both lists are sorted by offset, so stop scanning once we
	       pass the item's offset.  */
	    while ((unsigned) l < jfunc->agg.items->length ())
	      {
		struct ipa_agg_jf_item *ti;
		ti = &(*jfunc->agg.items)[l];
		if (ti->offset > item->offset)
		  break;
		if (ti->offset == item->offset)
		  {
		    gcc_checking_assert (ti->value);
		    if (values_equal_for_ipcp_p (item->value,
						 ti->value))
		      found = true;
		    break;
		  }
		l++;
	      }
	    if (!found)
	      item->value = NULL;
	  }
    }
  else
    {
      /* No aggregate information at all on this edge: the intersection is
	 empty.  */
      inter.release ();
      return vec<ipa_agg_jf_item>();
    }
  return inter;
}
/* Look at edges in CALLERS and collect all known aggregate values that arrive
   from all of them.  Returns a newly built (GC-allocated) list of
   ipa_agg_replacement_value entries.  */

static struct ipa_agg_replacement_value *
find_aggregate_values_for_callers_subset (struct cgraph_node *node,
					  vec<cgraph_edge_p> callers)
{
  struct ipa_node_params *dest_info = IPA_NODE_REF (node);
  struct ipa_agg_replacement_value *res = NULL;
  struct cgraph_edge *cs;
  int i, j, count = ipa_get_param_count (dest_info);

  /* Only parameters for which every caller actually passes an argument can
     have aggregate values; clamp COUNT to the smallest argument count.  */
  FOR_EACH_VEC_ELT (callers, j, cs)
    {
      int c = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
      if (c < count)
	count = c;
    }

  for (i = 0; i < count ; i++)
    {
      struct cgraph_edge *cs;
      vec<ipa_agg_jf_item> inter = vNULL;
      struct ipa_agg_jf_item *item;
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (dest_info, i);
      int j;

      /* Among other things, the following check should deal with all by_ref
	 mismatches.  */
      if (plats->aggs_bottom)
	continue;

      /* INTER starts empty (meaning "copy first edge's values") and is
	 repeatedly intersected; if it ever becomes non-existent there is
	 nothing common and we move to the next parameter.  */
      FOR_EACH_VEC_ELT (callers, j, cs)
	{
	  inter = intersect_aggregates_with_edge (cs, i, inter);

	  if (!inter.exists ())
	    goto next_param;
	}

      FOR_EACH_VEC_ELT (inter, j, item)
	{
	  struct ipa_agg_replacement_value *v;

	  if (!item->value)
	    continue;

	  v = ggc_alloc_ipa_agg_replacement_value ();
	  v->index = i;
	  v->offset = item->offset;
	  v->value = item->value;
	  v->by_ref = plats->aggs_by_ref;
	  v->next = res;
	  res = v;
	}

    next_param:
      if (inter.exists ())
	inter.release ();
    }
  return res;
}
3115 /* Turn KNOWN_AGGS into a list of aggreate replacement values. */
3117 static struct ipa_agg_replacement_value *
3118 known_aggs_to_agg_replacement_list (vec<ipa_agg_jump_function> known_aggs)
3120 struct ipa_agg_replacement_value *res = NULL;
3121 struct ipa_agg_jump_function *aggjf;
3122 struct ipa_agg_jf_item *item;
3123 int i, j;
3125 FOR_EACH_VEC_ELT (known_aggs, i, aggjf)
3126 FOR_EACH_VEC_SAFE_ELT (aggjf->items, j, item)
3128 struct ipa_agg_replacement_value *v;
3129 v = ggc_alloc_ipa_agg_replacement_value ();
3130 v->index = i;
3131 v->offset = item->offset;
3132 v->value = item->value;
3133 v->by_ref = aggjf->by_ref;
3134 v->next = res;
3135 res = v;
3137 return res;
/* Determine whether CS also brings all scalar values that the NODE is
   specialized for.  */

static bool
cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
					 struct cgraph_node *node)
{
  struct ipa_node_params *dest_info = IPA_NODE_REF (node);
  int count = ipa_get_param_count (dest_info);
  struct ipa_node_params *caller_info;
  struct ipa_edge_args *args;
  int i;

  caller_info = IPA_NODE_REF (cs->caller);
  args = IPA_EDGE_REF (cs);
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      tree val, t;

      /* Parameters the clone is not specialized for impose no
	 constraint.  */
      val = dest_info->known_vals[i];
      if (!val)
	continue;

      /* The edge must supply this argument and its jump function must
	 evaluate to the very same constant.  */
      if (i >= ipa_get_cs_argument_count (args))
	return false;
      jump_func = ipa_get_ith_jump_func (args, i);
      t = ipa_value_from_jfunc (caller_info, jump_func);
      if (!t || !values_equal_for_ipcp_p (val, t))
	return false;
    }
  return true;
}
3174 /* Determine whether CS also brings all aggregate values that NODE is
3175 specialized for. */
3176 static bool
3177 cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
3178 struct cgraph_node *node)
3180 struct ipa_node_params *orig_caller_info = IPA_NODE_REF (cs->caller);
3181 struct ipa_agg_replacement_value *aggval;
3182 int i, ec, count;
3184 aggval = ipa_get_agg_replacements_for_node (node);
3185 if (!aggval)
3186 return true;
3188 count = ipa_get_param_count (IPA_NODE_REF (node));
3189 ec = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
3190 if (ec < count)
3191 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3192 if (aggval->index >= ec)
3193 return false;
3195 if (orig_caller_info->ipcp_orig_node)
3196 orig_caller_info = IPA_NODE_REF (orig_caller_info->ipcp_orig_node);
3198 for (i = 0; i < count; i++)
3200 static vec<ipa_agg_jf_item> values = vec<ipa_agg_jf_item>();
3201 struct ipcp_param_lattices *plats;
3202 bool interesting = false;
3203 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3204 if (aggval->index == i)
3206 interesting = true;
3207 break;
3209 if (!interesting)
3210 continue;
3212 plats = ipa_get_parm_lattices (orig_caller_info, aggval->index);
3213 if (plats->aggs_bottom)
3214 return false;
3216 values = intersect_aggregates_with_edge (cs, i, values);
3217 if (!values.exists ())
3218 return false;
3220 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3221 if (aggval->index == i)
3223 struct ipa_agg_jf_item *item;
3224 int j;
3225 bool found = false;
3226 FOR_EACH_VEC_ELT (values, j, item)
3227 if (item->value
3228 && item->offset == av->offset
3229 && values_equal_for_ipcp_p (item->value, av->value))
3231 found = true;
3232 break;
3234 if (!found)
3236 values.release ();
3237 return false;
3241 return true;
/* Given an original NODE and a VAL for which we have already created a
   specialized clone, look whether there are incoming edges that still lead
   into the old node but now also bring the requested value and also conform to
   all other criteria such that they can be redirected to the special node.
   This function can therefore redirect the final edge in a SCC.  */

static void
perhaps_add_new_callers (struct cgraph_node *node, struct ipcp_value *val)
{
  struct ipcp_value_source *src;
  gcov_type redirected_sum = 0;

  for (src = val->sources; src; src = src->next)
    {
      /* Walk the source edge and all its clones.  */
      struct cgraph_edge *cs = src->cs;
      while (cs)
	{
	  enum availability availability;
	  struct cgraph_node *dst = cgraph_function_node (cs->callee,
							  &availability);
	  if ((dst == node || IPA_NODE_REF (dst)->is_all_contexts_clone)
	      && availability > AVAIL_OVERWRITABLE
	      && cgraph_edge_brings_value_p (cs, src))
	    {
	      /* Redirect only when the edge supplies every scalar and
		 aggregate value the clone was specialized for.  */
	      if (cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
		  && cgraph_edge_brings_all_agg_vals_for_node (cs,
							       val->spec_node))
		{
		  if (dump_file)
		    fprintf (dump_file, " - adding an extra caller %s/%i"
			     " of %s/%i\n",
			     xstrdup (cs->caller->name ()),
			     cs->caller->order,
			     xstrdup (val->spec_node->name ()),
			     val->spec_node->order);

		  cgraph_redirect_edge_callee (cs, val->spec_node);
		  redirected_sum += cs->count;
		}
	    }
	  cs = get_next_cgraph_edge_clone (cs);
	}
    }

  if (redirected_sum)
    update_specialized_profile (val->spec_node, node, redirected_sum);
}
3293 /* Copy KNOWN_BINFOS to KNOWN_VALS. */
3295 static void
3296 move_binfos_to_values (vec<tree> known_vals,
3297 vec<tree> known_binfos)
3299 tree t;
3300 int i;
3302 for (i = 0; known_binfos.iterate (i, &t); i++)
3303 if (t)
3304 known_vals[i] = t;
3307 /* Return true if there is a replacement equivalent to VALUE, INDEX and OFFSET
3308 among those in the AGGVALS list. */
3310 DEBUG_FUNCTION bool
3311 ipcp_val_in_agg_replacements_p (struct ipa_agg_replacement_value *aggvals,
3312 int index, HOST_WIDE_INT offset, tree value)
3314 while (aggvals)
3316 if (aggvals->index == index
3317 && aggvals->offset == offset
3318 && values_equal_for_ipcp_p (aggvals->value, value))
3319 return true;
3320 aggvals = aggvals->next;
3322 return false;
/* Decide whether to create a special version of NODE for value VAL of
   parameter at the given INDEX.  If OFFSET is -1, the value is for the
   parameter itself, otherwise it is stored at the given OFFSET of the
   parameter.  KNOWN_CSTS and KNOWN_BINFOS describe the other already known
   values.  */
3330 static bool
3331 decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
3332 struct ipcp_value *val, vec<tree> known_csts,
3333 vec<tree> known_binfos)
3335 struct ipa_agg_replacement_value *aggvals;
3336 int freq_sum, caller_count;
3337 gcov_type count_sum;
3338 vec<cgraph_edge_p> callers;
3339 vec<tree> kv;
3341 if (val->spec_node)
3343 perhaps_add_new_callers (node, val);
3344 return false;
3346 else if (val->local_size_cost + overall_size > max_new_size)
3348 if (dump_file && (dump_flags & TDF_DETAILS))
3349 fprintf (dump_file, " Ignoring candidate value because "
3350 "max_new_size would be reached with %li.\n",
3351 val->local_size_cost + overall_size);
3352 return false;
3354 else if (!get_info_about_necessary_edges (val, &freq_sum, &count_sum,
3355 &caller_count))
3356 return false;
3358 if (dump_file && (dump_flags & TDF_DETAILS))
3360 fprintf (dump_file, " - considering value ");
3361 print_ipcp_constant_value (dump_file, val->value);
3362 fprintf (dump_file, " for ");
3363 ipa_dump_param (dump_file, IPA_NODE_REF (node), index);
3364 if (offset != -1)
3365 fprintf (dump_file, ", offset: " HOST_WIDE_INT_PRINT_DEC, offset);
3366 fprintf (dump_file, " (caller_count: %i)\n", caller_count);
3369 if (!good_cloning_opportunity_p (node, val->local_time_benefit,
3370 freq_sum, count_sum,
3371 val->local_size_cost)
3372 && !good_cloning_opportunity_p (node,
3373 val->local_time_benefit
3374 + val->prop_time_benefit,
3375 freq_sum, count_sum,
3376 val->local_size_cost
3377 + val->prop_size_cost))
3378 return false;
3380 if (dump_file)
3381 fprintf (dump_file, " Creating a specialized node of %s/%i.\n",
3382 node->name (), node->order);
3384 callers = gather_edges_for_value (val, caller_count);
3385 kv = known_csts.copy ();
3386 move_binfos_to_values (kv, known_binfos);
3387 if (offset == -1)
3388 kv[index] = val->value;
3389 find_more_scalar_values_for_callers_subset (node, kv, callers);
3390 aggvals = find_aggregate_values_for_callers_subset (node, callers);
3391 gcc_checking_assert (offset == -1
3392 || ipcp_val_in_agg_replacements_p (aggvals, index,
3393 offset, val->value));
3394 val->spec_node = create_specialized_node (node, kv, aggvals, callers);
3395 overall_size += val->local_size_cost;
3397 /* TODO: If for some lattice there is only one other known value
3398 left, make a special node for it too. */
3400 return true;
3403 /* Decide whether and what specialized clones of NODE should be created. */
3405 static bool
3406 decide_whether_version_node (struct cgraph_node *node)
3408 struct ipa_node_params *info = IPA_NODE_REF (node);
3409 int i, count = ipa_get_param_count (info);
3410 vec<tree> known_csts, known_binfos;
3411 vec<ipa_agg_jump_function> known_aggs = vNULL;
3412 bool ret = false;
3414 if (count == 0)
3415 return false;
3417 if (dump_file && (dump_flags & TDF_DETAILS))
3418 fprintf (dump_file, "\nEvaluating opportunities for %s/%i.\n",
3419 node->name (), node->order);
3421 gather_context_independent_values (info, &known_csts, &known_binfos,
3422 info->do_clone_for_all_contexts ? &known_aggs
3423 : NULL, NULL);
3425 for (i = 0; i < count ;i++)
3427 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
3428 struct ipcp_lattice *lat = &plats->itself;
3429 struct ipcp_value *val;
3431 if (!lat->bottom
3432 && !known_csts[i]
3433 && !known_binfos[i])
3434 for (val = lat->values; val; val = val->next)
3435 ret |= decide_about_value (node, i, -1, val, known_csts,
3436 known_binfos);
3438 if (!plats->aggs_bottom)
3440 struct ipcp_agg_lattice *aglat;
3441 struct ipcp_value *val;
3442 for (aglat = plats->aggs; aglat; aglat = aglat->next)
3443 if (!aglat->bottom && aglat->values
3444 /* If the following is false, the one value is in
3445 known_aggs. */
3446 && (plats->aggs_contain_variable
3447 || !ipa_lat_is_single_const (aglat)))
3448 for (val = aglat->values; val; val = val->next)
3449 ret |= decide_about_value (node, i, aglat->offset, val,
3450 known_csts, known_binfos);
3452 info = IPA_NODE_REF (node);
3455 if (info->do_clone_for_all_contexts)
3457 struct cgraph_node *clone;
3458 vec<cgraph_edge_p> callers;
3460 if (dump_file)
3461 fprintf (dump_file, " - Creating a specialized node of %s/%i "
3462 "for all known contexts.\n", node->name (),
3463 node->order);
3465 callers = collect_callers_of_node (node);
3466 move_binfos_to_values (known_csts, known_binfos);
3467 clone = create_specialized_node (node, known_csts,
3468 known_aggs_to_agg_replacement_list (known_aggs),
3469 callers);
3470 info = IPA_NODE_REF (node);
3471 info->do_clone_for_all_contexts = false;
3472 IPA_NODE_REF (clone)->is_all_contexts_clone = true;
3473 for (i = 0; i < count ; i++)
3474 vec_free (known_aggs[i].items);
3475 known_aggs.release ();
3476 ret = true;
3478 else
3479 known_csts.release ();
3481 known_binfos.release ();
3482 return ret;
3485 /* Transitively mark all callees of NODE within the same SCC as not dead. */
3487 static void
3488 spread_undeadness (struct cgraph_node *node)
3490 struct cgraph_edge *cs;
3492 for (cs = node->callees; cs; cs = cs->next_callee)
3493 if (ipa_edge_within_scc (cs))
3495 struct cgraph_node *callee;
3496 struct ipa_node_params *info;
3498 callee = cgraph_function_node (cs->callee, NULL);
3499 info = IPA_NODE_REF (callee);
3501 if (info->node_dead)
3503 info->node_dead = 0;
3504 spread_undeadness (callee);
3509 /* Return true if NODE has a caller from outside of its SCC that is not
3510 dead. Worker callback for cgraph_for_node_and_aliases. */
3512 static bool
3513 has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
3514 void *data ATTRIBUTE_UNUSED)
3516 struct cgraph_edge *cs;
3518 for (cs = node->callers; cs; cs = cs->next_caller)
3519 if (cs->caller->thunk.thunk_p
3520 && cgraph_for_node_and_aliases (cs->caller,
3521 has_undead_caller_from_outside_scc_p,
3522 NULL, true))
3523 return true;
3524 else if (!ipa_edge_within_scc (cs)
3525 && !IPA_NODE_REF (cs->caller)->node_dead)
3526 return true;
3527 return false;
3531 /* Identify nodes within the same SCC as NODE which are no longer needed
3532 because of new clones and will be removed as unreachable. */
3534 static void
3535 identify_dead_nodes (struct cgraph_node *node)
3537 struct cgraph_node *v;
3538 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
3539 if (cgraph_will_be_removed_from_program_if_no_direct_calls (v)
3540 && !cgraph_for_node_and_aliases (v,
3541 has_undead_caller_from_outside_scc_p,
3542 NULL, true))
3543 IPA_NODE_REF (v)->node_dead = 1;
3545 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
3546 if (!IPA_NODE_REF (v)->node_dead)
3547 spread_undeadness (v);
3549 if (dump_file && (dump_flags & TDF_DETAILS))
3551 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
3552 if (IPA_NODE_REF (v)->node_dead)
3553 fprintf (dump_file, " Marking node as dead: %s/%i.\n",
3554 v->name (), v->order);
3558 /* The decision stage. Iterate over the topological order of call graph nodes
3559 TOPO and make specialized clones if deemed beneficial. */
3561 static void
3562 ipcp_decision_stage (struct topo_info *topo)
3564 int i;
3566 if (dump_file)
3567 fprintf (dump_file, "\nIPA decision stage:\n\n");
3569 for (i = topo->nnodes - 1; i >= 0; i--)
3571 struct cgraph_node *node = topo->order[i];
3572 bool change = false, iterate = true;
3574 while (iterate)
3576 struct cgraph_node *v;
3577 iterate = false;
3578 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
3579 if (cgraph_function_with_gimple_body_p (v)
3580 && ipcp_versionable_function_p (v))
3581 iterate |= decide_whether_version_node (v);
3583 change |= iterate;
3585 if (change)
3586 identify_dead_nodes (node);
3590 /* The IPCP driver. */
3592 static unsigned int
3593 ipcp_driver (void)
3595 struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
3596 struct cgraph_edge_hook_list *edge_removal_hook_holder;
3597 struct topo_info topo;
3599 ipa_check_create_node_params ();
3600 ipa_check_create_edge_args ();
3601 grow_edge_clone_vectors ();
3602 edge_duplication_hook_holder =
3603 cgraph_add_edge_duplication_hook (&ipcp_edge_duplication_hook, NULL);
3604 edge_removal_hook_holder =
3605 cgraph_add_edge_removal_hook (&ipcp_edge_removal_hook, NULL);
3607 ipcp_values_pool = create_alloc_pool ("IPA-CP values",
3608 sizeof (struct ipcp_value), 32);
3609 ipcp_sources_pool = create_alloc_pool ("IPA-CP value sources",
3610 sizeof (struct ipcp_value_source), 64);
3611 ipcp_agg_lattice_pool = create_alloc_pool ("IPA_CP aggregate lattices",
3612 sizeof (struct ipcp_agg_lattice),
3613 32);
3614 if (dump_file)
3616 fprintf (dump_file, "\nIPA structures before propagation:\n");
3617 if (dump_flags & TDF_DETAILS)
3618 ipa_print_all_params (dump_file);
3619 ipa_print_all_jump_functions (dump_file);
3622 /* Topological sort. */
3623 build_toporder_info (&topo);
3624 /* Do the interprocedural propagation. */
3625 ipcp_propagate_stage (&topo);
3626 /* Decide what constant propagation and cloning should be performed. */
3627 ipcp_decision_stage (&topo);
3629 /* Free all IPCP structures. */
3630 free_toporder_info (&topo);
3631 next_edge_clone.release ();
3632 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3633 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3634 ipa_free_all_structures_after_ipa_cp ();
3635 if (dump_file)
3636 fprintf (dump_file, "\nIPA constant propagation end\n");
3637 return 0;
3640 /* Initialization and computation of IPCP data structures. This is the initial
3641 intraprocedural analysis of functions, which gathers information to be
3642 propagated later on. */
3644 static void
3645 ipcp_generate_summary (void)
3647 struct cgraph_node *node;
3649 if (dump_file)
3650 fprintf (dump_file, "\nIPA constant propagation start:\n");
3651 ipa_register_cgraph_hooks ();
3653 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
3655 node->local.versionable
3656 = tree_versionable_function_p (node->decl);
3657 ipa_analyze_node (node);
3661 /* Write ipcp summary for nodes in SET. */
3663 static void
3664 ipcp_write_summary (void)
3666 ipa_prop_write_jump_functions ();
3669 /* Read ipcp summary. */
3671 static void
3672 ipcp_read_summary (void)
3674 ipa_prop_read_jump_functions ();
3677 /* Gate for IPCP optimization. */
3679 static bool
3680 cgraph_gate_cp (void)
3682 /* FIXME: We should remove the optimize check after we ensure we never run
3683 IPA passes when not optimizing. */
3684 return flag_ipa_cp && optimize;
3687 namespace {
3689 const pass_data pass_data_ipa_cp =
3691 IPA_PASS, /* type */
3692 "cp", /* name */
3693 OPTGROUP_NONE, /* optinfo_flags */
3694 true, /* has_gate */
3695 true, /* has_execute */
3696 TV_IPA_CONSTANT_PROP, /* tv_id */
3697 0, /* properties_required */
3698 0, /* properties_provided */
3699 0, /* properties_destroyed */
3700 0, /* todo_flags_start */
3701 ( TODO_dump_symtab | TODO_remove_functions ), /* todo_flags_finish */
3704 class pass_ipa_cp : public ipa_opt_pass_d
3706 public:
3707 pass_ipa_cp (gcc::context *ctxt)
3708 : ipa_opt_pass_d (pass_data_ipa_cp, ctxt,
3709 ipcp_generate_summary, /* generate_summary */
3710 ipcp_write_summary, /* write_summary */
3711 ipcp_read_summary, /* read_summary */
3712 ipa_prop_write_all_agg_replacement, /*
3713 write_optimization_summary */
3714 ipa_prop_read_all_agg_replacement, /*
3715 read_optimization_summary */
3716 NULL, /* stmt_fixup */
3717 0, /* function_transform_todo_flags_start */
3718 ipcp_transform_function, /* function_transform */
3719 NULL) /* variable_transform */
3722 /* opt_pass methods: */
3723 bool gate () { return cgraph_gate_cp (); }
3724 unsigned int execute () { return ipcp_driver (); }
3726 }; // class pass_ipa_cp
3728 } // anon namespace
3730 ipa_opt_pass_d *
3731 make_pass_ipa_cp (gcc::context *ctxt)
3733 return new pass_ipa_cp (ctxt);