Daily bump.
[official-gcc.git] / gcc / ipa-cp.c
bloba6a44e6d81aceabbf5dca273cba702771376f50e
1 /* Interprocedural constant propagation
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
4 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
5 <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Interprocedural constant propagation (IPA-CP).
25 The goal of this transformation is to
27 1) discover functions which are always invoked with some arguments with the
28 same known constant values and modify the functions so that the
29 subsequent optimizations can take advantage of the knowledge, and
31 2) partial specialization - create specialized versions of functions
32 transformed in this way if some parameters are known constants only in
33 certain contexts but the estimated tradeoff between speedup and cost size
34 is deemed good.
36 The algorithm also propagates types and attempts to perform type based
37 devirtualization. Types are propagated much like constants.
39 The algorithm basically consists of three stages. In the first, functions
40 are analyzed one at a time and jump functions are constructed for all known
41 call-sites. In the second phase, the pass propagates information from the
42 jump functions across the call to reveal what values are available at what
43 call sites, performs estimations of effects of known values on functions and
44 their callees, and finally decides what specialized extra versions should be
45 created. In the third, the special versions materialize and appropriate
46 calls are redirected.
48 The algorithm used is to a certain extent based on "Interprocedural Constant
49 Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
50 Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
51 Cooper, Mary W. Hall, and Ken Kennedy.
54 First stage - intraprocedural analysis
55 =======================================
57 This phase computes jump_function and modification flags.
59     A jump function for a call-site represents the values passed as actual
60     arguments of a given call-site. In principle, there are three types of
61 values:
63 Pass through - the caller's formal parameter is passed as an actual
64 argument, plus an operation on it can be performed.
65 Constant - a constant is passed as an actual argument.
66 Unknown - neither of the above.
68 All jump function types are described in detail in ipa-prop.h, together with
69 the data structures that represent them and methods of accessing them.
71 ipcp_generate_summary() is the main function of the first stage.
73 Second stage - interprocedural analysis
74 ========================================
76 This stage is itself divided into two phases. In the first, we propagate
77 known values over the call graph, in the second, we make cloning decisions.
78 It uses a different algorithm than the original Callahan's paper.
80 First, we traverse the functions topologically from callers to callees and,
81 for each strongly connected component (SCC), we propagate constants
82 according to previously computed jump functions. We also record what known
83 values depend on other known values and estimate local effects. Finally, we
84 propagate cumulative information about these effects from dependent values
85 to those on which they depend.
87 Second, we again traverse the call graph in the same topological order and
88 make clones for functions which we know are called with the same values in
89 all contexts and decide about extra specialized clones of functions just for
90 some contexts - these decisions are based on both local estimates and
91 cumulative estimates propagated from callees.
93     ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
94     second stage.
96 Third phase - materialization of clones, call statement updates.
97 ============================================
99 This stage is currently performed by call graph code (mainly in cgraphunit.c
100 and tree-inline.c) according to instructions inserted to the call graph by
101 the second stage. */
103 #include "config.h"
104 #include "system.h"
105 #include "coretypes.h"
106 #include "tree.h"
107 #include "gimple-fold.h"
108 #include "gimple-expr.h"
109 #include "target.h"
110 #include "ipa-prop.h"
111 #include "bitmap.h"
112 #include "tree-pass.h"
113 #include "flags.h"
114 #include "diagnostic.h"
115 #include "tree-pretty-print.h"
116 #include "tree-inline.h"
117 #include "params.h"
118 #include "ipa-inline.h"
119 #include "ipa-utils.h"
121 struct ipcp_value;
123 /* Describes a particular source for an IPA-CP value. */
125 struct ipcp_value_source
127 /* Aggregate offset of the source, negative if the source is scalar value of
128 the argument itself. */
129 HOST_WIDE_INT offset;
130 /* The incoming edge that brought the value. */
131 struct cgraph_edge *cs;
132 /* If the jump function that resulted into his value was a pass-through or an
133 ancestor, this is the ipcp_value of the caller from which the described
134 value has been derived. Otherwise it is NULL. */
135 struct ipcp_value *val;
136 /* Next pointer in a linked list of sources of a value. */
137 struct ipcp_value_source *next;
138 /* If the jump function that resulted into his value was a pass-through or an
139 ancestor, this is the index of the parameter of the caller the jump
140 function references. */
141 int index;
144 /* Describes one particular value stored in struct ipcp_lattice. */
146 struct ipcp_value
148 /* The actual value for the given parameter. This is either an IPA invariant
149 or a TREE_BINFO describing a type that can be used for
150 devirtualization. */
151 tree value;
152 /* The list of sources from which this value originates. */
153 struct ipcp_value_source *sources;
154 /* Next pointers in a linked list of all values in a lattice. */
155 struct ipcp_value *next;
156 /* Next pointers in a linked list of values in a strongly connected component
157 of values. */
158 struct ipcp_value *scc_next;
159 /* Next pointers in a linked list of SCCs of values sorted topologically
160 according their sources. */
161 struct ipcp_value *topo_next;
162 /* A specialized node created for this value, NULL if none has been (so far)
163 created. */
164 struct cgraph_node *spec_node;
165 /* Depth first search number and low link for topological sorting of
166 values. */
167 int dfs, low_link;
168 /* Time benefit and size cost that specializing the function for this value
169 would bring about in this function alone. */
170 int local_time_benefit, local_size_cost;
171 /* Time benefit and size cost that specializing the function for this value
172 can bring about in it's callees (transitively). */
173 int prop_time_benefit, prop_size_cost;
174 /* True if this valye is currently on the topo-sort stack. */
175 bool on_stack;
/* Lattice describing potential values of a formal parameter of a function, or
   a part of an aggregate.  TOP is represented by a lattice with zero values
   and with contains_variable and bottom flags cleared.  BOTTOM is represented
   by a lattice with the bottom flag set.  In that case, values and
   contains_variable flag should be disregarded.  */

struct ipcp_lattice
{
  /* The list of known values and types in this lattice.  Note that values are
     not deallocated if a lattice is set to bottom because there may be value
     sources referencing them.  */
  struct ipcp_value *values;
  /* Number of known values and types in this lattice.  */
  int values_count;
  /* The lattice contains a variable component (in addition to values).  */
  bool contains_variable;
  /* The value of the lattice is bottom (i.e. variable and unusable for any
     propagation).  */
  bool bottom;
};
199 /* Lattice with an offset to describe a part of an aggregate. */
201 struct ipcp_agg_lattice : public ipcp_lattice
203 /* Offset that is being described by this lattice. */
204 HOST_WIDE_INT offset;
205 /* Size so that we don't have to re-compute it every time we traverse the
206 list. Must correspond to TYPE_SIZE of all lat values. */
207 HOST_WIDE_INT size;
208 /* Next element of the linked list. */
209 struct ipcp_agg_lattice *next;
212 /* Structure containing lattices for a parameter itself and for pieces of
213 aggregates that are passed in the parameter or by a reference in a parameter
214 plus some other useful flags. */
216 struct ipcp_param_lattices
218 /* Lattice describing the value of the parameter itself. */
219 struct ipcp_lattice itself;
220 /* Lattices describing aggregate parts. */
221 struct ipcp_agg_lattice *aggs;
222 /* Number of aggregate lattices */
223 int aggs_count;
224 /* True if aggregate data were passed by reference (as opposed to by
225 value). */
226 bool aggs_by_ref;
227 /* All aggregate lattices contain a variable component (in addition to
228 values). */
229 bool aggs_contain_variable;
230 /* The value of all aggregate lattices is bottom (i.e. variable and unusable
231 for any propagation). */
232 bool aggs_bottom;
234 /* There is a virtual call based on this parameter. */
235 bool virt_call;
238 /* Allocation pools for values and their sources in ipa-cp. */
240 alloc_pool ipcp_values_pool;
241 alloc_pool ipcp_sources_pool;
242 alloc_pool ipcp_agg_lattice_pool;
244 /* Maximal count found in program. */
246 static gcov_type max_count;
248 /* Original overall size of the program. */
250 static long overall_size, max_new_size;
252 /* Head of the linked list of topologically sorted values. */
254 static struct ipcp_value *values_topo;
256 /* Return the param lattices structure corresponding to the Ith formal
257 parameter of the function described by INFO. */
258 static inline struct ipcp_param_lattices *
259 ipa_get_parm_lattices (struct ipa_node_params *info, int i)
261 gcc_assert (i >= 0 && i < ipa_get_param_count (info));
262 gcc_checking_assert (!info->ipcp_orig_node);
263 gcc_checking_assert (info->lattices);
264 return &(info->lattices[i]);
267 /* Return the lattice corresponding to the scalar value of the Ith formal
268 parameter of the function described by INFO. */
269 static inline struct ipcp_lattice *
270 ipa_get_scalar_lat (struct ipa_node_params *info, int i)
272 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
273 return &plats->itself;
276 /* Return whether LAT is a lattice with a single constant and without an
277 undefined value. */
279 static inline bool
280 ipa_lat_is_single_const (struct ipcp_lattice *lat)
282 if (lat->bottom
283 || lat->contains_variable
284 || lat->values_count != 1)
285 return false;
286 else
287 return true;
290 /* Print V which is extracted from a value in a lattice to F. */
292 static void
293 print_ipcp_constant_value (FILE * f, tree v)
295 if (TREE_CODE (v) == TREE_BINFO)
297 fprintf (f, "BINFO ");
298 print_generic_expr (f, BINFO_TYPE (v), 0);
300 else if (TREE_CODE (v) == ADDR_EXPR
301 && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
303 fprintf (f, "& ");
304 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)), 0);
306 else
307 print_generic_expr (f, v, 0);
310 /* Print a lattice LAT to F. */
312 static void
313 print_lattice (FILE * f, struct ipcp_lattice *lat,
314 bool dump_sources, bool dump_benefits)
316 struct ipcp_value *val;
317 bool prev = false;
319 if (lat->bottom)
321 fprintf (f, "BOTTOM\n");
322 return;
325 if (!lat->values_count && !lat->contains_variable)
327 fprintf (f, "TOP\n");
328 return;
331 if (lat->contains_variable)
333 fprintf (f, "VARIABLE");
334 prev = true;
335 if (dump_benefits)
336 fprintf (f, "\n");
339 for (val = lat->values; val; val = val->next)
341 if (dump_benefits && prev)
342 fprintf (f, " ");
343 else if (!dump_benefits && prev)
344 fprintf (f, ", ");
345 else
346 prev = true;
348 print_ipcp_constant_value (f, val->value);
350 if (dump_sources)
352 struct ipcp_value_source *s;
354 fprintf (f, " [from:");
355 for (s = val->sources; s; s = s->next)
356 fprintf (f, " %i(%i)", s->cs->caller->order,
357 s->cs->frequency);
358 fprintf (f, "]");
361 if (dump_benefits)
362 fprintf (f, " [loc_time: %i, loc_size: %i, "
363 "prop_time: %i, prop_size: %i]\n",
364 val->local_time_benefit, val->local_size_cost,
365 val->prop_time_benefit, val->prop_size_cost);
367 if (!dump_benefits)
368 fprintf (f, "\n");
371 /* Print all ipcp_lattices of all functions to F. */
373 static void
374 print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
376 struct cgraph_node *node;
377 int i, count;
379 fprintf (f, "\nLattices:\n");
380 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
382 struct ipa_node_params *info;
384 info = IPA_NODE_REF (node);
385 fprintf (f, " Node: %s/%i:\n", node->name (),
386 node->order);
387 count = ipa_get_param_count (info);
388 for (i = 0; i < count; i++)
390 struct ipcp_agg_lattice *aglat;
391 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
392 fprintf (f, " param [%d]: ", i);
393 print_lattice (f, &plats->itself, dump_sources, dump_benefits);
395 if (plats->virt_call)
396 fprintf (f, " virt_call flag set\n");
398 if (plats->aggs_bottom)
400 fprintf (f, " AGGS BOTTOM\n");
401 continue;
403 if (plats->aggs_contain_variable)
404 fprintf (f, " AGGS VARIABLE\n");
405 for (aglat = plats->aggs; aglat; aglat = aglat->next)
407 fprintf (f, " %soffset " HOST_WIDE_INT_PRINT_DEC ": ",
408 plats->aggs_by_ref ? "ref " : "", aglat->offset);
409 print_lattice (f, aglat, dump_sources, dump_benefits);
415 /* Determine whether it is at all technically possible to create clones of NODE
416 and store this information in the ipa_node_params structure associated
417 with NODE. */
419 static void
420 determine_versionability (struct cgraph_node *node)
422 const char *reason = NULL;
424 /* There are a number of generic reasons functions cannot be versioned. We
425 also cannot remove parameters if there are type attributes such as fnspec
426 present. */
427 if (node->alias || node->thunk.thunk_p)
428 reason = "alias or thunk";
429 else if (!node->local.versionable)
430 reason = "not a tree_versionable_function";
431 else if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
432 reason = "insufficient body availability";
433 else if (lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (node->decl)))
435 /* Ideally we should clone the SIMD clones themselves and create
436 vector copies of them, so IPA-cp and SIMD clones can happily
437 coexist, but that may not be worth the effort. */
438 reason = "function has SIMD clones";
440 /* Don't clone decls local to a comdat group; it breaks and for C++
441 decloned constructors, inlining is always better anyway. */
442 else if (symtab_comdat_local_p (node))
443 reason = "comdat-local function";
445 if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
446 fprintf (dump_file, "Function %s/%i is not versionable, reason: %s.\n",
447 node->name (), node->order, reason);
449 node->local.versionable = (reason == NULL);
452 /* Return true if it is at all technically possible to create clones of a
453 NODE. */
455 static bool
456 ipcp_versionable_function_p (struct cgraph_node *node)
458 return node->local.versionable;
461 /* Structure holding accumulated information about callers of a node. */
463 struct caller_statistics
465 gcov_type count_sum;
466 int n_calls, n_hot_calls, freq_sum;
469 /* Initialize fields of STAT to zeroes. */
471 static inline void
472 init_caller_stats (struct caller_statistics *stats)
474 stats->count_sum = 0;
475 stats->n_calls = 0;
476 stats->n_hot_calls = 0;
477 stats->freq_sum = 0;
480 /* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
481 non-thunk incoming edges to NODE. */
483 static bool
484 gather_caller_stats (struct cgraph_node *node, void *data)
486 struct caller_statistics *stats = (struct caller_statistics *) data;
487 struct cgraph_edge *cs;
489 for (cs = node->callers; cs; cs = cs->next_caller)
490 if (cs->caller->thunk.thunk_p)
491 cgraph_for_node_and_aliases (cs->caller, gather_caller_stats,
492 stats, false);
493 else
495 stats->count_sum += cs->count;
496 stats->freq_sum += cs->frequency;
497 stats->n_calls++;
498 if (cgraph_maybe_hot_edge_p (cs))
499 stats->n_hot_calls ++;
501 return false;
505 /* Return true if this NODE is viable candidate for cloning. */
507 static bool
508 ipcp_cloning_candidate_p (struct cgraph_node *node)
510 struct caller_statistics stats;
512 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
514 if (!flag_ipa_cp_clone)
516 if (dump_file)
517 fprintf (dump_file, "Not considering %s for cloning; "
518 "-fipa-cp-clone disabled.\n",
519 node->name ());
520 return false;
523 if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
525 if (dump_file)
526 fprintf (dump_file, "Not considering %s for cloning; "
527 "optimizing it for size.\n",
528 node->name ());
529 return false;
532 init_caller_stats (&stats);
533 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
535 if (inline_summary (node)->self_size < stats.n_calls)
537 if (dump_file)
538 fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
539 node->name ());
540 return true;
543 /* When profile is available and function is hot, propagate into it even if
544 calls seems cold; constant propagation can improve function's speed
545 significantly. */
546 if (max_count)
548 if (stats.count_sum > node->count * 90 / 100)
550 if (dump_file)
551 fprintf (dump_file, "Considering %s for cloning; "
552 "usually called directly.\n",
553 node->name ());
554 return true;
557 if (!stats.n_hot_calls)
559 if (dump_file)
560 fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
561 node->name ());
562 return false;
564 if (dump_file)
565 fprintf (dump_file, "Considering %s for cloning.\n",
566 node->name ());
567 return true;
/* Arrays representing a topological ordering of call graph nodes and a stack
   of nodes used during constant propagation.  */

struct topo_info
{
  /* Nodes in reduced-postorder (SCCs collapsed).  */
  struct cgraph_node **order;
  /* Work-list stack of nodes awaiting propagation.  */
  struct cgraph_node **stack;
  int nnodes, stack_top;
};
580 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
582 static void
583 build_toporder_info (struct topo_info *topo)
585 topo->order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
586 topo->stack = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
587 topo->stack_top = 0;
588 topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
591 /* Free information about strongly connected components and the arrays in
592 TOPO. */
594 static void
595 free_toporder_info (struct topo_info *topo)
597 ipa_free_postorder_info ();
598 free (topo->order);
599 free (topo->stack);
602 /* Add NODE to the stack in TOPO, unless it is already there. */
604 static inline void
605 push_node_to_stack (struct topo_info *topo, struct cgraph_node *node)
607 struct ipa_node_params *info = IPA_NODE_REF (node);
608 if (info->node_enqueued)
609 return;
610 info->node_enqueued = 1;
611 topo->stack[topo->stack_top++] = node;
614 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
615 is empty. */
617 static struct cgraph_node *
618 pop_node_from_stack (struct topo_info *topo)
620 if (topo->stack_top)
622 struct cgraph_node *node;
623 topo->stack_top--;
624 node = topo->stack[topo->stack_top];
625 IPA_NODE_REF (node)->node_enqueued = 0;
626 return node;
628 else
629 return NULL;
632 /* Set lattice LAT to bottom and return true if it previously was not set as
633 such. */
635 static inline bool
636 set_lattice_to_bottom (struct ipcp_lattice *lat)
638 bool ret = !lat->bottom;
639 lat->bottom = true;
640 return ret;
643 /* Mark lattice as containing an unknown value and return true if it previously
644 was not marked as such. */
646 static inline bool
647 set_lattice_contains_variable (struct ipcp_lattice *lat)
649 bool ret = !lat->contains_variable;
650 lat->contains_variable = true;
651 return ret;
654 /* Set all aggegate lattices in PLATS to bottom and return true if they were
655 not previously set as such. */
657 static inline bool
658 set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
660 bool ret = !plats->aggs_bottom;
661 plats->aggs_bottom = true;
662 return ret;
665 /* Mark all aggegate lattices in PLATS as containing an unknown value and
666 return true if they were not previously marked as such. */
668 static inline bool
669 set_agg_lats_contain_variable (struct ipcp_param_lattices *plats)
671 bool ret = !plats->aggs_contain_variable;
672 plats->aggs_contain_variable = true;
673 return ret;
676 /* Mark bot aggregate and scalar lattices as containing an unknown variable,
677 return true is any of them has not been marked as such so far. */
679 static inline bool
680 set_all_contains_variable (struct ipcp_param_lattices *plats)
682 bool ret = !plats->itself.contains_variable || !plats->aggs_contain_variable;
683 plats->itself.contains_variable = true;
684 plats->aggs_contain_variable = true;
685 return ret;
688 /* Initialize ipcp_lattices. */
690 static void
691 initialize_node_lattices (struct cgraph_node *node)
693 struct ipa_node_params *info = IPA_NODE_REF (node);
694 struct cgraph_edge *ie;
695 bool disable = false, variable = false;
696 int i;
698 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
699 if (!node->local.local)
701 /* When cloning is allowed, we can assume that externally visible
702 functions are not called. We will compensate this by cloning
703 later. */
704 if (ipcp_versionable_function_p (node)
705 && ipcp_cloning_candidate_p (node))
706 variable = true;
707 else
708 disable = true;
711 if (disable || variable)
713 for (i = 0; i < ipa_get_param_count (info) ; i++)
715 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
716 if (disable)
718 set_lattice_to_bottom (&plats->itself);
719 set_agg_lats_to_bottom (plats);
721 else
722 set_all_contains_variable (plats);
724 if (dump_file && (dump_flags & TDF_DETAILS)
725 && !node->alias && !node->thunk.thunk_p)
726 fprintf (dump_file, "Marking all lattices of %s/%i as %s\n",
727 node->name (), node->order,
728 disable ? "BOTTOM" : "VARIABLE");
731 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
732 if (ie->indirect_info->polymorphic
733 && ie->indirect_info->param_index >= 0)
735 gcc_checking_assert (ie->indirect_info->param_index >= 0);
736 ipa_get_parm_lattices (info,
737 ie->indirect_info->param_index)->virt_call = 1;
741 /* Return the result of a (possibly arithmetic) pass through jump function
742 JFUNC on the constant value INPUT. Return NULL_TREE if that cannot be
743 determined or be considered an interprocedural invariant. */
745 static tree
746 ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
748 tree restype, res;
750 if (TREE_CODE (input) == TREE_BINFO)
752 if (ipa_get_jf_pass_through_type_preserved (jfunc))
754 gcc_checking_assert (ipa_get_jf_pass_through_operation (jfunc)
755 == NOP_EXPR);
756 return input;
758 return NULL_TREE;
761 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
762 return input;
764 gcc_checking_assert (is_gimple_ip_invariant (input));
765 if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc))
766 == tcc_comparison)
767 restype = boolean_type_node;
768 else
769 restype = TREE_TYPE (input);
770 res = fold_binary (ipa_get_jf_pass_through_operation (jfunc), restype,
771 input, ipa_get_jf_pass_through_operand (jfunc));
773 if (res && !is_gimple_ip_invariant (res))
774 return NULL_TREE;
776 return res;
779 /* Return the result of an ancestor jump function JFUNC on the constant value
780 INPUT. Return NULL_TREE if that cannot be determined. */
782 static tree
783 ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
785 if (TREE_CODE (input) == TREE_BINFO)
787 if (!ipa_get_jf_ancestor_type_preserved (jfunc))
788 return NULL;
789 return get_binfo_at_offset (input,
790 ipa_get_jf_ancestor_offset (jfunc),
791 ipa_get_jf_ancestor_type (jfunc));
793 else if (TREE_CODE (input) == ADDR_EXPR)
795 tree t = TREE_OPERAND (input, 0);
796 t = build_ref_for_offset (EXPR_LOCATION (t), t,
797 ipa_get_jf_ancestor_offset (jfunc),
798 ipa_get_jf_ancestor_type (jfunc), NULL, false);
799 return build_fold_addr_expr (t);
801 else
802 return NULL_TREE;
805 /* Determine whether JFUNC evaluates to a known value (that is either a
806 constant or a binfo) and if so, return it. Otherwise return NULL. INFO
807 describes the caller node so that pass-through jump functions can be
808 evaluated. */
810 tree
811 ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
813 if (jfunc->type == IPA_JF_CONST)
814 return ipa_get_jf_constant (jfunc);
815 else if (jfunc->type == IPA_JF_KNOWN_TYPE)
816 return ipa_binfo_from_known_type_jfunc (jfunc);
817 else if (jfunc->type == IPA_JF_PASS_THROUGH
818 || jfunc->type == IPA_JF_ANCESTOR)
820 tree input;
821 int idx;
823 if (jfunc->type == IPA_JF_PASS_THROUGH)
824 idx = ipa_get_jf_pass_through_formal_id (jfunc);
825 else
826 idx = ipa_get_jf_ancestor_formal_id (jfunc);
828 if (info->ipcp_orig_node)
829 input = info->known_vals[idx];
830 else
832 struct ipcp_lattice *lat;
834 if (!info->lattices)
836 gcc_checking_assert (!flag_ipa_cp);
837 return NULL_TREE;
839 lat = ipa_get_scalar_lat (info, idx);
840 if (!ipa_lat_is_single_const (lat))
841 return NULL_TREE;
842 input = lat->values->value;
845 if (!input)
846 return NULL_TREE;
848 if (jfunc->type == IPA_JF_PASS_THROUGH)
849 return ipa_get_jf_pass_through_result (jfunc, input);
850 else
851 return ipa_get_jf_ancestor_result (jfunc, input);
853 else
854 return NULL_TREE;
858 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
859 bottom, not containing a variable component and without any known value at
860 the same time. */
862 DEBUG_FUNCTION void
863 ipcp_verify_propagated_values (void)
865 struct cgraph_node *node;
867 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
869 struct ipa_node_params *info = IPA_NODE_REF (node);
870 int i, count = ipa_get_param_count (info);
872 for (i = 0; i < count; i++)
874 struct ipcp_lattice *lat = ipa_get_scalar_lat (info, i);
876 if (!lat->bottom
877 && !lat->contains_variable
878 && lat->values_count == 0)
880 if (dump_file)
882 fprintf (dump_file, "\nIPA lattices after constant "
883 "propagation:\n");
884 print_all_lattices (dump_file, true, false);
887 gcc_unreachable ();
893 /* Return true iff X and Y should be considered equal values by IPA-CP. */
895 static bool
896 values_equal_for_ipcp_p (tree x, tree y)
898 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
900 if (x == y)
901 return true;
903 if (TREE_CODE (x) == TREE_BINFO || TREE_CODE (y) == TREE_BINFO)
904 return false;
906 if (TREE_CODE (x) == ADDR_EXPR
907 && TREE_CODE (y) == ADDR_EXPR
908 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
909 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
910 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
911 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
912 else
913 return operand_equal_p (x, y, 0);
916 /* Add a new value source to VAL, marking that a value comes from edge CS and
917 (if the underlying jump function is a pass-through or an ancestor one) from
918 a caller value SRC_VAL of a caller parameter described by SRC_INDEX. OFFSET
919 is negative if the source was the scalar value of the parameter itself or
920 the offset within an aggregate. */
922 static void
923 add_value_source (struct ipcp_value *val, struct cgraph_edge *cs,
924 struct ipcp_value *src_val, int src_idx, HOST_WIDE_INT offset)
926 struct ipcp_value_source *src;
928 src = (struct ipcp_value_source *) pool_alloc (ipcp_sources_pool);
929 src->offset = offset;
930 src->cs = cs;
931 src->val = src_val;
932 src->index = src_idx;
934 src->next = val->sources;
935 val->sources = src;
938 /* Try to add NEWVAL to LAT, potentially creating a new struct ipcp_value for
939 it. CS, SRC_VAL SRC_INDEX and OFFSET are meant for add_value_source and
940 have the same meaning. */
942 static bool
943 add_value_to_lattice (struct ipcp_lattice *lat, tree newval,
944 struct cgraph_edge *cs, struct ipcp_value *src_val,
945 int src_idx, HOST_WIDE_INT offset)
947 struct ipcp_value *val;
949 if (lat->bottom)
950 return false;
952 for (val = lat->values; val; val = val->next)
953 if (values_equal_for_ipcp_p (val->value, newval))
955 if (ipa_edge_within_scc (cs))
957 struct ipcp_value_source *s;
958 for (s = val->sources; s ; s = s->next)
959 if (s->cs == cs)
960 break;
961 if (s)
962 return false;
965 add_value_source (val, cs, src_val, src_idx, offset);
966 return false;
969 if (lat->values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
971 /* We can only free sources, not the values themselves, because sources
972 of other values in this this SCC might point to them. */
973 for (val = lat->values; val; val = val->next)
975 while (val->sources)
977 struct ipcp_value_source *src = val->sources;
978 val->sources = src->next;
979 pool_free (ipcp_sources_pool, src);
983 lat->values = NULL;
984 return set_lattice_to_bottom (lat);
987 lat->values_count++;
988 val = (struct ipcp_value *) pool_alloc (ipcp_values_pool);
989 memset (val, 0, sizeof (*val));
991 add_value_source (val, cs, src_val, src_idx, offset);
992 val->value = newval;
993 val->next = lat->values;
994 lat->values = val;
995 return true;
998 /* Like above but passes a special value of offset to distinguish that the
999 origin is the scalar value of the parameter rather than a part of an
1000 aggregate. */
1002 static inline bool
1003 add_scalar_value_to_lattice (struct ipcp_lattice *lat, tree newval,
1004 struct cgraph_edge *cs,
1005 struct ipcp_value *src_val, int src_idx)
1007 return add_value_to_lattice (lat, newval, cs, src_val, src_idx, -1);
1010 /* Propagate values through a pass-through jump function JFUNC associated with
1011 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1012 is the index of the source parameter. */
1014 static bool
1015 propagate_vals_accross_pass_through (struct cgraph_edge *cs,
1016 struct ipa_jump_func *jfunc,
1017 struct ipcp_lattice *src_lat,
1018 struct ipcp_lattice *dest_lat,
1019 int src_idx)
1021 struct ipcp_value *src_val;
1022 bool ret = false;
1024 /* Do not create new values when propagating within an SCC because if there
1025 are arithmetic functions with circular dependencies, there is infinite
1026 number of them and we would just make lattices bottom. */
1027 if ((ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1028 && ipa_edge_within_scc (cs))
1029 ret = set_lattice_contains_variable (dest_lat);
1030 else
1031 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1033 tree cstval = ipa_get_jf_pass_through_result (jfunc, src_val->value);
1035 if (cstval)
1036 ret |= add_scalar_value_to_lattice (dest_lat, cstval, cs, src_val,
1037 src_idx);
1038 else
1039 ret |= set_lattice_contains_variable (dest_lat);
1042 return ret;
1045 /* Propagate values through an ancestor jump function JFUNC associated with
1046 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1047 is the index of the source parameter. */
1049 static bool
1050 propagate_vals_accross_ancestor (struct cgraph_edge *cs,
1051 struct ipa_jump_func *jfunc,
1052 struct ipcp_lattice *src_lat,
1053 struct ipcp_lattice *dest_lat,
1054 int src_idx)
1056 struct ipcp_value *src_val;
1057 bool ret = false;
1059 if (ipa_edge_within_scc (cs))
1060 return set_lattice_contains_variable (dest_lat);
1062 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1064 tree t = ipa_get_jf_ancestor_result (jfunc, src_val->value);
1066 if (t)
1067 ret |= add_scalar_value_to_lattice (dest_lat, t, cs, src_val, src_idx);
1068 else
1069 ret |= set_lattice_contains_variable (dest_lat);
1072 return ret;
1075 /* Propagate scalar values across jump function JFUNC that is associated with
1076 edge CS and put the values into DEST_LAT. */
1078 static bool
1079 propagate_scalar_accross_jump_function (struct cgraph_edge *cs,
1080 struct ipa_jump_func *jfunc,
1081 struct ipcp_lattice *dest_lat)
1083 if (dest_lat->bottom)
1084 return false;
1086 if (jfunc->type == IPA_JF_CONST
1087 || jfunc->type == IPA_JF_KNOWN_TYPE)
1089 tree val;
1091 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1093 val = ipa_binfo_from_known_type_jfunc (jfunc);
1094 if (!val)
1095 return set_lattice_contains_variable (dest_lat);
1097 else
1098 val = ipa_get_jf_constant (jfunc);
1099 return add_scalar_value_to_lattice (dest_lat, val, cs, NULL, 0);
1101 else if (jfunc->type == IPA_JF_PASS_THROUGH
1102 || jfunc->type == IPA_JF_ANCESTOR)
1104 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1105 struct ipcp_lattice *src_lat;
1106 int src_idx;
1107 bool ret;
1109 if (jfunc->type == IPA_JF_PASS_THROUGH)
1110 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1111 else
1112 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1114 src_lat = ipa_get_scalar_lat (caller_info, src_idx);
1115 if (src_lat->bottom)
1116 return set_lattice_contains_variable (dest_lat);
1118 /* If we would need to clone the caller and cannot, do not propagate. */
1119 if (!ipcp_versionable_function_p (cs->caller)
1120 && (src_lat->contains_variable
1121 || (src_lat->values_count > 1)))
1122 return set_lattice_contains_variable (dest_lat);
1124 if (jfunc->type == IPA_JF_PASS_THROUGH)
1125 ret = propagate_vals_accross_pass_through (cs, jfunc, src_lat,
1126 dest_lat, src_idx);
1127 else
1128 ret = propagate_vals_accross_ancestor (cs, jfunc, src_lat, dest_lat,
1129 src_idx);
1131 if (src_lat->contains_variable)
1132 ret |= set_lattice_contains_variable (dest_lat);
1134 return ret;
1137 /* TODO: We currently do not handle member method pointers in IPA-CP (we only
1138 use it for indirect inlining), we should propagate them too. */
1139 return set_lattice_contains_variable (dest_lat);
1142 /* If DEST_PLATS already has aggregate items, check that aggs_by_ref matches
1143 NEW_AGGS_BY_REF and if not, mark all aggs as bottoms and return true (in all
1144 other cases, return false). If there are no aggregate items, set
1145 aggs_by_ref to NEW_AGGS_BY_REF. */
1147 static bool
1148 set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
1149 bool new_aggs_by_ref)
1151 if (dest_plats->aggs)
1153 if (dest_plats->aggs_by_ref != new_aggs_by_ref)
1155 set_agg_lats_to_bottom (dest_plats);
1156 return true;
1159 else
1160 dest_plats->aggs_by_ref = new_aggs_by_ref;
1161 return false;
1164 /* Walk aggregate lattices in DEST_PLATS from ***AGLAT on, until ***aglat is an
1165 already existing lattice for the given OFFSET and SIZE, marking all skipped
1166 lattices as containing variable and checking for overlaps. If there is no
1167 already existing lattice for the OFFSET and VAL_SIZE, create one, initialize
1168 it with offset, size and contains_variable to PRE_EXISTING, and return true,
1169 unless there are too many already. If there are two many, return false. If
1170 there are overlaps turn whole DEST_PLATS to bottom and return false. If any
1171 skipped lattices were newly marked as containing variable, set *CHANGE to
1172 true. */
1174 static bool
1175 merge_agg_lats_step (struct ipcp_param_lattices *dest_plats,
1176 HOST_WIDE_INT offset, HOST_WIDE_INT val_size,
1177 struct ipcp_agg_lattice ***aglat,
1178 bool pre_existing, bool *change)
1180 gcc_checking_assert (offset >= 0);
1182 while (**aglat && (**aglat)->offset < offset)
1184 if ((**aglat)->offset + (**aglat)->size > offset)
1186 set_agg_lats_to_bottom (dest_plats);
1187 return false;
1189 *change |= set_lattice_contains_variable (**aglat);
1190 *aglat = &(**aglat)->next;
1193 if (**aglat && (**aglat)->offset == offset)
1195 if ((**aglat)->size != val_size
1196 || ((**aglat)->next
1197 && (**aglat)->next->offset < offset + val_size))
1199 set_agg_lats_to_bottom (dest_plats);
1200 return false;
1202 gcc_checking_assert (!(**aglat)->next
1203 || (**aglat)->next->offset >= offset + val_size);
1204 return true;
1206 else
1208 struct ipcp_agg_lattice *new_al;
1210 if (**aglat && (**aglat)->offset < offset + val_size)
1212 set_agg_lats_to_bottom (dest_plats);
1213 return false;
1215 if (dest_plats->aggs_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1216 return false;
1217 dest_plats->aggs_count++;
1218 new_al = (struct ipcp_agg_lattice *) pool_alloc (ipcp_agg_lattice_pool);
1219 memset (new_al, 0, sizeof (*new_al));
1221 new_al->offset = offset;
1222 new_al->size = val_size;
1223 new_al->contains_variable = pre_existing;
1225 new_al->next = **aglat;
1226 **aglat = new_al;
1227 return true;
1231 /* Set all AGLAT and all other aggregate lattices reachable by next pointers as
1232 containing an unknown value. */
1234 static bool
1235 set_chain_of_aglats_contains_variable (struct ipcp_agg_lattice *aglat)
1237 bool ret = false;
1238 while (aglat)
1240 ret |= set_lattice_contains_variable (aglat);
1241 aglat = aglat->next;
1243 return ret;
1246 /* Merge existing aggregate lattices in SRC_PLATS to DEST_PLATS, subtracting
1247 DELTA_OFFSET. CS is the call graph edge and SRC_IDX the index of the source
1248 parameter used for lattice value sources. Return true if DEST_PLATS changed
1249 in any way. */
1251 static bool
1252 merge_aggregate_lattices (struct cgraph_edge *cs,
1253 struct ipcp_param_lattices *dest_plats,
1254 struct ipcp_param_lattices *src_plats,
1255 int src_idx, HOST_WIDE_INT offset_delta)
1257 bool pre_existing = dest_plats->aggs != NULL;
1258 struct ipcp_agg_lattice **dst_aglat;
1259 bool ret = false;
1261 if (set_check_aggs_by_ref (dest_plats, src_plats->aggs_by_ref))
1262 return true;
1263 if (src_plats->aggs_bottom)
1264 return set_agg_lats_contain_variable (dest_plats);
1265 if (src_plats->aggs_contain_variable)
1266 ret |= set_agg_lats_contain_variable (dest_plats);
1267 dst_aglat = &dest_plats->aggs;
1269 for (struct ipcp_agg_lattice *src_aglat = src_plats->aggs;
1270 src_aglat;
1271 src_aglat = src_aglat->next)
1273 HOST_WIDE_INT new_offset = src_aglat->offset - offset_delta;
1275 if (new_offset < 0)
1276 continue;
1277 if (merge_agg_lats_step (dest_plats, new_offset, src_aglat->size,
1278 &dst_aglat, pre_existing, &ret))
1280 struct ipcp_agg_lattice *new_al = *dst_aglat;
1282 dst_aglat = &(*dst_aglat)->next;
1283 if (src_aglat->bottom)
1285 ret |= set_lattice_contains_variable (new_al);
1286 continue;
1288 if (src_aglat->contains_variable)
1289 ret |= set_lattice_contains_variable (new_al);
1290 for (struct ipcp_value *val = src_aglat->values;
1291 val;
1292 val = val->next)
1293 ret |= add_value_to_lattice (new_al, val->value, cs, val, src_idx,
1294 src_aglat->offset);
1296 else if (dest_plats->aggs_bottom)
1297 return true;
1299 ret |= set_chain_of_aglats_contains_variable (*dst_aglat);
1300 return ret;
1303 /* Determine whether there is anything to propagate FROM SRC_PLATS through a
1304 pass-through JFUNC and if so, whether it has conform and conforms to the
1305 rules about propagating values passed by reference. */
1307 static bool
1308 agg_pass_through_permissible_p (struct ipcp_param_lattices *src_plats,
1309 struct ipa_jump_func *jfunc)
1311 return src_plats->aggs
1312 && (!src_plats->aggs_by_ref
1313 || ipa_get_jf_pass_through_agg_preserved (jfunc));
1316 /* Propagate scalar values across jump function JFUNC that is associated with
1317 edge CS and put the values into DEST_LAT. */
1319 static bool
1320 propagate_aggs_accross_jump_function (struct cgraph_edge *cs,
1321 struct ipa_jump_func *jfunc,
1322 struct ipcp_param_lattices *dest_plats)
1324 bool ret = false;
1326 if (dest_plats->aggs_bottom)
1327 return false;
1329 if (jfunc->type == IPA_JF_PASS_THROUGH
1330 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
1332 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1333 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1334 struct ipcp_param_lattices *src_plats;
1336 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1337 if (agg_pass_through_permissible_p (src_plats, jfunc))
1339 /* Currently we do not produce clobber aggregate jump
1340 functions, replace with merging when we do. */
1341 gcc_assert (!jfunc->agg.items);
1342 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats,
1343 src_idx, 0);
1345 else
1346 ret |= set_agg_lats_contain_variable (dest_plats);
1348 else if (jfunc->type == IPA_JF_ANCESTOR
1349 && ipa_get_jf_ancestor_agg_preserved (jfunc))
1351 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1352 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1353 struct ipcp_param_lattices *src_plats;
1355 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1356 if (src_plats->aggs && src_plats->aggs_by_ref)
1358 /* Currently we do not produce clobber aggregate jump
1359 functions, replace with merging when we do. */
1360 gcc_assert (!jfunc->agg.items);
1361 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats, src_idx,
1362 ipa_get_jf_ancestor_offset (jfunc));
1364 else if (!src_plats->aggs_by_ref)
1365 ret |= set_agg_lats_to_bottom (dest_plats);
1366 else
1367 ret |= set_agg_lats_contain_variable (dest_plats);
1369 else if (jfunc->agg.items)
1371 bool pre_existing = dest_plats->aggs != NULL;
1372 struct ipcp_agg_lattice **aglat = &dest_plats->aggs;
1373 struct ipa_agg_jf_item *item;
1374 int i;
1376 if (set_check_aggs_by_ref (dest_plats, jfunc->agg.by_ref))
1377 return true;
1379 FOR_EACH_VEC_ELT (*jfunc->agg.items, i, item)
1381 HOST_WIDE_INT val_size;
1383 if (item->offset < 0)
1384 continue;
1385 gcc_checking_assert (is_gimple_ip_invariant (item->value));
1386 val_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (item->value)));
1388 if (merge_agg_lats_step (dest_plats, item->offset, val_size,
1389 &aglat, pre_existing, &ret))
1391 ret |= add_value_to_lattice (*aglat, item->value, cs, NULL, 0, 0);
1392 aglat = &(*aglat)->next;
1394 else if (dest_plats->aggs_bottom)
1395 return true;
1398 ret |= set_chain_of_aglats_contains_variable (*aglat);
1400 else
1401 ret |= set_agg_lats_contain_variable (dest_plats);
1403 return ret;
1406 /* Propagate constants from the caller to the callee of CS. INFO describes the
1407 caller. */
1409 static bool
1410 propagate_constants_accross_call (struct cgraph_edge *cs)
1412 struct ipa_node_params *callee_info;
1413 enum availability availability;
1414 struct cgraph_node *callee, *alias_or_thunk;
1415 struct ipa_edge_args *args;
1416 bool ret = false;
1417 int i, args_count, parms_count;
1419 callee = cgraph_function_node (cs->callee, &availability);
1420 if (!callee->definition)
1421 return false;
1422 gcc_checking_assert (cgraph_function_with_gimple_body_p (callee));
1423 callee_info = IPA_NODE_REF (callee);
1425 args = IPA_EDGE_REF (cs);
1426 args_count = ipa_get_cs_argument_count (args);
1427 parms_count = ipa_get_param_count (callee_info);
1429 /* If this call goes through a thunk we must not propagate to the first (0th)
1430 parameter. However, we might need to uncover a thunk from below a series
1431 of aliases first. */
1432 alias_or_thunk = cs->callee;
1433 while (alias_or_thunk->alias)
1434 alias_or_thunk = cgraph_alias_target (alias_or_thunk);
1435 if (alias_or_thunk->thunk.thunk_p)
1437 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
1438 0));
1439 i = 1;
1441 else
1442 i = 0;
1444 for (; (i < args_count) && (i < parms_count); i++)
1446 struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
1447 struct ipcp_param_lattices *dest_plats;
1449 dest_plats = ipa_get_parm_lattices (callee_info, i);
1450 if (availability == AVAIL_OVERWRITABLE)
1451 ret |= set_all_contains_variable (dest_plats);
1452 else
1454 ret |= propagate_scalar_accross_jump_function (cs, jump_func,
1455 &dest_plats->itself);
1456 ret |= propagate_aggs_accross_jump_function (cs, jump_func,
1457 dest_plats);
1460 for (; i < parms_count; i++)
1461 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info, i));
1463 return ret;
1466 /* If an indirect edge IE can be turned into a direct one based on KNOWN_VALS
1467 (which can contain both constants and binfos), KNOWN_BINFOS, KNOWN_AGGS or
1468 AGG_REPS return the destination. The latter three can be NULL. If AGG_REPS
1469 is not NULL, KNOWN_AGGS is ignored. */
1471 static tree
1472 ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
1473 vec<tree> known_vals,
1474 vec<tree> known_binfos,
1475 vec<ipa_agg_jump_function_p> known_aggs,
1476 struct ipa_agg_replacement_value *agg_reps)
1478 int param_index = ie->indirect_info->param_index;
1479 HOST_WIDE_INT token, anc_offset;
1480 tree otr_type;
1481 tree t;
1482 tree target;
1484 if (param_index == -1
1485 || known_vals.length () <= (unsigned int) param_index)
1486 return NULL_TREE;
1488 if (!ie->indirect_info->polymorphic)
1490 tree t;
1492 if (ie->indirect_info->agg_contents)
1494 if (agg_reps)
1496 t = NULL;
1497 while (agg_reps)
1499 if (agg_reps->index == param_index
1500 && agg_reps->offset == ie->indirect_info->offset
1501 && agg_reps->by_ref == ie->indirect_info->by_ref)
1503 t = agg_reps->value;
1504 break;
1506 agg_reps = agg_reps->next;
1509 else if (known_aggs.length () > (unsigned int) param_index)
1511 struct ipa_agg_jump_function *agg;
1512 agg = known_aggs[param_index];
1513 t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
1514 ie->indirect_info->by_ref);
1516 else
1517 t = NULL;
1519 else
1520 t = known_vals[param_index];
1522 if (t &&
1523 TREE_CODE (t) == ADDR_EXPR
1524 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
1525 return TREE_OPERAND (t, 0);
1526 else
1527 return NULL_TREE;
1530 gcc_assert (!ie->indirect_info->agg_contents);
1531 token = ie->indirect_info->otr_token;
1532 anc_offset = ie->indirect_info->offset;
1533 otr_type = ie->indirect_info->otr_type;
1535 t = known_vals[param_index];
1536 if (!t && known_binfos.length () > (unsigned int) param_index)
1537 t = known_binfos[param_index];
1538 if (!t)
1539 return NULL_TREE;
1541 if (TREE_CODE (t) != TREE_BINFO)
1543 tree binfo;
1544 binfo = gimple_extract_devirt_binfo_from_cst
1545 (t, ie->indirect_info->otr_type);
1546 if (!binfo)
1547 return NULL_TREE;
1548 binfo = get_binfo_at_offset (binfo, anc_offset, otr_type);
1549 if (!binfo)
1550 return NULL_TREE;
1551 target = gimple_get_virt_method_for_binfo (token, binfo);
1553 else
1555 tree binfo;
1557 binfo = get_binfo_at_offset (t, anc_offset, otr_type);
1558 if (!binfo)
1559 return NULL_TREE;
1560 target = gimple_get_virt_method_for_binfo (token, binfo);
1562 #ifdef ENABLE_CHECKING
1563 if (target)
1564 gcc_assert (possible_polymorphic_call_target_p
1565 (ie, cgraph_get_node (target)));
1566 #endif
1568 return target;
1572 /* If an indirect edge IE can be turned into a direct one based on KNOWN_VALS
1573 (which can contain both constants and binfos), KNOWN_BINFOS (which can be
1574 NULL) or KNOWN_AGGS (which also can be NULL) return the destination. */
1576 tree
1577 ipa_get_indirect_edge_target (struct cgraph_edge *ie,
1578 vec<tree> known_vals,
1579 vec<tree> known_binfos,
1580 vec<ipa_agg_jump_function_p> known_aggs)
1582 return ipa_get_indirect_edge_target_1 (ie, known_vals, known_binfos,
1583 known_aggs, NULL);
1586 /* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
1587 and KNOWN_BINFOS. */
1589 static int
1590 devirtualization_time_bonus (struct cgraph_node *node,
1591 vec<tree> known_csts,
1592 vec<tree> known_binfos,
1593 vec<ipa_agg_jump_function_p> known_aggs)
1595 struct cgraph_edge *ie;
1596 int res = 0;
1598 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1600 struct cgraph_node *callee;
1601 struct inline_summary *isummary;
1602 tree target;
1604 target = ipa_get_indirect_edge_target (ie, known_csts, known_binfos,
1605 known_aggs);
1606 if (!target)
1607 continue;
1609 /* Only bare minimum benefit for clearly un-inlineable targets. */
1610 res += 1;
1611 callee = cgraph_get_node (target);
1612 if (!callee || !callee->definition)
1613 continue;
1614 isummary = inline_summary (callee);
1615 if (!isummary->inlinable)
1616 continue;
1618 /* FIXME: The values below need re-considering and perhaps also
1619 integrating into the cost metrics, at lest in some very basic way. */
1620 if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
1621 res += 31;
1622 else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
1623 res += 15;
1624 else if (isummary->size <= MAX_INLINE_INSNS_AUTO
1625 || DECL_DECLARED_INLINE_P (callee->decl))
1626 res += 7;
1629 return res;
1632 /* Return time bonus incurred because of HINTS. */
1634 static int
1635 hint_time_bonus (inline_hints hints)
1637 int result = 0;
1638 if (hints & (INLINE_HINT_loop_iterations | INLINE_HINT_loop_stride))
1639 result += PARAM_VALUE (PARAM_IPA_CP_LOOP_HINT_BONUS);
1640 if (hints & INLINE_HINT_array_index)
1641 result += PARAM_VALUE (PARAM_IPA_CP_ARRAY_INDEX_HINT_BONUS);
1642 return result;
1645 /* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
1646 and SIZE_COST and with the sum of frequencies of incoming edges to the
1647 potential new clone in FREQUENCIES. */
1649 static bool
1650 good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
1651 int freq_sum, gcov_type count_sum, int size_cost)
1653 if (time_benefit == 0
1654 || !flag_ipa_cp_clone
1655 || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
1656 return false;
1658 gcc_assert (size_cost > 0);
1660 if (max_count)
1662 int factor = (count_sum * 1000) / max_count;
1663 HOST_WIDEST_INT evaluation = (((HOST_WIDEST_INT) time_benefit * factor)
1664 / size_cost);
1666 if (dump_file && (dump_flags & TDF_DETAILS))
1667 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1668 "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
1669 ") -> evaluation: " HOST_WIDEST_INT_PRINT_DEC
1670 ", threshold: %i\n",
1671 time_benefit, size_cost, (HOST_WIDE_INT) count_sum,
1672 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
1674 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
1676 else
1678 HOST_WIDEST_INT evaluation = (((HOST_WIDEST_INT) time_benefit * freq_sum)
1679 / size_cost);
1681 if (dump_file && (dump_flags & TDF_DETAILS))
1682 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1683 "size: %i, freq_sum: %i) -> evaluation: "
1684 HOST_WIDEST_INT_PRINT_DEC ", threshold: %i\n",
1685 time_benefit, size_cost, freq_sum, evaluation,
1686 PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
1688 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
1692 /* Return all context independent values from aggregate lattices in PLATS in a
1693 vector. Return NULL if there are none. */
1695 static vec<ipa_agg_jf_item, va_gc> *
1696 context_independent_aggregate_values (struct ipcp_param_lattices *plats)
1698 vec<ipa_agg_jf_item, va_gc> *res = NULL;
1700 if (plats->aggs_bottom
1701 || plats->aggs_contain_variable
1702 || plats->aggs_count == 0)
1703 return NULL;
1705 for (struct ipcp_agg_lattice *aglat = plats->aggs;
1706 aglat;
1707 aglat = aglat->next)
1708 if (ipa_lat_is_single_const (aglat))
1710 struct ipa_agg_jf_item item;
1711 item.offset = aglat->offset;
1712 item.value = aglat->values->value;
1713 vec_safe_push (res, item);
1715 return res;
1718 /* Allocate KNOWN_CSTS, KNOWN_BINFOS and, if non-NULL, KNOWN_AGGS and populate
1719 them with values of parameters that are known independent of the context.
1720 INFO describes the function. If REMOVABLE_PARAMS_COST is non-NULL, the
1721 movement cost of all removable parameters will be stored in it. */
1723 static bool
1724 gather_context_independent_values (struct ipa_node_params *info,
1725 vec<tree> *known_csts,
1726 vec<tree> *known_binfos,
1727 vec<ipa_agg_jump_function> *known_aggs,
1728 int *removable_params_cost)
1730 int i, count = ipa_get_param_count (info);
1731 bool ret = false;
1733 known_csts->create (0);
1734 known_binfos->create (0);
1735 known_csts->safe_grow_cleared (count);
1736 known_binfos->safe_grow_cleared (count);
1737 if (known_aggs)
1739 known_aggs->create (0);
1740 known_aggs->safe_grow_cleared (count);
1743 if (removable_params_cost)
1744 *removable_params_cost = 0;
1746 for (i = 0; i < count ; i++)
1748 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1749 struct ipcp_lattice *lat = &plats->itself;
1751 if (ipa_lat_is_single_const (lat))
1753 struct ipcp_value *val = lat->values;
1754 if (TREE_CODE (val->value) != TREE_BINFO)
1756 (*known_csts)[i] = val->value;
1757 if (removable_params_cost)
1758 *removable_params_cost
1759 += estimate_move_cost (TREE_TYPE (val->value));
1760 ret = true;
1762 else if (plats->virt_call)
1764 (*known_binfos)[i] = val->value;
1765 ret = true;
1767 else if (removable_params_cost
1768 && !ipa_is_param_used (info, i))
1769 *removable_params_cost += ipa_get_param_move_cost (info, i);
1771 else if (removable_params_cost
1772 && !ipa_is_param_used (info, i))
1773 *removable_params_cost
1774 += ipa_get_param_move_cost (info, i);
1776 if (known_aggs)
1778 vec<ipa_agg_jf_item, va_gc> *agg_items;
1779 struct ipa_agg_jump_function *ajf;
1781 agg_items = context_independent_aggregate_values (plats);
1782 ajf = &(*known_aggs)[i];
1783 ajf->items = agg_items;
1784 ajf->by_ref = plats->aggs_by_ref;
1785 ret |= agg_items != NULL;
1789 return ret;
1792 /* The current interface in ipa-inline-analysis requires a pointer vector.
1793 Create it.
1795 FIXME: That interface should be re-worked, this is slightly silly. Still,
1796 I'd like to discuss how to change it first and this demonstrates the
1797 issue. */
1799 static vec<ipa_agg_jump_function_p>
1800 agg_jmp_p_vec_for_t_vec (vec<ipa_agg_jump_function> known_aggs)
1802 vec<ipa_agg_jump_function_p> ret;
1803 struct ipa_agg_jump_function *ajf;
1804 int i;
1806 ret.create (known_aggs.length ());
1807 FOR_EACH_VEC_ELT (known_aggs, i, ajf)
1808 ret.quick_push (ajf);
1809 return ret;
1812 /* Iterate over known values of parameters of NODE and estimate the local
1813 effects in terms of time and size they have. */
1815 static void
1816 estimate_local_effects (struct cgraph_node *node)
1818 struct ipa_node_params *info = IPA_NODE_REF (node);
1819 int i, count = ipa_get_param_count (info);
1820 vec<tree> known_csts, known_binfos;
1821 vec<ipa_agg_jump_function> known_aggs;
1822 vec<ipa_agg_jump_function_p> known_aggs_ptrs;
1823 bool always_const;
1824 int base_time = inline_summary (node)->time;
1825 int removable_params_cost;
1827 if (!count || !ipcp_versionable_function_p (node))
1828 return;
1830 if (dump_file && (dump_flags & TDF_DETAILS))
1831 fprintf (dump_file, "\nEstimating effects for %s/%i, base_time: %i.\n",
1832 node->name (), node->order, base_time);
1834 always_const = gather_context_independent_values (info, &known_csts,
1835 &known_binfos, &known_aggs,
1836 &removable_params_cost);
1837 known_aggs_ptrs = agg_jmp_p_vec_for_t_vec (known_aggs);
1838 if (always_const)
1840 struct caller_statistics stats;
1841 inline_hints hints;
1842 int time, size;
1844 init_caller_stats (&stats);
1845 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
1846 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1847 known_aggs_ptrs, &size, &time, &hints);
1848 time -= devirtualization_time_bonus (node, known_csts, known_binfos,
1849 known_aggs_ptrs);
1850 time -= hint_time_bonus (hints);
1851 time -= removable_params_cost;
1852 size -= stats.n_calls * removable_params_cost;
1854 if (dump_file)
1855 fprintf (dump_file, " - context independent values, size: %i, "
1856 "time_benefit: %i\n", size, base_time - time);
1858 if (size <= 0
1859 || cgraph_will_be_removed_from_program_if_no_direct_calls (node))
1861 info->do_clone_for_all_contexts = true;
1862 base_time = time;
1864 if (dump_file)
1865 fprintf (dump_file, " Decided to specialize for all "
1866 "known contexts, code not going to grow.\n");
1868 else if (good_cloning_opportunity_p (node, base_time - time,
1869 stats.freq_sum, stats.count_sum,
1870 size))
1872 if (size + overall_size <= max_new_size)
1874 info->do_clone_for_all_contexts = true;
1875 base_time = time;
1876 overall_size += size;
1878 if (dump_file)
1879 fprintf (dump_file, " Decided to specialize for all "
1880 "known contexts, growth deemed beneficial.\n");
1882 else if (dump_file && (dump_flags & TDF_DETAILS))
1883 fprintf (dump_file, " Not cloning for all contexts because "
1884 "max_new_size would be reached with %li.\n",
1885 size + overall_size);
1889 for (i = 0; i < count ; i++)
1891 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1892 struct ipcp_lattice *lat = &plats->itself;
1893 struct ipcp_value *val;
1894 int emc;
1896 if (lat->bottom
1897 || !lat->values
1898 || known_csts[i]
1899 || known_binfos[i])
1900 continue;
1902 for (val = lat->values; val; val = val->next)
1904 int time, size, time_benefit;
1905 inline_hints hints;
1907 if (TREE_CODE (val->value) != TREE_BINFO)
1909 known_csts[i] = val->value;
1910 known_binfos[i] = NULL_TREE;
1911 emc = estimate_move_cost (TREE_TYPE (val->value));
1913 else if (plats->virt_call)
1915 known_csts[i] = NULL_TREE;
1916 known_binfos[i] = val->value;
1917 emc = 0;
1919 else
1920 continue;
1922 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1923 known_aggs_ptrs, &size, &time,
1924 &hints);
1925 time_benefit = base_time - time
1926 + devirtualization_time_bonus (node, known_csts, known_binfos,
1927 known_aggs_ptrs)
1928 + hint_time_bonus (hints)
1929 + removable_params_cost + emc;
1931 gcc_checking_assert (size >=0);
1932 /* The inliner-heuristics based estimates may think that in certain
1933 contexts some functions do not have any size at all but we want
1934 all specializations to have at least a tiny cost, not least not to
1935 divide by zero. */
1936 if (size == 0)
1937 size = 1;
1939 if (dump_file && (dump_flags & TDF_DETAILS))
1941 fprintf (dump_file, " - estimates for value ");
1942 print_ipcp_constant_value (dump_file, val->value);
1943 fprintf (dump_file, " for ");
1944 ipa_dump_param (dump_file, info, i);
1945 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
1946 time_benefit, size);
1949 val->local_time_benefit = time_benefit;
1950 val->local_size_cost = size;
1952 known_binfos[i] = NULL_TREE;
1953 known_csts[i] = NULL_TREE;
1956 for (i = 0; i < count ; i++)
1958 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1959 struct ipa_agg_jump_function *ajf;
1960 struct ipcp_agg_lattice *aglat;
1962 if (plats->aggs_bottom || !plats->aggs)
1963 continue;
1965 ajf = &known_aggs[i];
1966 for (aglat = plats->aggs; aglat; aglat = aglat->next)
1968 struct ipcp_value *val;
1969 if (aglat->bottom || !aglat->values
1970 /* If the following is true, the one value is in known_aggs. */
1971 || (!plats->aggs_contain_variable
1972 && ipa_lat_is_single_const (aglat)))
1973 continue;
1975 for (val = aglat->values; val; val = val->next)
1977 int time, size, time_benefit;
1978 struct ipa_agg_jf_item item;
1979 inline_hints hints;
1981 item.offset = aglat->offset;
1982 item.value = val->value;
1983 vec_safe_push (ajf->items, item);
1985 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1986 known_aggs_ptrs, &size, &time,
1987 &hints);
1988 time_benefit = base_time - time
1989 + devirtualization_time_bonus (node, known_csts, known_binfos,
1990 known_aggs_ptrs)
1991 + hint_time_bonus (hints);
1992 gcc_checking_assert (size >=0);
1993 if (size == 0)
1994 size = 1;
1996 if (dump_file && (dump_flags & TDF_DETAILS))
1998 fprintf (dump_file, " - estimates for value ");
1999 print_ipcp_constant_value (dump_file, val->value);
2000 fprintf (dump_file, " for ");
2001 ipa_dump_param (dump_file, info, i);
2002 fprintf (dump_file, "[%soffset: " HOST_WIDE_INT_PRINT_DEC
2003 "]: time_benefit: %i, size: %i\n",
2004 plats->aggs_by_ref ? "ref " : "",
2005 aglat->offset, time_benefit, size);
2008 val->local_time_benefit = time_benefit;
2009 val->local_size_cost = size;
2010 ajf->items->pop ();
2015 for (i = 0; i < count ; i++)
2016 vec_free (known_aggs[i].items);
2018 known_csts.release ();
2019 known_binfos.release ();
2020 known_aggs.release ();
2021 known_aggs_ptrs.release ();
2025 /* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
2026 topological sort of values. */
2028 static void
2029 add_val_to_toposort (struct ipcp_value *cur_val)
2031 static int dfs_counter = 0;
2032 static struct ipcp_value *stack;
2033 struct ipcp_value_source *src;
2035 if (cur_val->dfs)
2036 return;
2038 dfs_counter++;
2039 cur_val->dfs = dfs_counter;
2040 cur_val->low_link = dfs_counter;
2042 cur_val->topo_next = stack;
2043 stack = cur_val;
2044 cur_val->on_stack = true;
2046 for (src = cur_val->sources; src; src = src->next)
2047 if (src->val)
2049 if (src->val->dfs == 0)
2051 add_val_to_toposort (src->val);
2052 if (src->val->low_link < cur_val->low_link)
2053 cur_val->low_link = src->val->low_link;
2055 else if (src->val->on_stack
2056 && src->val->dfs < cur_val->low_link)
2057 cur_val->low_link = src->val->dfs;
2060 if (cur_val->dfs == cur_val->low_link)
2062 struct ipcp_value *v, *scc_list = NULL;
2066 v = stack;
2067 stack = v->topo_next;
2068 v->on_stack = false;
2070 v->scc_next = scc_list;
2071 scc_list = v;
2073 while (v != cur_val);
2075 cur_val->topo_next = values_topo;
2076 values_topo = cur_val;
2080 /* Add all values in lattices associated with NODE to the topological sort if
2081 they are not there yet. */
2083 static void
2084 add_all_node_vals_to_toposort (struct cgraph_node *node)
2086 struct ipa_node_params *info = IPA_NODE_REF (node);
2087 int i, count = ipa_get_param_count (info);
2089 for (i = 0; i < count ; i++)
2091 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2092 struct ipcp_lattice *lat = &plats->itself;
2093 struct ipcp_agg_lattice *aglat;
2094 struct ipcp_value *val;
2096 if (!lat->bottom)
2097 for (val = lat->values; val; val = val->next)
2098 add_val_to_toposort (val);
2100 if (!plats->aggs_bottom)
2101 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2102 if (!aglat->bottom)
2103 for (val = aglat->values; val; val = val->next)
2104 add_val_to_toposort (val);
/* One pass of constants propagation along the call graph edges, from callers
   to callees (requires topological ordering in TOPO), iterate over strongly
   connected components.  */

static void
propagate_constants_topo (struct topo_info *topo)
{
  int i;

  /* Walk the topological order backwards so that callers are visited before
     their callees.  */
  for (i = topo->nnodes - 1; i >= 0; i--)
    {
      unsigned j;
      struct cgraph_node *v, *node = topo->order[i];
      vec<cgraph_node_ptr> cycle_nodes = ipa_get_nodes_in_cycle (node);

      /* First, iteratively propagate within the strongly connected component
	 until all lattices stabilize.  */
      FOR_EACH_VEC_ELT (cycle_nodes, j, v)
	if (cgraph_function_with_gimple_body_p (v))
	  push_node_to_stack (topo, v);

      /* Worklist loop: whenever propagation across an intra-SCC edge changes
	 the callee's lattices, the callee is pushed back for re-processing.
	 Terminates because lattices only ever move down.  */
      v = pop_node_from_stack (topo);
      while (v)
	{
	  struct cgraph_edge *cs;

	  for (cs = v->callees; cs; cs = cs->next_callee)
	    if (ipa_edge_within_scc (cs)
		&& propagate_constants_accross_call (cs))
	      push_node_to_stack (topo, cs->callee);
	  v = pop_node_from_stack (topo);
	}

      /* Afterwards, propagate along edges leading out of the SCC, calculates
	 the local effects of the discovered constants and all valid values to
	 their topological sort.  */
      FOR_EACH_VEC_ELT (cycle_nodes, j, v)
	if (cgraph_function_with_gimple_body_p (v))
	  {
	    struct cgraph_edge *cs;

	    estimate_local_effects (v);
	    add_all_node_vals_to_toposort (v);
	    for (cs = v->callees; cs; cs = cs->next_callee)
	      if (!ipa_edge_within_scc (cs))
		propagate_constants_accross_call (cs);
	  }
      cycle_nodes.release ();
    }
}
/* Return the sum of A and B if none of them is bigger than INT_MAX/2, return
   the bigger one if otherwise.  */

static int
safe_add (int a, int b)
{
  /* Saturate rather than overflow: once either operand crosses half of
     INT_MAX the sum could wrap, so keep the larger operand instead.  */
  if (a <= INT_MAX / 2 && b <= INT_MAX / 2)
    return a + b;
  return a > b ? a : b;
}
/* Propagate the estimated effects of individual values along the topological
   from the dependent values to those they depend on.  */

static void
propagate_effects (void)
{
  struct ipcp_value *base;

  /* values_topo links SCC representatives in topological order; members of
     one SCC are chained through scc_next.  */
  for (base = values_topo; base; base = base->topo_next)
    {
      struct ipcp_value_source *src;
      struct ipcp_value *val;
      int time = 0, size = 0;

      /* Accumulate local plus already-propagated benefit/cost over the whole
	 SCC; safe_add saturates so the totals cannot overflow.  */
      for (val = base; val; val = val->scc_next)
	{
	  time = safe_add (time,
			   val->local_time_benefit + val->prop_time_benefit);
	  size = safe_add (size, val->local_size_cost + val->prop_size_cost);
	}

      /* Push the totals to every source value this SCC depends on, but only
	 across edges that are maybe hot.  */
      for (val = base; val; val = val->scc_next)
	for (src = val->sources; src; src = src->next)
	  if (src->val
	      && cgraph_maybe_hot_edge_p (src->cs))
	    {
	      src->val->prop_time_benefit = safe_add (time,
						      src->val->prop_time_benefit);
	      src->val->prop_size_cost = safe_add (size,
						   src->val->prop_size_cost);
	    }
    }
}
/* Propagate constants, binfos and their effects from the summaries
   interprocedurally.  Entry point of the propagation stage: initializes
   per-node lattices, computes unit-size limits, runs topological propagation
   and then value-effect propagation.  */

static void
ipcp_propagate_stage (struct topo_info *topo)
{
  struct cgraph_node *node;

  if (dump_file)
    fprintf (dump_file, "\n Propagating constants:\n\n");

  if (in_lto_p)
    ipa_update_after_lto_read ();

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      struct ipa_node_params *info = IPA_NODE_REF (node);

      determine_versionability (node);
      if (cgraph_function_with_gimple_body_p (node))
	{
	  /* XCNEWVEC zeroes the lattices; initialize_node_lattices sets up
	     the rest.  */
	  info->lattices = XCNEWVEC (struct ipcp_param_lattices,
				     ipa_get_param_count (info));
	  initialize_node_lattices (node);
	}
      if (node->definition && !node->alias)
	overall_size += inline_summary (node)->self_size;
      if (node->count > max_count)
	max_count = node->count;
    }

  /* Budget for cloning growth: at least PARAM_LARGE_UNIT_INSNS, grown by
     PARAM_IPCP_UNIT_GROWTH percent.  */
  max_new_size = overall_size;
  if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
    max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
  max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;

  if (dump_file)
    fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
	     overall_size, max_new_size);

  propagate_constants_topo (topo);
#ifdef ENABLE_CHECKING
  ipcp_verify_propagated_values ();
#endif
  propagate_effects ();

  if (dump_file)
    {
      fprintf (dump_file, "\nIPA lattices after all propagation:\n");
      print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
    }
}
/* Discover newly direct outgoing edges from NODE which is a new clone with
   known KNOWN_VALS and make them direct.  Also decrements the
   controlled-uses count of the corresponding parameter and drops the
   cloning-created reference once the count reaches zero.  */

static void
ipcp_discover_new_direct_edges (struct cgraph_node *node,
				vec<tree> known_vals,
				struct ipa_agg_replacement_value *aggvals)
{
  struct cgraph_edge *ie, *next_ie;
  bool found = false;

  /* next_ie is cached up front because making an edge direct may unlink it
     from the indirect list.  */
  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      tree target;

      next_ie = ie->next_callee;
      target = ipa_get_indirect_edge_target_1 (ie, known_vals, vNULL, vNULL,
					       aggvals);
      if (target)
	{
	  /* Save these before ipa_make_edge_direct_to_target, which may
	     invalidate IE.  */
	  bool agg_contents = ie->indirect_info->agg_contents;
	  bool polymorphic = ie->indirect_info->polymorphic;
	  int param_index = ie->indirect_info->param_index;
	  struct cgraph_edge *cs = ipa_make_edge_direct_to_target (ie, target);
	  found = true;

	  if (cs && !agg_contents && !polymorphic)
	    {
	      struct ipa_node_params *info = IPA_NODE_REF (node);
	      int c = ipa_get_controlled_uses (info, param_index);
	      if (c != IPA_UNDESCRIBED_USE)
		{
		  struct ipa_ref *to_del;

		  c--;
		  ipa_set_controlled_uses (info, param_index, c);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, " controlled uses count of param "
			     "%i bumped down to %i\n", param_index, c);
		  if (c == 0
		      && (to_del = ipa_find_reference (node,
						       cs->callee,
						       NULL, 0)))
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file, " and even removing its "
				 "cloning-created reference\n");
		      ipa_remove_reference (to_del);
		    }
		}
	    }
	}
    }
  /* Turning calls to direct calls will improve overall summary.  */
  if (found)
    inline_update_overall_summary (node);
}
/* Vector of pointers which for linked lists of clones of an original cgraph
   edge.  Indexed by edge uid; entry E points to the next clone of edge E,
   forming a singly linked list per original edge.  */

static vec<cgraph_edge_p> next_edge_clone;

/* Make sure NEXT_EDGE_CLONE can be indexed by any edge uid currently in the
   call graph, zero-filling (NULL-terminating) newly added slots.  */

static inline void
grow_next_edge_clone_vector (void)
{
  if (next_edge_clone.length ()
      <= (unsigned) cgraph_edge_max_uid)
    next_edge_clone.safe_grow_cleared (cgraph_edge_max_uid + 1);
}
/* Edge duplication hook to grow the appropriate linked list in
   next_edge_clone.  The new clone DST is spliced into SRC's clone list
   immediately after SRC.  */

static void
ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			    __attribute__((unused)) void *data)
{
  grow_next_edge_clone_vector ();
  /* Standard singly-linked-list insertion: DST inherits SRC's successor and
     becomes SRC's new successor.  */
  next_edge_clone[dst->uid] = next_edge_clone[src->uid];
  next_edge_clone[src->uid] = dst;
}
2345 /* See if NODE is a clone with a known aggregate value at a given OFFSET of a
2346 parameter with the given INDEX. */
2348 static tree
2349 get_clone_agg_value (struct cgraph_node *node, HOST_WIDEST_INT offset,
2350 int index)
2352 struct ipa_agg_replacement_value *aggval;
2354 aggval = ipa_get_agg_replacements_for_node (node);
2355 while (aggval)
2357 if (aggval->offset == offset
2358 && aggval->index == index)
2359 return aggval->value;
2360 aggval = aggval->next;
2362 return NULL_TREE;
/* Return true if edge CS does bring about the value described by SRC.  */

static bool
cgraph_edge_brings_value_p (struct cgraph_edge *cs,
			    struct ipcp_value_source *src)
{
  struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
  struct ipa_node_params *dst_info = IPA_NODE_REF (cs->callee);

  /* Edges into other specialized clones (except all-contexts clones) or out
     of dead callers never count.  */
  if ((dst_info->ipcp_orig_node && !dst_info->is_all_contexts_clone)
      || caller_info->node_dead)
    return false;
  /* A source without a value represents "any caller"; trivially brought.  */
  if (!src->val)
    return true;

  if (caller_info->ipcp_orig_node)
    {
      /* The caller is itself a clone: consult the concrete values it was
	 specialized for.  src->offset == -1 means a scalar parameter value,
	 otherwise an aggregate value at that offset.  */
      tree t;
      if (src->offset == -1)
	t = caller_info->known_vals[src->index];
      else
	t = get_clone_agg_value (cs->caller, src->offset, src->index);
      return (t != NULL_TREE
	      && values_equal_for_ipcp_p (src->val->value, t));
    }
  else
    {
      /* Unspecialized caller: the value is brought only if the corresponding
	 lattice has collapsed to exactly this single constant.  */
      struct ipcp_agg_lattice *aglat;
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
								 src->index);
      if (src->offset == -1)
	return (ipa_lat_is_single_const (&plats->itself)
		&& values_equal_for_ipcp_p (src->val->value,
					    plats->itself.values->value));
      else
	{
	  if (plats->aggs_bottom || plats->aggs_contain_variable)
	    return false;
	  for (aglat = plats->aggs; aglat; aglat = aglat->next)
	    if (aglat->offset == src->offset)
	      return (ipa_lat_is_single_const (aglat)
		      && values_equal_for_ipcp_p (src->val->value,
						  aglat->values->value));
	  return false;
	}
    }
}
/* Get the next clone in the linked list of clones of an edge, or NULL if CS
   is the last one (the list lives in next_edge_clone, indexed by uid).  */

static inline struct cgraph_edge *
get_next_cgraph_edge_clone (struct cgraph_edge *cs)
{
  return next_edge_clone[cs->uid];
}
2421 /* Given VAL, iterate over all its sources and if they still hold, add their
2422 edge frequency and their number into *FREQUENCY and *CALLER_COUNT
2423 respectively. */
2425 static bool
2426 get_info_about_necessary_edges (struct ipcp_value *val, int *freq_sum,
2427 gcov_type *count_sum, int *caller_count)
2429 struct ipcp_value_source *src;
2430 int freq = 0, count = 0;
2431 gcov_type cnt = 0;
2432 bool hot = false;
2434 for (src = val->sources; src; src = src->next)
2436 struct cgraph_edge *cs = src->cs;
2437 while (cs)
2439 if (cgraph_edge_brings_value_p (cs, src))
2441 count++;
2442 freq += cs->frequency;
2443 cnt += cs->count;
2444 hot |= cgraph_maybe_hot_edge_p (cs);
2446 cs = get_next_cgraph_edge_clone (cs);
2450 *freq_sum = freq;
2451 *count_sum = cnt;
2452 *caller_count = count;
2453 return hot;
2456 /* Return a vector of incoming edges that do bring value VAL. It is assumed
2457 their number is known and equal to CALLER_COUNT. */
2459 static vec<cgraph_edge_p>
2460 gather_edges_for_value (struct ipcp_value *val, int caller_count)
2462 struct ipcp_value_source *src;
2463 vec<cgraph_edge_p> ret;
2465 ret.create (caller_count);
2466 for (src = val->sources; src; src = src->next)
2468 struct cgraph_edge *cs = src->cs;
2469 while (cs)
2471 if (cgraph_edge_brings_value_p (cs, src))
2472 ret.quick_push (cs);
2473 cs = get_next_cgraph_edge_clone (cs);
2477 return ret;
2480 /* Construct a replacement map for a know VALUE for a formal parameter PARAM.
2481 Return it or NULL if for some reason it cannot be created. */
2483 static struct ipa_replace_map *
2484 get_replacement_map (struct ipa_node_params *info, tree value, int parm_num)
2486 struct ipa_replace_map *replace_map;
2489 replace_map = ggc_alloc_ipa_replace_map ();
2490 if (dump_file)
2492 fprintf (dump_file, " replacing ");
2493 ipa_dump_param (dump_file, info, parm_num);
2495 fprintf (dump_file, " with const ");
2496 print_generic_expr (dump_file, value, 0);
2497 fprintf (dump_file, "\n");
2499 replace_map->old_tree = NULL;
2500 replace_map->parm_num = parm_num;
2501 replace_map->new_tree = value;
2502 replace_map->replace_p = true;
2503 replace_map->ref_p = false;
2505 return replace_map;
/* Dump new profiling counts of NEW_NODE (a specialized clone) and of the
   ORIG_NODE it was cloned from, including per-callee edge counts.  Callers
   guarantee dump_file is non-NULL.  */

static void
dump_profile_updates (struct cgraph_node *orig_node,
		      struct cgraph_node *new_node)
{
  struct cgraph_edge *cs;

  fprintf (dump_file, " setting count of the specialized node to "
	   HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) new_node->count);
  for (cs = new_node->callees; cs ; cs = cs->next_callee)
    fprintf (dump_file, " edge to %s has count "
	     HOST_WIDE_INT_PRINT_DEC "\n",
	     cs->callee->name (), (HOST_WIDE_INT) cs->count);

  fprintf (dump_file, " setting count of the original node to "
	   HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) orig_node->count);
  for (cs = orig_node->callees; cs ; cs = cs->next_callee)
    fprintf (dump_file, " edge to %s is left with "
	     HOST_WIDE_INT_PRINT_DEC "\n",
	     cs->callee->name (), (HOST_WIDE_INT) cs->count);
}
/* After a specialized NEW_NODE version of ORIG_NODE has been created, update
   their profile information to reflect this.  The counts moved to NEW_NODE
   are those of the incoming edges redirected to it; outgoing edge counts of
   both nodes are scaled proportionally.  */

static void
update_profiling_info (struct cgraph_node *orig_node,
		       struct cgraph_node *new_node)
{
  struct cgraph_edge *cs;
  struct caller_statistics stats;
  gcov_type new_sum, orig_sum;
  gcov_type remainder, orig_node_count = orig_node->count;

  if (orig_node_count == 0)
    return;

  /* Sum the counts of edges still coming into ORIG_NODE and of those now
     coming into NEW_NODE.  */
  init_caller_stats (&stats);
  cgraph_for_node_and_aliases (orig_node, gather_caller_stats, &stats, false);
  orig_sum = stats.count_sum;
  init_caller_stats (&stats);
  cgraph_for_node_and_aliases (new_node, gather_caller_stats, &stats, false);
  new_sum = stats.count_sum;

  if (orig_node_count < orig_sum + new_sum)
    {
      /* Inconsistent profile (node count lower than the sum of incoming edge
	 counts); compensate by inflating the node count by 20%.  */
      if (dump_file)
	fprintf (dump_file, " Problem: node %s/%i has too low count "
		 HOST_WIDE_INT_PRINT_DEC " while the sum of incoming "
		 "counts is " HOST_WIDE_INT_PRINT_DEC "\n",
		 orig_node->name (), orig_node->order,
		 (HOST_WIDE_INT) orig_node_count,
		 (HOST_WIDE_INT) (orig_sum + new_sum));

      orig_node_count = (orig_sum + new_sum) * 12 / 10;
      if (dump_file)
	fprintf (dump_file, " proceeding by pretending it was "
		 HOST_WIDE_INT_PRINT_DEC "\n",
		 (HOST_WIDE_INT) orig_node_count);
    }

  new_node->count = new_sum;
  remainder = orig_node_count - new_sum;
  orig_node->count = remainder;

  /* Scale outgoing edge counts of the clone by new_sum/orig_node_count and
     of the original by remainder/orig_node_count.  */
  for (cs = new_node->callees; cs ; cs = cs->next_callee)
    if (cs->frequency)
      cs->count = apply_probability (cs->count,
				     GCOV_COMPUTE_SCALE (new_sum,
							 orig_node_count));
    else
      cs->count = 0;

  for (cs = orig_node->callees; cs ; cs = cs->next_callee)
    cs->count = apply_probability (cs->count,
				   GCOV_COMPUTE_SCALE (remainder,
						       orig_node_count));

  if (dump_file)
    dump_profile_updates (orig_node, new_node);
}
/* Update the respective profile of specialized NEW_NODE and the original
   ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
   have been redirected to the specialized version.  */

static void
update_specialized_profile (struct cgraph_node *new_node,
			    struct cgraph_node *orig_node,
			    gcov_type redirected_sum)
{
  struct cgraph_edge *cs;
  gcov_type new_node_count, orig_node_count = orig_node->count;

  if (dump_file)
    fprintf (dump_file, " the sum of counts of redirected edges is "
	     HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) redirected_sum);
  if (orig_node_count == 0)
    return;

  gcc_assert (orig_node_count >= redirected_sum);

  /* Move REDIRECTED_SUM worth of execution count from the original node to
     the clone.  */
  new_node_count = new_node->count;
  new_node->count += redirected_sum;
  orig_node->count -= redirected_sum;

  /* Scale the clone's outgoing edge counts up proportionally...  */
  for (cs = new_node->callees; cs ; cs = cs->next_callee)
    if (cs->frequency)
      cs->count += apply_probability (cs->count,
				      GCOV_COMPUTE_SCALE (redirected_sum,
							  new_node_count));
    else
      cs->count = 0;

  /* ...and the original's down, never below zero.  */
  for (cs = orig_node->callees; cs ; cs = cs->next_callee)
    {
      gcov_type dec = apply_probability (cs->count,
					 GCOV_COMPUTE_SCALE (redirected_sum,
							     orig_node_count));
      if (dec < cs->count)
	cs->count -= dec;
      else
	cs->count = 0;
    }

  if (dump_file)
    dump_profile_updates (orig_node, new_node);
}
/* Create a specialized version of NODE with known constants and types of
   parameters in KNOWN_VALS and redirect all edges in CALLERS to it.  Returns
   the new clone; takes ownership of KNOWN_VALS and CALLERS (CALLERS is
   released here, KNOWN_VALS is stored in the clone's ipa_node_params).  */

static struct cgraph_node *
create_specialized_node (struct cgraph_node *node,
			 vec<tree> known_vals,
			 struct ipa_agg_replacement_value *aggvals,
			 vec<cgraph_edge_p> callers)
{
  struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
  vec<ipa_replace_map_p, va_gc> *replace_trees = NULL;
  struct ipa_agg_replacement_value *av;
  struct cgraph_node *new_node;
  int i, count = ipa_get_param_count (info);
  bitmap args_to_skip;

  /* Only originals may be specialized, never clones of clones.  */
  gcc_assert (!info->ipcp_orig_node);

  if (node->local.can_change_signature)
    {
      /* Drop parameters that are replaced by a constant (non-BINFO) or are
	 unused altogether.  */
      args_to_skip = BITMAP_GGC_ALLOC ();
      for (i = 0; i < count; i++)
	{
	  tree t = known_vals[i];

	  if ((t && TREE_CODE (t) != TREE_BINFO)
	      || !ipa_is_param_used (info, i))
	    bitmap_set_bit (args_to_skip, i);
	}
    }
  else
    {
      args_to_skip = NULL;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " cannot change function signature\n");
    }

  /* Build replace maps for all known scalar constants (BINFOs describe types,
     not replaceable values).  */
  for (i = 0; i < count ; i++)
    {
      tree t = known_vals[i];
      if (t && TREE_CODE (t) != TREE_BINFO)
	{
	  struct ipa_replace_map *replace_map;

	  replace_map = get_replacement_map (info, t, i);
	  if (replace_map)
	    vec_safe_push (replace_trees, replace_map);
	}
    }

  new_node = cgraph_create_virtual_clone (node, callers, replace_trees,
					  args_to_skip, "constprop");
  ipa_set_node_agg_value_chain (new_node, aggvals);
  /* Record references to the constants stuffed into aggregates so the
     referenced entities are kept alive.  */
  for (av = aggvals; av; av = av->next)
    ipa_maybe_record_reference (new_node, av->value,
				IPA_REF_ADDR, NULL);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, " the new node is %s/%i.\n",
	       new_node->name (), new_node->order);
      if (aggvals)
	ipa_dump_agg_replacement_values (dump_file, aggvals);
    }
  gcc_checking_assert (ipa_node_params_vector.exists ()
		       && (ipa_node_params_vector.length ()
			   > (unsigned) cgraph_max_uid));
  update_profiling_info (node, new_node);
  new_info = IPA_NODE_REF (new_node);
  new_info->ipcp_orig_node = node;
  new_info->known_vals = known_vals;

  ipcp_discover_new_direct_edges (new_node, known_vals, aggvals);

  callers.release ();
  return new_node;
}
/* Given a NODE, and a subset of its CALLERS, try to populate blanks slots in
   KNOWN_VALS with constants and types that are also known for all of the
   CALLERS.  A slot is filled only if every caller in the subset agrees on
   the same value.  */

static void
find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
					    vec<tree> known_vals,
					    vec<cgraph_edge_p> callers)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);

  for (i = 0; i < count ; i++)
    {
      struct cgraph_edge *cs;
      tree newval = NULL_TREE;
      int j;

      /* Skip parameters that are bottom or already known.  */
      if (ipa_get_scalar_lat (info, i)->bottom || known_vals[i])
	continue;

      FOR_EACH_VEC_ELT (callers, j, cs)
	{
	  struct ipa_jump_func *jump_func;
	  tree t;

	  /* An edge passing fewer arguments cannot supply this one.  */
	  if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
	    {
	      newval = NULL_TREE;
	      break;
	    }
	  jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
	  t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func);
	  /* Any caller with no value, or a disagreeing one, kills the
	     candidate.  */
	  if (!t
	      || (newval
		  && !values_equal_for_ipcp_p (t, newval)))
	    {
	      newval = NULL_TREE;
	      break;
	    }
	  else
	    newval = t;
	}

      if (newval)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " adding an extra known scalar value ");
	      print_ipcp_constant_value (dump_file, newval);
	      fprintf (dump_file, " for ");
	      ipa_dump_param (dump_file, info, i);
	      fprintf (dump_file, "\n");
	    }

	  known_vals[i] = newval;
	}
    }
}
2776 /* Go through PLATS and create a vector of values consisting of values and
2777 offsets (minus OFFSET) of lattices that contain only a single value. */
2779 static vec<ipa_agg_jf_item>
2780 copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
2782 vec<ipa_agg_jf_item> res = vNULL;
2784 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
2785 return vNULL;
2787 for (struct ipcp_agg_lattice *aglat = plats->aggs; aglat; aglat = aglat->next)
2788 if (ipa_lat_is_single_const (aglat))
2790 struct ipa_agg_jf_item ti;
2791 ti.offset = aglat->offset - offset;
2792 ti.value = aglat->values->value;
2793 res.safe_push (ti);
2795 return res;
/* Intersect all values in INTER with single value lattices in PLATS (while
   subtracting OFFSET).  Items of INTER with no matching lattice value get
   their value cleared; a fully unusable PLATS releases INTER entirely.  */

static void
intersect_with_plats (struct ipcp_param_lattices *plats,
		      vec<ipa_agg_jf_item> *inter,
		      HOST_WIDE_INT offset)
{
  struct ipcp_agg_lattice *aglat;
  struct ipa_agg_jf_item *item;
  int k;

  if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
    {
      inter->release ();
      return;
    }

  /* NOTE: aglat deliberately advances across iterations of the outer loop —
     both the lattice list and INTER appear to be kept sorted by offset, so
     the scan is a single merge-like pass.  */
  aglat = plats->aggs;
  FOR_EACH_VEC_ELT (*inter, k, item)
    {
      bool found = false;
      if (!item->value)
	continue;
      while (aglat)
	{
	  if (aglat->offset - offset > item->offset)
	    break;
	  if (aglat->offset - offset == item->offset)
	    {
	      gcc_checking_assert (item->value);
	      if (values_equal_for_ipcp_p (item->value, aglat->values->value))
		found = true;
	      break;
	    }
	  aglat = aglat->next;
	}
      if (!found)
	item->value = NULL_TREE;
    }
}
2840 /* Copy agggregate replacement values of NODE (which is an IPA-CP clone) to the
2841 vector result while subtracting OFFSET from the individual value offsets. */
2843 static vec<ipa_agg_jf_item>
2844 agg_replacements_to_vector (struct cgraph_node *node, int index,
2845 HOST_WIDE_INT offset)
2847 struct ipa_agg_replacement_value *av;
2848 vec<ipa_agg_jf_item> res = vNULL;
2850 for (av = ipa_get_agg_replacements_for_node (node); av; av = av->next)
2851 if (av->index == index
2852 && (av->offset - offset) >= 0)
2854 struct ipa_agg_jf_item item;
2855 gcc_checking_assert (av->value);
2856 item.offset = av->offset - offset;
2857 item.value = av->value;
2858 res.safe_push (item);
2861 return res;
2864 /* Intersect all values in INTER with those that we have already scheduled to
2865 be replaced in parameter number INDEX of NODE, which is an IPA-CP clone
2866 (while subtracting OFFSET). */
2868 static void
2869 intersect_with_agg_replacements (struct cgraph_node *node, int index,
2870 vec<ipa_agg_jf_item> *inter,
2871 HOST_WIDE_INT offset)
2873 struct ipa_agg_replacement_value *srcvals;
2874 struct ipa_agg_jf_item *item;
2875 int i;
2877 srcvals = ipa_get_agg_replacements_for_node (node);
2878 if (!srcvals)
2880 inter->release ();
2881 return;
2884 FOR_EACH_VEC_ELT (*inter, i, item)
2886 struct ipa_agg_replacement_value *av;
2887 bool found = false;
2888 if (!item->value)
2889 continue;
2890 for (av = srcvals; av; av = av->next)
2892 gcc_checking_assert (av->value);
2893 if (av->index == index
2894 && av->offset - offset == item->offset)
2896 if (values_equal_for_ipcp_p (item->value, av->value))
2897 found = true;
2898 break;
2901 if (!found)
2902 item->value = NULL_TREE;
/* Intersect values in INTER with aggregate values that come along edge CS to
   parameter number INDEX and return it.  If INTER does not actually exist yet,
   copy all incoming values to it.  If we determine we ended up with no values
   whatsoever, return a released vector.  */

static vec<ipa_agg_jf_item>
intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
				vec<ipa_agg_jf_item> inter)
{
  struct ipa_jump_func *jfunc;
  jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), index);
  if (jfunc->type == IPA_JF_PASS_THROUGH
      && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
    {
      /* The whole aggregate of a caller parameter is passed through
	 unchanged.  */
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);

      if (caller_info->ipcp_orig_node)
	{
	  /* Caller is itself a clone: intersect with the replacements
	     scheduled for it, checking permissibility against the lattices
	     of the node it was cloned from.  */
	  struct cgraph_node *orig_node = caller_info->ipcp_orig_node;
	  struct ipcp_param_lattices *orig_plats;
	  orig_plats = ipa_get_parm_lattices (IPA_NODE_REF (orig_node),
					      src_idx);
	  if (agg_pass_through_permissible_p (orig_plats, jfunc))
	    {
	      if (!inter.exists ())
		inter = agg_replacements_to_vector (cs->caller, src_idx, 0);
	      else
		intersect_with_agg_replacements (cs->caller, src_idx,
						 &inter, 0);
	    }
	}
      else
	{
	  /* Unspecialized caller: intersect with its aggregate lattices.  */
	  struct ipcp_param_lattices *src_plats;
	  src_plats = ipa_get_parm_lattices (caller_info, src_idx);
	  if (agg_pass_through_permissible_p (src_plats, jfunc))
	    {
	      /* Currently we do not produce clobber aggregate jump
		 functions, adjust when we do.  */
	      gcc_checking_assert (!jfunc->agg.items);
	      if (!inter.exists ())
		inter = copy_plats_to_inter (src_plats, 0);
	      else
		intersect_with_plats (src_plats, &inter, 0);
	    }
	}
    }
  else if (jfunc->type == IPA_JF_ANCESTOR
	   && ipa_get_jf_ancestor_agg_preserved (jfunc))
    {
      /* Like the pass-through case, but the passed aggregate lives at a
	 non-zero offset DELTA within the caller's parameter.  */
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
      struct ipcp_param_lattices *src_plats;
      HOST_WIDE_INT delta = ipa_get_jf_ancestor_offset (jfunc);

      if (caller_info->ipcp_orig_node)
	{
	  if (!inter.exists ())
	    inter = agg_replacements_to_vector (cs->caller, src_idx, delta);
	  else
	    intersect_with_agg_replacements (cs->caller, src_idx, &inter,
					     delta);
	}
      else
	{
	  src_plats = ipa_get_parm_lattices (caller_info, src_idx);;
	  /* Currently we do not produce clobber aggregate jump
	     functions, adjust when we do.  */
	  gcc_checking_assert (!src_plats->aggs || !jfunc->agg.items);
	  if (!inter.exists ())
	    inter = copy_plats_to_inter (src_plats, delta);
	  else
	    intersect_with_plats (src_plats, &inter, delta);
	}
    }
  else if (jfunc->agg.items)
    {
      /* Known aggregate parts are listed explicitly in the jump function;
	 either seed INTER with them or intersect item by item.  */
      struct ipa_agg_jf_item *item;
      int k;

      if (!inter.exists ())
	for (unsigned i = 0; i < jfunc->agg.items->length (); i++)
	  inter.safe_push ((*jfunc->agg.items)[i]);
      else
	FOR_EACH_VEC_ELT (inter, k, item)
	  {
	    int l = 0;
	    bool found = false;;

	    if (!item->value)
	      continue;

	    /* jfunc->agg.items appears to be sorted by offset, hence the
	       early break once we have passed item->offset.  */
	    while ((unsigned) l < jfunc->agg.items->length ())
	      {
		struct ipa_agg_jf_item *ti;
		ti = &(*jfunc->agg.items)[l];
		if (ti->offset > item->offset)
		  break;
		if (ti->offset == item->offset)
		  {
		    gcc_checking_assert (ti->value);
		    if (values_equal_for_ipcp_p (item->value,
						 ti->value))
		      found = true;
		    break;
		  }
		l++;
	      }
	    if (!found)
	      item->value = NULL;
	  }
    }
  else
    {
      /* The edge brings no aggregate values at all.  */
      inter.release ();
      return vec<ipa_agg_jf_item>();
    }
  return inter;
}
/* Look at edges in CALLERS and collect all known aggregate values that arrive
   from all of them.  Returns a newly built GC-allocated replacement-value
   chain.  */

static struct ipa_agg_replacement_value *
find_aggregate_values_for_callers_subset (struct cgraph_node *node,
					  vec<cgraph_edge_p> callers)
{
  struct ipa_node_params *dest_info = IPA_NODE_REF (node);
  struct ipa_agg_replacement_value *res = NULL;
  struct cgraph_edge *cs;
  int i, j, count = ipa_get_param_count (dest_info);

  /* Only consider parameters for which every caller passes an argument.  */
  FOR_EACH_VEC_ELT (callers, j, cs)
    {
      int c = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
      if (c < count)
	count = c;
    }

  for (i = 0; i < count ; i++)
    {
      struct cgraph_edge *cs;
      vec<ipa_agg_jf_item> inter = vNULL;
      struct ipa_agg_jf_item *item;
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (dest_info, i);
      int j;

      /* Among other things, the following check should deal with all by_ref
	 mismatches.  */
      if (plats->aggs_bottom)
	continue;

      /* Intersect the aggregate values arriving over every caller edge; an
	 empty intersection short-circuits to the cleanup label.  */
      FOR_EACH_VEC_ELT (callers, j, cs)
	{
	  inter = intersect_aggregates_with_edge (cs, i, inter);

	  if (!inter.exists ())
	    goto next_param;
	}

      /* Surviving items become replacement values, prepended to RES.  */
      FOR_EACH_VEC_ELT (inter, j, item)
	{
	  struct ipa_agg_replacement_value *v;

	  if (!item->value)
	    continue;

	  v = ggc_alloc_ipa_agg_replacement_value ();
	  v->index = i;
	  v->offset = item->offset;
	  v->value = item->value;
	  v->by_ref = plats->aggs_by_ref;
	  v->next = res;
	  res = v;
	}

    next_param:
      if (inter.exists ())
	inter.release ();
    }
  return res;
}
3090 /* Turn KNOWN_AGGS into a list of aggreate replacement values. */
3092 static struct ipa_agg_replacement_value *
3093 known_aggs_to_agg_replacement_list (vec<ipa_agg_jump_function> known_aggs)
3095 struct ipa_agg_replacement_value *res = NULL;
3096 struct ipa_agg_jump_function *aggjf;
3097 struct ipa_agg_jf_item *item;
3098 int i, j;
3100 FOR_EACH_VEC_ELT (known_aggs, i, aggjf)
3101 FOR_EACH_VEC_SAFE_ELT (aggjf->items, j, item)
3103 struct ipa_agg_replacement_value *v;
3104 v = ggc_alloc_ipa_agg_replacement_value ();
3105 v->index = i;
3106 v->offset = item->offset;
3107 v->value = item->value;
3108 v->by_ref = aggjf->by_ref;
3109 v->next = res;
3110 res = v;
3112 return res;
3115 /* Determine whether CS also brings all scalar values that the NODE is
3116 specialized for. */
3118 static bool
3119 cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
3120 struct cgraph_node *node)
3122 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
3123 int count = ipa_get_param_count (dest_info);
3124 struct ipa_node_params *caller_info;
3125 struct ipa_edge_args *args;
3126 int i;
3128 caller_info = IPA_NODE_REF (cs->caller);
3129 args = IPA_EDGE_REF (cs);
3130 for (i = 0; i < count; i++)
3132 struct ipa_jump_func *jump_func;
3133 tree val, t;
3135 val = dest_info->known_vals[i];
3136 if (!val)
3137 continue;
3139 if (i >= ipa_get_cs_argument_count (args))
3140 return false;
3141 jump_func = ipa_get_ith_jump_func (args, i);
3142 t = ipa_value_from_jfunc (caller_info, jump_func);
3143 if (!t || !values_equal_for_ipcp_p (val, t))
3144 return false;
3146 return true;
3149 /* Determine whether CS also brings all aggregate values that NODE is
3150 specialized for. */
3151 static bool
3152 cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
3153 struct cgraph_node *node)
3155 struct ipa_node_params *orig_caller_info = IPA_NODE_REF (cs->caller);
3156 struct ipa_agg_replacement_value *aggval;
3157 int i, ec, count;
3159 aggval = ipa_get_agg_replacements_for_node (node);
3160 if (!aggval)
3161 return true;
3163 count = ipa_get_param_count (IPA_NODE_REF (node));
3164 ec = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
3165 if (ec < count)
3166 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3167 if (aggval->index >= ec)
3168 return false;
3170 if (orig_caller_info->ipcp_orig_node)
3171 orig_caller_info = IPA_NODE_REF (orig_caller_info->ipcp_orig_node);
3173 for (i = 0; i < count; i++)
3175 static vec<ipa_agg_jf_item> values = vec<ipa_agg_jf_item>();
3176 struct ipcp_param_lattices *plats;
3177 bool interesting = false;
3178 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3179 if (aggval->index == i)
3181 interesting = true;
3182 break;
3184 if (!interesting)
3185 continue;
3187 plats = ipa_get_parm_lattices (orig_caller_info, aggval->index);
3188 if (plats->aggs_bottom)
3189 return false;
3191 values = intersect_aggregates_with_edge (cs, i, values);
3192 if (!values.exists ())
3193 return false;
3195 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3196 if (aggval->index == i)
3198 struct ipa_agg_jf_item *item;
3199 int j;
3200 bool found = false;
3201 FOR_EACH_VEC_ELT (values, j, item)
3202 if (item->value
3203 && item->offset == av->offset
3204 && values_equal_for_ipcp_p (item->value, av->value))
3206 found = true;
3207 break;
3209 if (!found)
3211 values.release ();
3212 return false;
3216 return true;
/* Given an original NODE and a VAL for which we have already created a
   specialized clone, look whether there are incoming edges that still lead
   into the old node but now also bring the requested value and also conform to
   all other criteria such that they can be redirected to the special node.
   This function can therefore redirect the final edge in a SCC.  */

static void
perhaps_add_new_callers (struct cgraph_node *node, struct ipcp_value *val)
{
  struct ipcp_value_source *src;
  gcov_type redirected_sum = 0;

  for (src = val->sources; src; src = src->next)
    {
      /* Walk the edge and all its clones.  */
      struct cgraph_edge *cs = src->cs;
      while (cs)
	{
	  enum availability availability;
	  struct cgraph_node *dst = cgraph_function_node (cs->callee,
							  &availability);
	  if ((dst == node || IPA_NODE_REF (dst)->is_all_contexts_clone)
	      && availability > AVAIL_OVERWRITABLE
	      && cgraph_edge_brings_value_p (cs, src))
	    {
	      /* Redirect only if the edge brings everything the clone was
		 specialized for, scalars and aggregates alike.  */
	      if (cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
		  && cgraph_edge_brings_all_agg_vals_for_node (cs,
							       val->spec_node))
		{
		  if (dump_file)
		    /* NOTE(review): the xstrdup'ed names are never freed —
		       a small leak on the dump path.  */
		    fprintf (dump_file, " - adding an extra caller %s/%i"
			     " of %s/%i\n",
			     xstrdup (cs->caller->name ()),
			     cs->caller->order,
			     xstrdup (val->spec_node->name ()),
			     val->spec_node->order);

		  cgraph_redirect_edge_callee (cs, val->spec_node);
		  redirected_sum += cs->count;
		}
	    }
	  cs = get_next_cgraph_edge_clone (cs);
	}
    }

  if (redirected_sum)
    update_specialized_profile (val->spec_node, node, redirected_sum);
}
3268 /* Copy KNOWN_BINFOS to KNOWN_VALS. */
3270 static void
3271 move_binfos_to_values (vec<tree> known_vals,
3272 vec<tree> known_binfos)
3274 tree t;
3275 int i;
3277 for (i = 0; known_binfos.iterate (i, &t); i++)
3278 if (t)
3279 known_vals[i] = t;
3282 /* Return true if there is a replacement equivalent to VALUE, INDEX and OFFSET
3283 among those in the AGGVALS list. */
3285 DEBUG_FUNCTION bool
3286 ipcp_val_in_agg_replacements_p (struct ipa_agg_replacement_value *aggvals,
3287 int index, HOST_WIDE_INT offset, tree value)
3289 while (aggvals)
3291 if (aggvals->index == index
3292 && aggvals->offset == offset
3293 && values_equal_for_ipcp_p (aggvals->value, value))
3294 return true;
3295 aggvals = aggvals->next;
3297 return false;
/* Decide whether to create a special version of NODE for value VAL of parameter
   at the given INDEX.  If OFFSET is -1, the value is for the parameter itself,
   otherwise it is stored at the given OFFSET of the parameter.  KNOWN_CSTS,
   KNOWN_BINFOS and KNOWN_AGGS describe the other already known values.
   Returns true if a new clone was created.  */

static bool
decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
		    struct ipcp_value *val, vec<tree> known_csts,
		    vec<tree> known_binfos)
{
  struct ipa_agg_replacement_value *aggvals;
  int freq_sum, caller_count;
  gcov_type count_sum;
  vec<cgraph_edge_p> callers;
  vec<tree> kv;

  if (val->spec_node)
    {
      /* A clone for this value already exists; just try to attract more
	 callers to it.  */
      perhaps_add_new_callers (node, val);
      return false;
    }
  else if (val->local_size_cost + overall_size > max_new_size)
    {
      /* Cloning budget for the unit is exhausted.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " Ignoring candidate value because "
		 "max_new_size would be reached with %li.\n",
		 val->local_size_cost + overall_size);
      return false;
    }
  else if (!get_info_about_necessary_edges (val, &freq_sum, &count_sum,
					    &caller_count))
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, " - considering value ");
      print_ipcp_constant_value (dump_file, val->value);
      fprintf (dump_file, " for ");
      ipa_dump_param (dump_file, IPA_NODE_REF (node), index);
      if (offset != -1)
	fprintf (dump_file, ", offset: " HOST_WIDE_INT_PRINT_DEC, offset);
      fprintf (dump_file, " (caller_count: %i)\n", caller_count);
    }

  /* Evaluate profitability twice: with local benefits only, and with
     propagated benefits added in; either passing suffices.  */
  if (!good_cloning_opportunity_p (node, val->local_time_benefit,
				   freq_sum, count_sum,
				   val->local_size_cost)
      && !good_cloning_opportunity_p (node,
				      val->local_time_benefit
				      + val->prop_time_benefit,
				      freq_sum, count_sum,
				      val->local_size_cost
				      + val->prop_size_cost))
    return false;

  if (dump_file)
    fprintf (dump_file, " Creating a specialized node of %s/%i.\n",
	     node->name (), node->order);

  /* Assemble the full set of known values for the clone: the already known
     constants, binfos, this VAL itself, plus anything extra the chosen
     caller subset agrees on.  */
  callers = gather_edges_for_value (val, caller_count);
  kv = known_csts.copy ();
  move_binfos_to_values (kv, known_binfos);
  if (offset == -1)
    kv[index] = val->value;
  find_more_scalar_values_for_callers_subset (node, kv, callers);
  aggvals = find_aggregate_values_for_callers_subset (node, callers);
  gcc_checking_assert (offset == -1
		       || ipcp_val_in_agg_replacements_p (aggvals, index,
							  offset, val->value));
  val->spec_node = create_specialized_node (node, kv, aggvals, callers);
  overall_size += val->local_size_cost;

  /* TODO: If for some lattice there is only one other known value
     left, make a special node for it too. */

  return true;
}
/* Decide whether and what specialized clones of NODE should be created.
   Returns true if any clone was made.  */

static bool
decide_whether_version_node (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);
  vec<tree> known_csts, known_binfos;
  vec<ipa_agg_jump_function> known_aggs = vNULL;
  bool ret = false;

  if (count == 0)
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEvaluating opportunities for %s/%i.\n",
	     node->name (), node->order);

  /* Aggregates are only needed below when cloning for all contexts, so do
     not bother computing them otherwise.  */
  gather_context_independent_values (info, &known_csts, &known_binfos,
				  info->do_clone_for_all_contexts ? &known_aggs
				  : NULL, NULL);

  /* Consider every lattice value of every parameter, both the scalar value
     itself and all known aggregate-part values.  */
  for (i = 0; i < count ;i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      struct ipcp_lattice *lat = &plats->itself;
      struct ipcp_value *val;

      /* Skip scalar lattices that are bottom or whose single value is
	 already context-independently known.  */
      if (!lat->bottom
	  && !known_csts[i]
	  && !known_binfos[i])
	for (val = lat->values; val; val = val->next)
	  ret |= decide_about_value (node, i, -1, val, known_csts,
				     known_binfos);

      if (!plats->aggs_bottom)
	{
	  struct ipcp_agg_lattice *aglat;
	  struct ipcp_value *val;
	  for (aglat = plats->aggs; aglat; aglat = aglat->next)
	    if (!aglat->bottom && aglat->values
		/* If the following is false, the one value is in
		   known_aggs.  */
		&& (plats->aggs_contain_variable
		    || !ipa_lat_is_single_const (aglat)))
	      for (val = aglat->values; val; val = val->next)
		ret |= decide_about_value (node, i, aglat->offset, val,
					   known_csts, known_binfos);
	}
      /* decide_about_value may have created clones, which can reallocate
	 the node params vector — refetch INFO.  */
      info = IPA_NODE_REF (node);
    }

  if (info->do_clone_for_all_contexts)
    {
      struct cgraph_node *clone;
      vec<cgraph_edge_p> callers;

      if (dump_file)
	fprintf (dump_file, " - Creating a specialized node of %s/%i "
		 "for all known contexts.\n", node->name (),
		 node->order);

      callers = collect_callers_of_node (node);
      move_binfos_to_values (known_csts, known_binfos);
      /* Ownership of KNOWN_CSTS passes to the new clone here, so it must
	 not be released on this path.  */
      clone = create_specialized_node (node, known_csts,
			   known_aggs_to_agg_replacement_list (known_aggs),
			   callers);
      info = IPA_NODE_REF (node);
      info->do_clone_for_all_contexts = false;
      IPA_NODE_REF (clone)->is_all_contexts_clone = true;
      for (i = 0; i < count ; i++)
	vec_free (known_aggs[i].items);
      known_aggs.release ();
      ret = true;
    }
  else
    known_csts.release ();

  known_binfos.release ();
  return ret;
}
3460 /* Transitively mark all callees of NODE within the same SCC as not dead. */
3462 static void
3463 spread_undeadness (struct cgraph_node *node)
3465 struct cgraph_edge *cs;
3467 for (cs = node->callees; cs; cs = cs->next_callee)
3468 if (ipa_edge_within_scc (cs))
3470 struct cgraph_node *callee;
3471 struct ipa_node_params *info;
3473 callee = cgraph_function_node (cs->callee, NULL);
3474 info = IPA_NODE_REF (callee);
3476 if (info->node_dead)
3478 info->node_dead = 0;
3479 spread_undeadness (callee);
3484 /* Return true if NODE has a caller from outside of its SCC that is not
3485 dead. Worker callback for cgraph_for_node_and_aliases. */
3487 static bool
3488 has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
3489 void *data ATTRIBUTE_UNUSED)
3491 struct cgraph_edge *cs;
3493 for (cs = node->callers; cs; cs = cs->next_caller)
3494 if (cs->caller->thunk.thunk_p
3495 && cgraph_for_node_and_aliases (cs->caller,
3496 has_undead_caller_from_outside_scc_p,
3497 NULL, true))
3498 return true;
3499 else if (!ipa_edge_within_scc (cs)
3500 && !IPA_NODE_REF (cs->caller)->node_dead)
3501 return true;
3502 return false;
3506 /* Identify nodes within the same SCC as NODE which are no longer needed
3507 because of new clones and will be removed as unreachable. */
3509 static void
3510 identify_dead_nodes (struct cgraph_node *node)
3512 struct cgraph_node *v;
3513 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
3514 if (cgraph_will_be_removed_from_program_if_no_direct_calls (v)
3515 && !cgraph_for_node_and_aliases (v,
3516 has_undead_caller_from_outside_scc_p,
3517 NULL, true))
3518 IPA_NODE_REF (v)->node_dead = 1;
3520 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
3521 if (!IPA_NODE_REF (v)->node_dead)
3522 spread_undeadness (v);
3524 if (dump_file && (dump_flags & TDF_DETAILS))
3526 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
3527 if (IPA_NODE_REF (v)->node_dead)
3528 fprintf (dump_file, " Marking node as dead: %s/%i.\n",
3529 v->name (), v->order);
3533 /* The decision stage. Iterate over the topological order of call graph nodes
3534 TOPO and make specialized clones if deemed beneficial. */
3536 static void
3537 ipcp_decision_stage (struct topo_info *topo)
3539 int i;
3541 if (dump_file)
3542 fprintf (dump_file, "\nIPA decision stage:\n\n");
3544 for (i = topo->nnodes - 1; i >= 0; i--)
3546 struct cgraph_node *node = topo->order[i];
3547 bool change = false, iterate = true;
3549 while (iterate)
3551 struct cgraph_node *v;
3552 iterate = false;
3553 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
3554 if (cgraph_function_with_gimple_body_p (v)
3555 && ipcp_versionable_function_p (v))
3556 iterate |= decide_whether_version_node (v);
3558 change |= iterate;
3560 if (change)
3561 identify_dead_nodes (node);
/* The IPCP driver.  Sets up pass-wide data structures, runs propagation
   and the cloning decision stage, then tears everything down again.  */

static unsigned int
ipcp_driver (void)
{
  struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
  struct topo_info topo;

  /* Make sure per-node and per-edge IPA summaries exist.  */
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  grow_next_edge_clone_vector ();
  /* Track edge duplication so clone edges can be chased later.  */
  edge_duplication_hook_holder =
    cgraph_add_edge_duplication_hook (&ipcp_edge_duplication_hook, NULL);
  /* Allocation pools for the lattice machinery used during propagation.  */
  ipcp_values_pool = create_alloc_pool ("IPA-CP values",
					sizeof (struct ipcp_value), 32);
  ipcp_sources_pool = create_alloc_pool ("IPA-CP value sources",
					 sizeof (struct ipcp_value_source), 64);
  ipcp_agg_lattice_pool = create_alloc_pool ("IPA_CP aggregate lattices",
					     sizeof (struct ipcp_agg_lattice),
					     32);
  if (dump_file)
    {
      fprintf (dump_file, "\nIPA structures before propagation:\n");
      if (dump_flags & TDF_DETAILS)
	ipa_print_all_params (dump_file);
      ipa_print_all_jump_functions (dump_file);
    }

  /* Topological sort.  */
  build_toporder_info (&topo);
  /* Do the interprocedural propagation.  */
  ipcp_propagate_stage (&topo);
  /* Decide what constant propagation and cloning should be performed.  */
  ipcp_decision_stage (&topo);

  /* Free all IPCP structures.  */
  free_toporder_info (&topo);
  next_edge_clone.release ();
  cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
  ipa_free_all_structures_after_ipa_cp ();
  if (dump_file)
    fprintf (dump_file, "\nIPA constant propagation end\n");
  return 0;
}
3610 /* Initialization and computation of IPCP data structures. This is the initial
3611 intraprocedural analysis of functions, which gathers information to be
3612 propagated later on. */
3614 static void
3615 ipcp_generate_summary (void)
3617 struct cgraph_node *node;
3619 if (dump_file)
3620 fprintf (dump_file, "\nIPA constant propagation start:\n");
3621 ipa_register_cgraph_hooks ();
3623 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
3625 node->local.versionable
3626 = tree_versionable_function_p (node->decl);
3627 ipa_analyze_node (node);
3631 /* Write ipcp summary for nodes in SET. */
3633 static void
3634 ipcp_write_summary (void)
3636 ipa_prop_write_jump_functions ();
3639 /* Read ipcp summary. */
3641 static void
3642 ipcp_read_summary (void)
3644 ipa_prop_read_jump_functions ();
3647 /* Gate for IPCP optimization. */
3649 static bool
3650 cgraph_gate_cp (void)
3652 /* FIXME: We should remove the optimize check after we ensure we never run
3653 IPA passes when not optimizing. */
3654 return flag_ipa_cp && optimize;
namespace {

/* Pass-manager metadata for the IPA-CP pass.  The initializer order is
   positional and must match struct pass_data.  */
const pass_data pass_data_ipa_cp =
{
  IPA_PASS, /* type */
  "cp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_CONSTANT_PROP, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab | TODO_remove_functions ), /* todo_flags_finish */
};

/* The IPA-CP pass object: wires the summary generation, LTO streaming and
   per-function transform callbacks into the IPA pass framework.  */
class pass_ipa_cp : public ipa_opt_pass_d
{
public:
  pass_ipa_cp (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cp, ctxt,
		      ipcp_generate_summary, /* generate_summary */
		      ipcp_write_summary, /* write_summary */
		      ipcp_read_summary, /* read_summary */
		      ipa_prop_write_all_agg_replacement, /*
		      write_optimization_summary */
		      ipa_prop_read_all_agg_replacement, /*
		      read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      ipcp_transform_function, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return cgraph_gate_cp (); }
  unsigned int execute () { return ipcp_driver (); }

}; // class pass_ipa_cp

} // anon namespace
3700 ipa_opt_pass_d *
3701 make_pass_ipa_cp (gcc::context *ctxt)
3703 return new pass_ipa_cp (ctxt);