gcc/ipa-cp.c
1 /* Interprocedural constant propagation
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
5 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
6 <mjambor@suse.cz>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 /* Interprocedural constant propagation (IPA-CP).
26 The goal of this transformation is to
28 1) discover functions which are always invoked with some arguments with the
29 same known constant values and modify the functions so that the
30 subsequent optimizations can take advantage of the knowledge, and
32 2) partial specialization - create specialized versions of functions
33 transformed in this way if some parameters are known constants only in
34      certain contexts but the estimated tradeoff between speedup and size cost
35 is deemed good.
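
   As a small illustration (a made-up snippet, not code from GCC itself):

     static int scale (int x, int factor) { return x * factor; }

     int twice (int x)  { return scale (x, 2); }
     int thrice (int x) { return scale (x, 3); }

   If every call in the program passed FACTOR == 2, case 1) would apply and
   scale itself could be optimized for that value.  With the two callers
   above, case 2) may instead create one specialized clone of scale for
   FACTOR == 2 and another for FACTOR == 3, provided the estimated tradeoff
   is judged profitable.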
37 The algorithm also propagates types and attempts to perform type based
38 devirtualization. Types are propagated much like constants.
40 The algorithm basically consists of three stages. In the first, functions
41 are analyzed one at a time and jump functions are constructed for all known
42 call-sites. In the second phase, the pass propagates information from the
43 jump functions across the call to reveal what values are available at what
44 call sites, performs estimations of effects of known values on functions and
45 their callees, and finally decides what specialized extra versions should be
46 created. In the third, the special versions materialize and appropriate
47 calls are redirected.
49 The algorithm used is to a certain extent based on "Interprocedural Constant
50 Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
51 Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
52 Cooper, Mary W. Hall, and Ken Kennedy.
55 First stage - intraprocedural analysis
56 =======================================
58 This phase computes jump_function and modification flags.
60    A jump function for a call-site represents the values passed as actual
61    arguments of a given call-site. In principle, there are three types of
62 values:
64 Pass through - the caller's formal parameter is passed as an actual
65 argument, plus an operation on it can be performed.
66 Constant - a constant is passed as an actual argument.
67 Unknown - neither of the above.
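
   For instance, in the following made-up fragment (an illustration only, not
   code from GCC):

     extern int bar (int, int, int);
     extern int opaque (void);

     int foo (int i)
     {
       return bar (i, i + 4, 7) + bar (opaque (), 5, 5);
     }

   the first call to bar gets a pass-through jump function for its first
   argument (foo's parameter i), an arithmetic pass-through (i + 4) for its
   second argument and a constant (7) for its third, whereas the first
   argument of the second call is unknown because nothing is known about the
   value opaque () returns.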
69 All jump function types are described in detail in ipa-prop.h, together with
70 the data structures that represent them and methods of accessing them.
72 ipcp_generate_summary() is the main function of the first stage.
74 Second stage - interprocedural analysis
75 ========================================
77 This stage is itself divided into two phases. In the first, we propagate
78 known values over the call graph, in the second, we make cloning decisions.
79    It uses a different algorithm than the one in Callahan's original paper.
81 First, we traverse the functions topologically from callers to callees and,
82 for each strongly connected component (SCC), we propagate constants
83 according to previously computed jump functions. We also record what known
84 values depend on other known values and estimate local effects. Finally, we
85 propagate cumulative information about these effects from dependent values
86 to those on which they depend.
88 Second, we again traverse the call graph in the same topological order and
89 make clones for functions which we know are called with the same values in
90 all contexts and decide about extra specialized clones of functions just for
91 some contexts - these decisions are based on both local estimates and
92 cumulative estimates propagated from callees.
94 ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
95    second stage.
97    Third stage - materialization of clones, call statement updates.
98    ================================================================
100 This stage is currently performed by call graph code (mainly in cgraphunit.c
101    and tree-inline.c) according to instructions inserted into the call graph by
102 the second stage. */
104 #include "config.h"
105 #include "system.h"
106 #include "coretypes.h"
107 #include "tree.h"
108 #include "target.h"
109 #include "gimple.h"
110 #include "cgraph.h"
111 #include "ipa-prop.h"
112 #include "tree-flow.h"
113 #include "tree-pass.h"
114 #include "flags.h"
115 #include "diagnostic.h"
116 #include "tree-pretty-print.h"
117 #include "tree-inline.h"
118 #include "params.h"
119 #include "ipa-inline.h"
120 #include "ipa-utils.h"
122 struct ipcp_value;
124 /* Describes a particular source for an IPA-CP value. */
126 struct ipcp_value_source
128   /* Aggregate offset of the source, negative if the source is the scalar
129      value of the argument itself. */
130 HOST_WIDE_INT offset;
131 /* The incoming edge that brought the value. */
132 struct cgraph_edge *cs;
133   /* If the jump function that resulted in this value was a pass-through or an
134 ancestor, this is the ipcp_value of the caller from which the described
135 value has been derived. Otherwise it is NULL. */
136 struct ipcp_value *val;
137 /* Next pointer in a linked list of sources of a value. */
138 struct ipcp_value_source *next;
139   /* If the jump function that resulted in this value was a pass-through or an
140 ancestor, this is the index of the parameter of the caller the jump
141 function references. */
142 int index;
145 /* Describes one particular value stored in struct ipcp_lattice. */
147 struct ipcp_value
149 /* The actual value for the given parameter. This is either an IPA invariant
150 or a TREE_BINFO describing a type that can be used for
151 devirtualization. */
152 tree value;
153 /* The list of sources from which this value originates. */
154 struct ipcp_value_source *sources;
155 /* Next pointers in a linked list of all values in a lattice. */
156 struct ipcp_value *next;
157 /* Next pointers in a linked list of values in a strongly connected component
158 of values. */
159 struct ipcp_value *scc_next;
160 /* Next pointers in a linked list of SCCs of values sorted topologically
161      according to their sources. */
162 struct ipcp_value *topo_next;
163 /* A specialized node created for this value, NULL if none has been (so far)
164 created. */
165 struct cgraph_node *spec_node;
166 /* Depth first search number and low link for topological sorting of
167 values. */
168 int dfs, low_link;
169 /* Time benefit and size cost that specializing the function for this value
170 would bring about in this function alone. */
171 int local_time_benefit, local_size_cost;
172 /* Time benefit and size cost that specializing the function for this value
173      can bring about in its callees (transitively). */
174 int prop_time_benefit, prop_size_cost;
175   /* True if this value is currently on the topo-sort stack. */
176 bool on_stack;
179 /* Lattice describing potential values of a formal parameter of a function, or
180    a part of an aggregate. TOP is represented by a lattice with zero values
181 and with contains_variable and bottom flags cleared. BOTTOM is represented
182 by a lattice with the bottom flag set. In that case, values and
183 contains_variable flag should be disregarded. */
185 struct ipcp_lattice
187 /* The list of known values and types in this lattice. Note that values are
188 not deallocated if a lattice is set to bottom because there may be value
189 sources referencing them. */
190 struct ipcp_value *values;
191 /* Number of known values and types in this lattice. */
192 int values_count;
193 /* The lattice contains a variable component (in addition to values). */
194 bool contains_variable;
195 /* The value of the lattice is bottom (i.e. variable and unusable for any
196 propagation). */
197 bool bottom;
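/* A compact illustration of the lattice states described above (the list of
   combinations is not exhaustive):

     state            values_count   contains_variable   bottom
     TOP                   0               false         false
     known value(s)       >0               false         false
     VARIABLE             any              true          false
     BOTTOM             ignored          ignored         true   */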
200 /* Lattice with an offset to describe a part of an aggregate. */
202 struct ipcp_agg_lattice : public ipcp_lattice
204 /* Offset that is being described by this lattice. */
205 HOST_WIDE_INT offset;
206 /* Size so that we don't have to re-compute it every time we traverse the
207 list. Must correspond to TYPE_SIZE of all lat values. */
208 HOST_WIDE_INT size;
209 /* Next element of the linked list. */
210 struct ipcp_agg_lattice *next;
213 /* Structure containing lattices for a parameter itself and for pieces of
214 aggregates that are passed in the parameter or by a reference in a parameter
215 plus some other useful flags. */
217 struct ipcp_param_lattices
219 /* Lattice describing the value of the parameter itself. */
220 struct ipcp_lattice itself;
221 /* Lattices describing aggregate parts. */
222 struct ipcp_agg_lattice *aggs;
223   /* Number of aggregate lattices.  */
224 int aggs_count;
225 /* True if aggregate data were passed by reference (as opposed to by
226 value). */
227 bool aggs_by_ref;
228 /* All aggregate lattices contain a variable component (in addition to
229 values). */
230 bool aggs_contain_variable;
231 /* The value of all aggregate lattices is bottom (i.e. variable and unusable
232 for any propagation). */
233 bool aggs_bottom;
235 /* There is a virtual call based on this parameter. */
236 bool virt_call;
239 /* Allocation pools for values and their sources in ipa-cp. */
241 alloc_pool ipcp_values_pool;
242 alloc_pool ipcp_sources_pool;
243 alloc_pool ipcp_agg_lattice_pool;
245 /* Maximal count found in program. */
247 static gcov_type max_count;
249 /* Original overall size of the program. */
251 static long overall_size, max_new_size;
253 /* Head of the linked list of topologically sorted values. */
255 static struct ipcp_value *values_topo;
257 /* Return the param lattices structure corresponding to the Ith formal
258 parameter of the function described by INFO. */
259 static inline struct ipcp_param_lattices *
260 ipa_get_parm_lattices (struct ipa_node_params *info, int i)
262 gcc_assert (i >= 0 && i < ipa_get_param_count (info));
263 gcc_checking_assert (!info->ipcp_orig_node);
264 gcc_checking_assert (info->lattices);
265 return &(info->lattices[i]);
268 /* Return the lattice corresponding to the scalar value of the Ith formal
269 parameter of the function described by INFO. */
270 static inline struct ipcp_lattice *
271 ipa_get_scalar_lat (struct ipa_node_params *info, int i)
273 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
274 return &plats->itself;
277 /* Return whether LAT is a lattice with a single constant and without an
278 undefined value. */
280 static inline bool
281 ipa_lat_is_single_const (struct ipcp_lattice *lat)
283 if (lat->bottom
284 || lat->contains_variable
285 || lat->values_count != 1)
286 return false;
287 else
288 return true;
291 /* Return true iff the CS is an edge within a strongly connected component as
292 computed by ipa_reduced_postorder. */
294 static inline bool
295 edge_within_scc (struct cgraph_edge *cs)
297 struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->symbol.aux;
298 struct ipa_dfs_info *callee_dfs;
299 struct cgraph_node *callee = cgraph_function_node (cs->callee, NULL);
301 callee_dfs = (struct ipa_dfs_info *) callee->symbol.aux;
302 return (caller_dfs
303 && callee_dfs
304 && caller_dfs->scc_no == callee_dfs->scc_no);
307 /* Print V which is extracted from a value in a lattice to F. */
309 static void
310 print_ipcp_constant_value (FILE * f, tree v)
312 if (TREE_CODE (v) == TREE_BINFO)
314 fprintf (f, "BINFO ");
315 print_generic_expr (f, BINFO_TYPE (v), 0);
317 else if (TREE_CODE (v) == ADDR_EXPR
318 && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
320 fprintf (f, "& ");
321 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)), 0);
323 else
324 print_generic_expr (f, v, 0);
327 /* Print a lattice LAT to F. */
329 static void
330 print_lattice (FILE * f, struct ipcp_lattice *lat,
331 bool dump_sources, bool dump_benefits)
333 struct ipcp_value *val;
334 bool prev = false;
336 if (lat->bottom)
338 fprintf (f, "BOTTOM\n");
339 return;
342 if (!lat->values_count && !lat->contains_variable)
344 fprintf (f, "TOP\n");
345 return;
348 if (lat->contains_variable)
350 fprintf (f, "VARIABLE");
351 prev = true;
352 if (dump_benefits)
353 fprintf (f, "\n");
356 for (val = lat->values; val; val = val->next)
358 if (dump_benefits && prev)
359 fprintf (f, " ");
360 else if (!dump_benefits && prev)
361 fprintf (f, ", ");
362 else
363 prev = true;
365 print_ipcp_constant_value (f, val->value);
367 if (dump_sources)
369 struct ipcp_value_source *s;
371 fprintf (f, " [from:");
372 for (s = val->sources; s; s = s->next)
373 	  fprintf (f, " %i(%i)", s->cs->caller->uid, s->cs->frequency);
374 fprintf (f, "]");
377 if (dump_benefits)
378 fprintf (f, " [loc_time: %i, loc_size: %i, "
379 "prop_time: %i, prop_size: %i]\n",
380 val->local_time_benefit, val->local_size_cost,
381 val->prop_time_benefit, val->prop_size_cost);
383 if (!dump_benefits)
384 fprintf (f, "\n");
387 /* Print all ipcp_lattices of all functions to F. */
389 static void
390 print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
392 struct cgraph_node *node;
393 int i, count;
395 fprintf (f, "\nLattices:\n");
396 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
398 struct ipa_node_params *info;
400 info = IPA_NODE_REF (node);
401 fprintf (f, " Node: %s/%i:\n", cgraph_node_name (node), node->uid);
402 count = ipa_get_param_count (info);
403 for (i = 0; i < count; i++)
405 struct ipcp_agg_lattice *aglat;
406 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
407 fprintf (f, " param [%d]: ", i);
408 print_lattice (f, &plats->itself, dump_sources, dump_benefits);
410 if (plats->virt_call)
411 fprintf (f, " virt_call flag set\n");
413 if (plats->aggs_bottom)
415 fprintf (f, " AGGS BOTTOM\n");
416 continue;
418 if (plats->aggs_contain_variable)
419 fprintf (f, " AGGS VARIABLE\n");
420 for (aglat = plats->aggs; aglat; aglat = aglat->next)
422 fprintf (f, " %soffset " HOST_WIDE_INT_PRINT_DEC ": ",
423 plats->aggs_by_ref ? "ref " : "", aglat->offset);
424 print_lattice (f, aglat, dump_sources, dump_benefits);
430 /* Determine whether it is at all technically possible to create clones of NODE
431 and store this information in the ipa_node_params structure associated
432 with NODE. */
434 static void
435 determine_versionability (struct cgraph_node *node)
437 const char *reason = NULL;
439 /* There are a number of generic reasons functions cannot be versioned. We
440 also cannot remove parameters if there are type attributes such as fnspec
441 present. */
442 if (node->alias || node->thunk.thunk_p)
443 reason = "alias or thunk";
444 else if (!node->local.versionable)
445 reason = "not a tree_versionable_function";
446 else if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
447 reason = "insufficient body availability";
449 if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
450 fprintf (dump_file, "Function %s/%i is not versionable, reason: %s.\n",
451 cgraph_node_name (node), node->uid, reason);
453 node->local.versionable = (reason == NULL);
456 /* Return true if it is at all technically possible to create clones of a
457 NODE. */
459 static bool
460 ipcp_versionable_function_p (struct cgraph_node *node)
462 return node->local.versionable;
465 /* Structure holding accumulated information about callers of a node. */
467 struct caller_statistics
469 gcov_type count_sum;
470 int n_calls, n_hot_calls, freq_sum;
473 /* Initialize fields of STAT to zeroes. */
475 static inline void
476 init_caller_stats (struct caller_statistics *stats)
478 stats->count_sum = 0;
479 stats->n_calls = 0;
480 stats->n_hot_calls = 0;
481 stats->freq_sum = 0;
484 /* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
485 non-thunk incoming edges to NODE. */
487 static bool
488 gather_caller_stats (struct cgraph_node *node, void *data)
490 struct caller_statistics *stats = (struct caller_statistics *) data;
491 struct cgraph_edge *cs;
493 for (cs = node->callers; cs; cs = cs->next_caller)
494 if (cs->caller->thunk.thunk_p)
495 cgraph_for_node_and_aliases (cs->caller, gather_caller_stats,
496 stats, false);
497 else
499 stats->count_sum += cs->count;
500 stats->freq_sum += cs->frequency;
501 stats->n_calls++;
502 if (cgraph_maybe_hot_edge_p (cs))
503 stats->n_hot_calls ++;
505 return false;
509 /* Return true if this NODE is a viable candidate for cloning. */
511 static bool
512 ipcp_cloning_candidate_p (struct cgraph_node *node)
514 struct caller_statistics stats;
516 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
518 if (!flag_ipa_cp_clone)
520 if (dump_file)
521 fprintf (dump_file, "Not considering %s for cloning; "
522 "-fipa-cp-clone disabled.\n",
523 cgraph_node_name (node));
524 return false;
527 if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->symbol.decl)))
529 if (dump_file)
530 fprintf (dump_file, "Not considering %s for cloning; "
531 "optimizing it for size.\n",
532 cgraph_node_name (node));
533 return false;
536 init_caller_stats (&stats);
537 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
539 if (inline_summary (node)->self_size < stats.n_calls)
541 if (dump_file)
542 fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
543 cgraph_node_name (node));
544 return true;
547   /* When profile feedback is available and the function is hot, propagate into
548      it even if the calls seem cold; constant propagation can improve the
549      function's speed significantly. */
550 if (max_count)
552 if (stats.count_sum > node->count * 90 / 100)
554 if (dump_file)
555 fprintf (dump_file, "Considering %s for cloning; "
556 "usually called directly.\n",
557 cgraph_node_name (node));
558 return true;
561 if (!stats.n_hot_calls)
563 if (dump_file)
564 fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
565 cgraph_node_name (node));
566 return false;
568 if (dump_file)
569 fprintf (dump_file, "Considering %s for cloning.\n",
570 cgraph_node_name (node));
571 return true;
574 /* Arrays representing a topological ordering of call graph nodes and a stack
575    of nodes used during constant propagation. */
577 struct topo_info
579 struct cgraph_node **order;
580 struct cgraph_node **stack;
581 int nnodes, stack_top;
584 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
586 static void
587 build_toporder_info (struct topo_info *topo)
589 topo->order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
590 topo->stack = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
591 topo->stack_top = 0;
592 topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
595 /* Free information about strongly connected components and the arrays in
596 TOPO. */
598 static void
599 free_toporder_info (struct topo_info *topo)
601 ipa_free_postorder_info ();
602 free (topo->order);
603 free (topo->stack);
606 /* Add NODE to the stack in TOPO, unless it is already there. */
608 static inline void
609 push_node_to_stack (struct topo_info *topo, struct cgraph_node *node)
611 struct ipa_node_params *info = IPA_NODE_REF (node);
612 if (info->node_enqueued)
613 return;
614 info->node_enqueued = 1;
615 topo->stack[topo->stack_top++] = node;
618 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
619 is empty. */
621 static struct cgraph_node *
622 pop_node_from_stack (struct topo_info *topo)
624 if (topo->stack_top)
626 struct cgraph_node *node;
627 topo->stack_top--;
628 node = topo->stack[topo->stack_top];
629 IPA_NODE_REF (node)->node_enqueued = 0;
630 return node;
632 else
633 return NULL;
636 /* Set lattice LAT to bottom and return true if it previously was not set as
637 such. */
639 static inline bool
640 set_lattice_to_bottom (struct ipcp_lattice *lat)
642 bool ret = !lat->bottom;
643 lat->bottom = true;
644 return ret;
647 /* Mark lattice as containing an unknown value and return true if it previously
648 was not marked as such. */
650 static inline bool
651 set_lattice_contains_variable (struct ipcp_lattice *lat)
653 bool ret = !lat->contains_variable;
654 lat->contains_variable = true;
655 return ret;
658 /* Set all aggregate lattices in PLATS to bottom and return true if they were
659 not previously set as such. */
661 static inline bool
662 set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
664 bool ret = !plats->aggs_bottom;
665 plats->aggs_bottom = true;
666 return ret;
669 /* Mark all aggregate lattices in PLATS as containing an unknown value and
670 return true if they were not previously marked as such. */
672 static inline bool
673 set_agg_lats_contain_variable (struct ipcp_param_lattices *plats)
675 bool ret = !plats->aggs_contain_variable;
676 plats->aggs_contain_variable = true;
677 return ret;
680 /* Mark both aggregate and scalar lattices as containing an unknown variable
681    and return true if any of them has not been marked as such so far. */
683 static inline bool
684 set_all_contains_variable (struct ipcp_param_lattices *plats)
686 bool ret = !plats->itself.contains_variable || !plats->aggs_contain_variable;
687 plats->itself.contains_variable = true;
688 plats->aggs_contain_variable = true;
689 return ret;
692 /* Initialize ipcp_lattices. */
694 static void
695 initialize_node_lattices (struct cgraph_node *node)
697 struct ipa_node_params *info = IPA_NODE_REF (node);
698 struct cgraph_edge *ie;
699 bool disable = false, variable = false;
700 int i;
702 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
703 if (!node->local.local)
705 /* When cloning is allowed, we can assume that externally visible
706 	 functions are not called. We will compensate for this by cloning
707 later. */
708 if (ipcp_versionable_function_p (node)
709 && ipcp_cloning_candidate_p (node))
710 variable = true;
711 else
712 disable = true;
715 if (disable || variable)
717 for (i = 0; i < ipa_get_param_count (info) ; i++)
719 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
720 if (disable)
722 set_lattice_to_bottom (&plats->itself);
723 set_agg_lats_to_bottom (plats);
725 else
726 set_all_contains_variable (plats);
728 if (dump_file && (dump_flags & TDF_DETAILS)
729       && !node->alias && !node->thunk.thunk_p)
730 fprintf (dump_file, "Marking all lattices of %s/%i as %s\n",
731 cgraph_node_name (node), node->uid,
732 disable ? "BOTTOM" : "VARIABLE");
735 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
736 if (ie->indirect_info->polymorphic)
738 gcc_checking_assert (ie->indirect_info->param_index >= 0);
739 ipa_get_parm_lattices (info,
740 ie->indirect_info->param_index)->virt_call = 1;
744 /* Return the result of a (possibly arithmetic) pass-through jump function
745    JFUNC on the constant value INPUT. Return NULL_TREE if that cannot be
746    determined or cannot itself be considered an interprocedural invariant. */
748 static tree
749 ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
751 tree restype, res;
753 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
754 return input;
755 else if (TREE_CODE (input) == TREE_BINFO)
756 return NULL_TREE;
758 gcc_checking_assert (is_gimple_ip_invariant (input));
759 if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc))
760 == tcc_comparison)
761 restype = boolean_type_node;
762 else
763 restype = TREE_TYPE (input);
764 res = fold_binary (ipa_get_jf_pass_through_operation (jfunc), restype,
765 input, ipa_get_jf_pass_through_operand (jfunc));
767 if (res && !is_gimple_ip_invariant (res))
768 return NULL_TREE;
770 return res;
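/* A hypothetical example of the above: for a pass-through jump function that
   records a PLUS_EXPR with operand 4 (i.e. the actual argument was "i + 4"
   for some formal parameter i of the caller), evaluating it on the known
   constant 7 folds 7 + 4 and yields 11.  For a comparison operation such as
   LT_EXPR the result type is boolean_type_node, and a simple NOP_EXPR
   pass-through just returns INPUT itself.  */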
773 /* Return the result of an ancestor jump function JFUNC on the constant value
774 INPUT. Return NULL_TREE if that cannot be determined. */
776 static tree
777 ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
779 if (TREE_CODE (input) == TREE_BINFO)
780 return get_binfo_at_offset (input,
781 ipa_get_jf_ancestor_offset (jfunc),
782 ipa_get_jf_ancestor_type (jfunc));
783 else if (TREE_CODE (input) == ADDR_EXPR)
785 tree t = TREE_OPERAND (input, 0);
786 t = build_ref_for_offset (EXPR_LOCATION (t), t,
787 ipa_get_jf_ancestor_offset (jfunc),
788 ipa_get_jf_ancestor_type (jfunc), NULL, false);
789 return build_fold_addr_expr (t);
791 else
792 return NULL_TREE;
795 /* Extract the actual BINFO being described by JFUNC which must be a known type
796 jump function. */
798 static tree
799 ipa_value_from_known_type_jfunc (struct ipa_jump_func *jfunc)
801 tree base_binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
802 if (!base_binfo)
803 return NULL_TREE;
804 return get_binfo_at_offset (base_binfo,
805 ipa_get_jf_known_type_offset (jfunc),
806 ipa_get_jf_known_type_component_type (jfunc));
809 /* Determine whether JFUNC evaluates to a known value (that is either a
810 constant or a binfo) and if so, return it. Otherwise return NULL. INFO
811 describes the caller node so that pass-through jump functions can be
812 evaluated. */
814 tree
815 ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
817 if (jfunc->type == IPA_JF_CONST)
818 return ipa_get_jf_constant (jfunc);
819 else if (jfunc->type == IPA_JF_KNOWN_TYPE)
820 return ipa_value_from_known_type_jfunc (jfunc);
821 else if (jfunc->type == IPA_JF_PASS_THROUGH
822 || jfunc->type == IPA_JF_ANCESTOR)
824 tree input;
825 int idx;
827 if (jfunc->type == IPA_JF_PASS_THROUGH)
828 idx = ipa_get_jf_pass_through_formal_id (jfunc);
829 else
830 idx = ipa_get_jf_ancestor_formal_id (jfunc);
832 if (info->ipcp_orig_node)
833 input = VEC_index (tree, info->known_vals, idx);
834 else
836 struct ipcp_lattice *lat;
838 if (!info->lattices)
840 gcc_checking_assert (!flag_ipa_cp);
841 return NULL_TREE;
843 lat = ipa_get_scalar_lat (info, idx);
844 if (!ipa_lat_is_single_const (lat))
845 return NULL_TREE;
846 input = lat->values->value;
849 if (!input)
850 return NULL_TREE;
852 if (jfunc->type == IPA_JF_PASS_THROUGH)
853 return ipa_get_jf_pass_through_result (jfunc, input);
854 else
855 return ipa_get_jf_ancestor_result (jfunc, input);
857 else
858 return NULL_TREE;
862 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
863 bottom, not containing a variable component and without any known value at
864 the same time. */
866 DEBUG_FUNCTION void
867 ipcp_verify_propagated_values (void)
869 struct cgraph_node *node;
871 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
873 struct ipa_node_params *info = IPA_NODE_REF (node);
874 int i, count = ipa_get_param_count (info);
876 for (i = 0; i < count; i++)
878 struct ipcp_lattice *lat = ipa_get_scalar_lat (info, i);
880 if (!lat->bottom
881 && !lat->contains_variable
882 && lat->values_count == 0)
884 if (dump_file)
886 fprintf (dump_file, "\nIPA lattices after constant "
887 "propagation:\n");
888 print_all_lattices (dump_file, true, false);
891 gcc_unreachable ();
897 /* Return true iff X and Y should be considered equal values by IPA-CP. */
899 static bool
900 values_equal_for_ipcp_p (tree x, tree y)
902 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
904 if (x == y)
905 return true;
907 if (TREE_CODE (x) == TREE_BINFO || TREE_CODE (y) == TREE_BINFO)
908 return false;
910 if (TREE_CODE (x) == ADDR_EXPR
911 && TREE_CODE (y) == ADDR_EXPR
912 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
913 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
914 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
915 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
916 else
917 return operand_equal_p (x, y, 0);
920 /* Add a new value source to VAL, marking that a value comes from edge CS and
921 (if the underlying jump function is a pass-through or an ancestor one) from
922 a caller value SRC_VAL of a caller parameter described by SRC_INDEX. OFFSET
923    is the offset within an aggregate, or negative if the source was the
924    scalar value of the parameter itself. */
926 static void
927 add_value_source (struct ipcp_value *val, struct cgraph_edge *cs,
928 struct ipcp_value *src_val, int src_idx, HOST_WIDE_INT offset)
930 struct ipcp_value_source *src;
932 src = (struct ipcp_value_source *) pool_alloc (ipcp_sources_pool);
933 src->offset = offset;
934 src->cs = cs;
935 src->val = src_val;
936 src->index = src_idx;
938 src->next = val->sources;
939 val->sources = src;
942 /* Try to add NEWVAL to LAT, potentially creating a new struct ipcp_value for
943    it. CS, SRC_VAL, SRC_INDEX and OFFSET are meant for add_value_source and
944 have the same meaning. */
946 static bool
947 add_value_to_lattice (struct ipcp_lattice *lat, tree newval,
948 struct cgraph_edge *cs, struct ipcp_value *src_val,
949 int src_idx, HOST_WIDE_INT offset)
951 struct ipcp_value *val;
953 if (lat->bottom)
954 return false;
956 for (val = lat->values; val; val = val->next)
957 if (values_equal_for_ipcp_p (val->value, newval))
959 if (edge_within_scc (cs))
961 struct ipcp_value_source *s;
962 for (s = val->sources; s ; s = s->next)
963 if (s->cs == cs)
964 break;
965 if (s)
966 return false;
969 add_value_source (val, cs, src_val, src_idx, offset);
970 return false;
973 if (lat->values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
975 /* We can only free sources, not the values themselves, because sources
976 	 of other values in this SCC might point to them. */
977 for (val = lat->values; val; val = val->next)
979 while (val->sources)
981 struct ipcp_value_source *src = val->sources;
982 val->sources = src->next;
983 pool_free (ipcp_sources_pool, src);
987 lat->values = NULL;
988 return set_lattice_to_bottom (lat);
991 lat->values_count++;
992 val = (struct ipcp_value *) pool_alloc (ipcp_values_pool);
993 memset (val, 0, sizeof (*val));
995 add_value_source (val, cs, src_val, src_idx, offset);
996 val->value = newval;
997 val->next = lat->values;
998 lat->values = val;
999 return true;
1002 /* Like above but passes a special value of offset to distinguish that the
1003 origin is the scalar value of the parameter rather than a part of an
1004 aggregate. */
1006 static inline bool
1007 add_scalar_value_to_lattice (struct ipcp_lattice *lat, tree newval,
1008 struct cgraph_edge *cs,
1009 struct ipcp_value *src_val, int src_idx)
1011 return add_value_to_lattice (lat, newval, cs, src_val, src_idx, -1);
1014 /* Propagate values through a pass-through jump function JFUNC associated with
1015 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1016 is the index of the source parameter. */
1018 static bool
1019 propagate_vals_accross_pass_through (struct cgraph_edge *cs,
1020 struct ipa_jump_func *jfunc,
1021 struct ipcp_lattice *src_lat,
1022 struct ipcp_lattice *dest_lat,
1023 int src_idx)
1025 struct ipcp_value *src_val;
1026 bool ret = false;
1028 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
1029 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1030 ret |= add_scalar_value_to_lattice (dest_lat, src_val->value, cs,
1031 src_val, src_idx);
1032 /* Do not create new values when propagating within an SCC because if there
1033      are arithmetic functions with circular dependencies, there is an infinite
1034      number of them and we would just make lattices bottom. */
1035 else if (edge_within_scc (cs))
1036 ret = set_lattice_contains_variable (dest_lat);
1037 else
1038 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1040 tree cstval = src_val->value;
1042 if (TREE_CODE (cstval) == TREE_BINFO)
1044 ret |= set_lattice_contains_variable (dest_lat);
1045 continue;
1047 cstval = ipa_get_jf_pass_through_result (jfunc, cstval);
1049 if (cstval)
1050 ret |= add_scalar_value_to_lattice (dest_lat, cstval, cs, src_val,
1051 src_idx);
1052 else
1053 ret |= set_lattice_contains_variable (dest_lat);
1056 return ret;
1059 /* Propagate values through an ancestor jump function JFUNC associated with
1060 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1061 is the index of the source parameter. */
1063 static bool
1064 propagate_vals_accross_ancestor (struct cgraph_edge *cs,
1065 struct ipa_jump_func *jfunc,
1066 struct ipcp_lattice *src_lat,
1067 struct ipcp_lattice *dest_lat,
1068 int src_idx)
1070 struct ipcp_value *src_val;
1071 bool ret = false;
1073 if (edge_within_scc (cs))
1074 return set_lattice_contains_variable (dest_lat);
1076 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1078 tree t = ipa_get_jf_ancestor_result (jfunc, src_val->value);
1080 if (t)
1081 ret |= add_scalar_value_to_lattice (dest_lat, t, cs, src_val, src_idx);
1082 else
1083 ret |= set_lattice_contains_variable (dest_lat);
1086 return ret;
1089 /* Propagate scalar values across jump function JFUNC that is associated with
1090 edge CS and put the values into DEST_LAT. */
1092 static bool
1093 propagate_scalar_accross_jump_function (struct cgraph_edge *cs,
1094 struct ipa_jump_func *jfunc,
1095 struct ipcp_lattice *dest_lat)
1097 if (dest_lat->bottom)
1098 return false;
1100 if (jfunc->type == IPA_JF_CONST
1101 || jfunc->type == IPA_JF_KNOWN_TYPE)
1103 tree val;
1105 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1107 val = ipa_value_from_known_type_jfunc (jfunc);
1108 if (!val)
1109 return set_lattice_contains_variable (dest_lat);
1111 else
1112 val = ipa_get_jf_constant (jfunc);
1113 return add_scalar_value_to_lattice (dest_lat, val, cs, NULL, 0);
1115 else if (jfunc->type == IPA_JF_PASS_THROUGH
1116 || jfunc->type == IPA_JF_ANCESTOR)
1118 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1119 struct ipcp_lattice *src_lat;
1120 int src_idx;
1121 bool ret;
1123 if (jfunc->type == IPA_JF_PASS_THROUGH)
1124 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1125 else
1126 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1128 src_lat = ipa_get_scalar_lat (caller_info, src_idx);
1129 if (src_lat->bottom)
1130 return set_lattice_contains_variable (dest_lat);
1132 /* If we would need to clone the caller and cannot, do not propagate. */
1133 if (!ipcp_versionable_function_p (cs->caller)
1134 && (src_lat->contains_variable
1135 || (src_lat->values_count > 1)))
1136 return set_lattice_contains_variable (dest_lat);
1138 if (jfunc->type == IPA_JF_PASS_THROUGH)
1139 ret = propagate_vals_accross_pass_through (cs, jfunc, src_lat,
1140 dest_lat, src_idx);
1141 else
1142 ret = propagate_vals_accross_ancestor (cs, jfunc, src_lat, dest_lat,
1143 src_idx);
1145 if (src_lat->contains_variable)
1146 ret |= set_lattice_contains_variable (dest_lat);
1148 return ret;
1151   /* TODO: We currently do not handle member method pointers in IPA-CP (we only
1152      use them for indirect inlining); we should propagate them too. */
1153 return set_lattice_contains_variable (dest_lat);
1156 /* If DEST_PLATS already has aggregate items, check that aggs_by_ref matches
1157 NEW_AGGS_BY_REF and if not, mark all aggs as bottoms and return true (in all
1158 other cases, return false). If there are no aggregate items, set
1159 aggs_by_ref to NEW_AGGS_BY_REF. */
1161 static bool
1162 set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
1163 bool new_aggs_by_ref)
1165 if (dest_plats->aggs)
1167 if (dest_plats->aggs_by_ref != new_aggs_by_ref)
1169 set_agg_lats_to_bottom (dest_plats);
1170 return true;
1173 else
1174 dest_plats->aggs_by_ref = new_aggs_by_ref;
1175 return false;
1178 /* Walk aggregate lattices in DEST_PLATS from ***AGLAT on, until ***aglat is an
1179    already existing lattice for the given OFFSET and VAL_SIZE, marking all skipped
1180 lattices as containing variable and checking for overlaps. If there is no
1181 already existing lattice for the OFFSET and VAL_SIZE, create one, initialize
1182 it with offset, size and contains_variable to PRE_EXISTING, and return true,
1183    unless there are too many already. If there are too many, return false. If
1184 there are overlaps turn whole DEST_PLATS to bottom and return false. If any
1185 skipped lattices were newly marked as containing variable, set *CHANGE to
1186 true. */
1188 static bool
1189 merge_agg_lats_step (struct ipcp_param_lattices *dest_plats,
1190 HOST_WIDE_INT offset, HOST_WIDE_INT val_size,
1191 struct ipcp_agg_lattice ***aglat,
1192 bool pre_existing, bool *change)
1194 gcc_checking_assert (offset >= 0);
1196 while (**aglat && (**aglat)->offset < offset)
1198 if ((**aglat)->offset + (**aglat)->size > offset)
1200 set_agg_lats_to_bottom (dest_plats);
1201 return false;
1203 *change |= set_lattice_contains_variable (**aglat);
1204 *aglat = &(**aglat)->next;
1207 if (**aglat && (**aglat)->offset == offset)
1209 if ((**aglat)->size != val_size
1210 || ((**aglat)->next
1211 && (**aglat)->next->offset < offset + val_size))
1213 set_agg_lats_to_bottom (dest_plats);
1214 return false;
1216 gcc_checking_assert (!(**aglat)->next
1217 || (**aglat)->next->offset >= offset + val_size);
1218 return true;
1220 else
1222 struct ipcp_agg_lattice *new_al;
1224 if (**aglat && (**aglat)->offset < offset + val_size)
1226 set_agg_lats_to_bottom (dest_plats);
1227 return false;
1229 if (dest_plats->aggs_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1230 return false;
1231 dest_plats->aggs_count++;
1232 new_al = (struct ipcp_agg_lattice *) pool_alloc (ipcp_agg_lattice_pool);
1233 memset (new_al, 0, sizeof (*new_al));
1235 new_al->offset = offset;
1236 new_al->size = val_size;
1237 new_al->contains_variable = pre_existing;
1239 new_al->next = **aglat;
1240 **aglat = new_al;
1241 return true;
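/* A hypothetical example of the above: if DEST_PLATS already contains
   aggregate lattices at bit offsets 0 and 64, both of size 32, then a
   request for offset 32 and size 32 creates a new lattice between the two
   and returns true, while a request for offset 16 and size 32 overlaps the
   lattice at offset 0, so all aggregate lattices of DEST_PLATS are set to
   bottom and false is returned.  */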
1245 /* Set AGLAT and all other aggregate lattices reachable by next pointers as
1246 containing an unknown value. */
1248 static bool
1249 set_chain_of_aglats_contains_variable (struct ipcp_agg_lattice *aglat)
1251 bool ret = false;
1252 while (aglat)
1254 ret |= set_lattice_contains_variable (aglat);
1255 aglat = aglat->next;
1257 return ret;
1260 /* Merge existing aggregate lattices in SRC_PLATS to DEST_PLATS, subtracting
1261    OFFSET_DELTA. CS is the call graph edge and SRC_IDX the index of the source
1262 parameter used for lattice value sources. Return true if DEST_PLATS changed
1263 in any way. */
1265 static bool
1266 merge_aggregate_lattices (struct cgraph_edge *cs,
1267 struct ipcp_param_lattices *dest_plats,
1268 struct ipcp_param_lattices *src_plats,
1269 int src_idx, HOST_WIDE_INT offset_delta)
1271 bool pre_existing = dest_plats->aggs != NULL;
1272 struct ipcp_agg_lattice **dst_aglat;
1273 bool ret = false;
1275 if (set_check_aggs_by_ref (dest_plats, src_plats->aggs_by_ref))
1276 return true;
1277 if (src_plats->aggs_bottom)
1278 return set_agg_lats_contain_variable (dest_plats);
1279 if (src_plats->aggs_contain_variable)
1280 ret |= set_agg_lats_contain_variable (dest_plats);
1281 dst_aglat = &dest_plats->aggs;
1283 for (struct ipcp_agg_lattice *src_aglat = src_plats->aggs;
1284 src_aglat;
1285 src_aglat = src_aglat->next)
1287 HOST_WIDE_INT new_offset = src_aglat->offset - offset_delta;
1289 if (new_offset < 0)
1290 continue;
1291 if (merge_agg_lats_step (dest_plats, new_offset, src_aglat->size,
1292 &dst_aglat, pre_existing, &ret))
1294 struct ipcp_agg_lattice *new_al = *dst_aglat;
1296 dst_aglat = &(*dst_aglat)->next;
1297 if (src_aglat->bottom)
1299 ret |= set_lattice_contains_variable (new_al);
1300 continue;
1302 if (src_aglat->contains_variable)
1303 ret |= set_lattice_contains_variable (new_al);
1304 for (struct ipcp_value *val = src_aglat->values;
1305 val;
1306 val = val->next)
1307 ret |= add_value_to_lattice (new_al, val->value, cs, val, src_idx,
1308 src_aglat->offset);
1310 else if (dest_plats->aggs_bottom)
1311 return true;
1313 ret |= set_chain_of_aglats_contains_variable (*dst_aglat);
1314 return ret;
1317 /* Determine whether there is anything to propagate FROM SRC_PLATS through a
1318    pass-through JFUNC and if so, whether it conforms to the rules about
1319    propagating values passed by reference. */
1321 static bool
1322 agg_pass_through_permissible_p (struct ipcp_param_lattices *src_plats,
1323 struct ipa_jump_func *jfunc)
1325 return src_plats->aggs
1326 && (!src_plats->aggs_by_ref
1327 || ipa_get_jf_pass_through_agg_preserved (jfunc));
1330 /* Propagate aggregate values across jump function JFUNC that is associated
1331    with edge CS and put the values into DEST_PLATS. */
1333 static bool
1334 propagate_aggs_accross_jump_function (struct cgraph_edge *cs,
1335 struct ipa_jump_func *jfunc,
1336 struct ipcp_param_lattices *dest_plats)
1338 bool ret = false;
1340 if (dest_plats->aggs_bottom)
1341 return false;
1343 if (jfunc->type == IPA_JF_PASS_THROUGH
1344 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
1346 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1347 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1348 struct ipcp_param_lattices *src_plats;
1350 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1351 if (agg_pass_through_permissible_p (src_plats, jfunc))
1353 /* Currently we do not produce clobber aggregate jump
1354 functions, replace with merging when we do. */
1355 gcc_assert (!jfunc->agg.items);
1356 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats,
1357 src_idx, 0);
1359 else
1360 ret |= set_agg_lats_contain_variable (dest_plats);
1362 else if (jfunc->type == IPA_JF_ANCESTOR
1363 && ipa_get_jf_ancestor_agg_preserved (jfunc))
1365 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1366 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1367 struct ipcp_param_lattices *src_plats;
1369 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1370 if (src_plats->aggs && src_plats->aggs_by_ref)
1372 /* Currently we do not produce clobber aggregate jump
1373 functions, replace with merging when we do. */
1374 gcc_assert (!jfunc->agg.items);
1375 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats, src_idx,
1376 ipa_get_jf_ancestor_offset (jfunc));
1378 else if (!src_plats->aggs_by_ref)
1379 ret |= set_agg_lats_to_bottom (dest_plats);
1380 else
1381 ret |= set_agg_lats_contain_variable (dest_plats);
1383 else if (jfunc->agg.items)
1385 bool pre_existing = dest_plats->aggs != NULL;
1386 struct ipcp_agg_lattice **aglat = &dest_plats->aggs;
1387 struct ipa_agg_jf_item *item;
1388 int i;
1390 if (set_check_aggs_by_ref (dest_plats, jfunc->agg.by_ref))
1391 return true;
1393 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jfunc->agg.items, i, item)
1395 HOST_WIDE_INT val_size;
1397 if (item->offset < 0)
1398 continue;
1399 gcc_checking_assert (is_gimple_ip_invariant (item->value));
1400 val_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (item->value)), 1);
1402 if (merge_agg_lats_step (dest_plats, item->offset, val_size,
1403 &aglat, pre_existing, &ret))
1405 ret |= add_value_to_lattice (*aglat, item->value, cs, NULL, 0, 0);
1406 aglat = &(*aglat)->next;
1408 else if (dest_plats->aggs_bottom)
1409 return true;
1412 ret |= set_chain_of_aglats_contains_variable (*aglat);
1414 else
1415 ret |= set_agg_lats_contain_variable (dest_plats);
1417 return ret;
1420 /* Propagate constants from the caller to the callee of CS. */
1423 static bool
1424 propagate_constants_accross_call (struct cgraph_edge *cs)
1426 struct ipa_node_params *callee_info;
1427 enum availability availability;
1428 struct cgraph_node *callee, *alias_or_thunk;
1429 struct ipa_edge_args *args;
1430 bool ret = false;
1431 int i, args_count, parms_count;
1433 callee = cgraph_function_node (cs->callee, &availability);
1434 if (!callee->analyzed)
1435 return false;
1436 gcc_checking_assert (cgraph_function_with_gimple_body_p (callee));
1437 callee_info = IPA_NODE_REF (callee);
1439 args = IPA_EDGE_REF (cs);
1440 args_count = ipa_get_cs_argument_count (args);
1441 parms_count = ipa_get_param_count (callee_info);
1443 /* If this call goes through a thunk we must not propagate to the first (0th)
1444 parameter. However, we might need to uncover a thunk from below a series
1445 of aliases first. */
1446 alias_or_thunk = cs->callee;
1447 while (alias_or_thunk->alias)
1448 alias_or_thunk = cgraph_alias_aliased_node (alias_or_thunk);
1449 if (alias_or_thunk->thunk.thunk_p)
1451 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
1452 0));
1453 i = 1;
1455 else
1456 i = 0;
1458 for (; (i < args_count) && (i < parms_count); i++)
1460 struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
1461 struct ipcp_param_lattices *dest_plats;
1463 dest_plats = ipa_get_parm_lattices (callee_info, i);
1464 if (availability == AVAIL_OVERWRITABLE)
1465 ret |= set_all_contains_variable (dest_plats);
1466 else
1468 ret |= propagate_scalar_accross_jump_function (cs, jump_func,
1469 &dest_plats->itself);
1470 ret |= propagate_aggs_accross_jump_function (cs, jump_func,
1471 dest_plats);
1474 for (; i < parms_count; i++)
1475 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info, i));
1477 return ret;
1480 /* If an indirect edge IE can be turned into a direct one based on KNOWN_VALS
1481    (which can contain both constants and binfos), KNOWN_BINFOS or KNOWN_AGGS
1482    (each of which may be NULL), return the destination. */
1484 tree
1485 ipa_get_indirect_edge_target (struct cgraph_edge *ie,
1486 VEC (tree, heap) *known_vals,
1487 VEC (tree, heap) *known_binfos,
1488 VEC (ipa_agg_jump_function_p, heap) *known_aggs)
1490 int param_index = ie->indirect_info->param_index;
1491 HOST_WIDE_INT token, anc_offset;
1492 tree otr_type;
1493 tree t;
1495 if (param_index == -1)
1496 return NULL_TREE;
1498 if (!ie->indirect_info->polymorphic)
1500 tree t;
1502 if (ie->indirect_info->agg_contents)
1504 if (VEC_length (ipa_agg_jump_function_p, known_aggs)
1505 > (unsigned int) param_index)
1507 struct ipa_agg_jump_function *agg;
1508 agg = VEC_index (ipa_agg_jump_function_p, known_aggs,
1509 param_index);
1510 t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
1511 ie->indirect_info->by_ref);
1513 else
1514 t = NULL;
1516 else
1517 t = (VEC_length (tree, known_vals) > (unsigned int) param_index
1518 ? VEC_index (tree, known_vals, param_index) : NULL);
1520       if (t
1521 	  && TREE_CODE (t) == ADDR_EXPR
1522 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
1523 return TREE_OPERAND (t, 0);
1524 else
1525 return NULL_TREE;
1528 gcc_assert (!ie->indirect_info->agg_contents);
1529 token = ie->indirect_info->otr_token;
1530 anc_offset = ie->indirect_info->offset;
1531 otr_type = ie->indirect_info->otr_type;
1533 t = VEC_index (tree, known_vals, param_index);
1534 if (!t && known_binfos
1535 && VEC_length (tree, known_binfos) > (unsigned int) param_index)
1536 t = VEC_index (tree, known_binfos, param_index);
1537 if (!t)
1538 return NULL_TREE;
1540 if (TREE_CODE (t) != TREE_BINFO)
1542 tree binfo;
1543 binfo = gimple_extract_devirt_binfo_from_cst (t);
1544 if (!binfo)
1545 return NULL_TREE;
1546 binfo = get_binfo_at_offset (binfo, anc_offset, otr_type);
1547 if (!binfo)
1548 return NULL_TREE;
1549 return gimple_get_virt_method_for_binfo (token, binfo);
1551 else
1553 tree binfo;
1555 binfo = get_binfo_at_offset (t, anc_offset, otr_type);
1556 if (!binfo)
1557 return NULL_TREE;
1558 return gimple_get_virt_method_for_binfo (token, binfo);
1562 /* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
1563 and KNOWN_BINFOS. */
1565 static int
1566 devirtualization_time_bonus (struct cgraph_node *node,
1567 VEC (tree, heap) *known_csts,
1568 VEC (tree, heap) *known_binfos)
1570 struct cgraph_edge *ie;
1571 int res = 0;
1573 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1575 struct cgraph_node *callee;
1576 struct inline_summary *isummary;
1577 tree target;
1579 target = ipa_get_indirect_edge_target (ie, known_csts, known_binfos,
1580 NULL);
1581 if (!target)
1582 continue;
1584 /* Only bare minimum benefit for clearly un-inlineable targets. */
1585 res += 1;
1586 callee = cgraph_get_node (target);
1587 if (!callee || !callee->analyzed)
1588 continue;
1589 isummary = inline_summary (callee);
1590 if (!isummary->inlinable)
1591 continue;
1593 /* FIXME: The values below need re-considering and perhaps also
1594 	 integrating into the cost metrics, at least in some very basic way. */
1595 if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
1596 res += 31;
1597 else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
1598 res += 15;
1599 else if (isummary->size <= MAX_INLINE_INSNS_AUTO
1600 || DECL_DECLARED_INLINE_P (callee->symbol.decl))
1601 res += 7;
1604 return res;
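/* For example (with made-up numbers), a devirtualized call whose target is
   found but cannot be inlined contributes just 1 to the bonus above, whereas
   a target whose inline-summary size is at most MAX_INLINE_INSNS_AUTO / 4
   contributes 1 + 31 = 32.  */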
1607 /* Return time bonus incurred because of HINTS. */
1609 static int
1610 hint_time_bonus (inline_hints hints)
1612 if (hints & (INLINE_HINT_loop_iterations | INLINE_HINT_loop_stride))
1613 return PARAM_VALUE (PARAM_IPA_CP_LOOP_HINT_BONUS);
1614 return 0;
1617 /* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
1618    and SIZE_COST and with the sums of frequencies and of profile counts of the
1619    incoming edges to the potential new clone in FREQ_SUM and COUNT_SUM. */
1621 static bool
1622 good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
1623 int freq_sum, gcov_type count_sum, int size_cost)
1625 if (time_benefit == 0
1626 || !flag_ipa_cp_clone
1627 || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->symbol.decl)))
1628 return false;
1630 gcc_assert (size_cost > 0);
1632 if (max_count)
1634 int factor = (count_sum * 1000) / max_count;
1635 HOST_WIDEST_INT evaluation = (((HOST_WIDEST_INT) time_benefit * factor)
1636 / size_cost);
1638 if (dump_file && (dump_flags & TDF_DETAILS))
1639 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1640 "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
1641 ") -> evaluation: " HOST_WIDEST_INT_PRINT_DEC
1642 ", threshold: %i\n",
1643 time_benefit, size_cost, (HOST_WIDE_INT) count_sum,
1644 evaluation, 500);
1646 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
1648 else
1650 HOST_WIDEST_INT evaluation = (((HOST_WIDEST_INT) time_benefit * freq_sum)
1651 / size_cost);
1653 if (dump_file && (dump_flags & TDF_DETAILS))
1654 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1655 "size: %i, freq_sum: %i) -> evaluation: "
1656 HOST_WIDEST_INT_PRINT_DEC ", threshold: %i\n",
1657 time_benefit, size_cost, freq_sum, evaluation,
1658 		 CGRAPH_FREQ_BASE / 2);
1660 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
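/* A made-up example of the evaluation above: without profile data, a
   time_benefit of 10, a freq_sum of 2000 (e.g. two callers, each with
   frequency CGRAPH_FREQ_BASE) and a size_cost of 20 give
   10 * 2000 / 20 = 1000, which clears the default
   PARAM_IPA_CP_EVAL_THRESHOLD of 500.  Halving the time benefit or doubling
   the size cost would leave the evaluation exactly at the threshold, which
   still passes because the comparison is >=.  */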
1664 /* Return all context independent values from aggregate lattices in PLATS in a
1665 vector. Return NULL if there are none. */
1667 static VEC (ipa_agg_jf_item_t, gc) *
1668 context_independent_aggregate_values (struct ipcp_param_lattices *plats)
1670 VEC (ipa_agg_jf_item_t, gc) *res = NULL;
1672 if (plats->aggs_bottom
1673 || plats->aggs_contain_variable
1674 || plats->aggs_count == 0)
1675 return NULL;
1677 for (struct ipcp_agg_lattice *aglat = plats->aggs;
1678 aglat;
1679 aglat = aglat->next)
1680 if (ipa_lat_is_single_const (aglat))
1682 struct ipa_agg_jf_item item;
1683 item.offset = aglat->offset;
1684 item.value = aglat->values->value;
1685 VEC_safe_push (ipa_agg_jf_item_t, gc, res, item);
1687 return res;
1690 /* Allocate KNOWN_CSTS, KNOWN_BINFOS and, if non-NULL, KNOWN_AGGS and populate
1691 them with values of parameters that are known independent of the context.
1692 INFO describes the function. If REMOVABLE_PARAMS_COST is non-NULL, the
1693 movement cost of all removable parameters will be stored in it. */
1695 static bool
1696 gather_context_independent_values (struct ipa_node_params *info,
1697 VEC (tree, heap) **known_csts,
1698 VEC (tree, heap) **known_binfos,
1699 VEC (ipa_agg_jump_function_t, heap) **known_aggs,
1700 int *removable_params_cost)
1702 int i, count = ipa_get_param_count (info);
1703 bool ret = false;
1705 *known_csts = NULL;
1706 *known_binfos = NULL;
1707 VEC_safe_grow_cleared (tree, heap, *known_csts, count);
1708 VEC_safe_grow_cleared (tree, heap, *known_binfos, count);
1709 if (known_aggs)
1711 *known_aggs = NULL;
1712 VEC_safe_grow_cleared (ipa_agg_jump_function_t, heap, *known_aggs, count);
1715 if (removable_params_cost)
1716 *removable_params_cost = 0;
1718 for (i = 0; i < count ; i++)
1720 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1721 struct ipcp_lattice *lat = &plats->itself;
1723 if (ipa_lat_is_single_const (lat))
1725 struct ipcp_value *val = lat->values;
1726 if (TREE_CODE (val->value) != TREE_BINFO)
1728 VEC_replace (tree, *known_csts, i, val->value);
1729 if (removable_params_cost)
1730 *removable_params_cost
1731 += estimate_move_cost (TREE_TYPE (val->value));
1732 ret = true;
1734 else if (plats->virt_call)
1736 VEC_replace (tree, *known_binfos, i, val->value);
1737 ret = true;
1739 else if (removable_params_cost
1740 && !ipa_is_param_used (info, i))
1741 *removable_params_cost
1742 += estimate_move_cost (TREE_TYPE (ipa_get_param (info, i)));
1744 else if (removable_params_cost
1745 && !ipa_is_param_used (info, i))
1746 *removable_params_cost
1747 += estimate_move_cost (TREE_TYPE (ipa_get_param (info, i)));
1749 if (known_aggs)
1751 VEC (ipa_agg_jf_item_t, gc) *agg_items;
1752 struct ipa_agg_jump_function *ajf;
1754 agg_items = context_independent_aggregate_values (plats);
1755 ajf = &VEC_index (ipa_agg_jump_function_t, *known_aggs, i);
1756 ajf->items = agg_items;
1757 ajf->by_ref = plats->aggs_by_ref;
1758 ret |= agg_items != NULL;
1762 return ret;
1765 /* The current interface in ipa-inline-analysis requires a pointer vector.
1766 Create it.
1768    FIXME: That interface should be re-worked; this is slightly silly. Still,
1769 I'd like to discuss how to change it first and this demonstrates the
1770 issue. */
1772 static VEC (ipa_agg_jump_function_p, heap) *
1773 agg_jmp_p_vec_for_t_vec (VEC (ipa_agg_jump_function_t, heap) *known_aggs)
1775 VEC (ipa_agg_jump_function_p, heap) *ret;
1776 struct ipa_agg_jump_function *ajf;
1777 int i;
1779 ret = VEC_alloc (ipa_agg_jump_function_p, heap,
1780 VEC_length (ipa_agg_jump_function_t, known_aggs));
1781 FOR_EACH_VEC_ELT (ipa_agg_jump_function_t, known_aggs, i, ajf)
1782 VEC_quick_push (ipa_agg_jump_function_p, ret, ajf);
1783 return ret;
1786 /* Iterate over known values of parameters of NODE and estimate the local
1787 effects in terms of time and size they have. */
1789 static void
1790 estimate_local_effects (struct cgraph_node *node)
1792 struct ipa_node_params *info = IPA_NODE_REF (node);
1793 int i, count = ipa_get_param_count (info);
1794 VEC (tree, heap) *known_csts, *known_binfos;
1795 VEC (ipa_agg_jump_function_t, heap) *known_aggs;
1796 VEC (ipa_agg_jump_function_p, heap) *known_aggs_ptrs;
1797 bool always_const;
1798 int base_time = inline_summary (node)->time;
1799 int removable_params_cost;
1801 if (!count || !ipcp_versionable_function_p (node))
1802 return;
1804 if (dump_file && (dump_flags & TDF_DETAILS))
1805 fprintf (dump_file, "\nEstimating effects for %s/%i, base_time: %i.\n",
1806 cgraph_node_name (node), node->uid, base_time);
1808 always_const = gather_context_independent_values (info, &known_csts,
1809 &known_binfos, &known_aggs,
1810 &removable_params_cost);
1811 known_aggs_ptrs = agg_jmp_p_vec_for_t_vec (known_aggs);
1812 if (always_const)
1814 struct caller_statistics stats;
1815 inline_hints hints;
1816 int time, size;
1818 init_caller_stats (&stats);
1819 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
1820 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1821 known_aggs_ptrs, &size, &time, &hints);
1822 time -= devirtualization_time_bonus (node, known_csts, known_binfos);
1823 time -= hint_time_bonus (hints);
1824 time -= removable_params_cost;
1825 size -= stats.n_calls * removable_params_cost;
1827 if (dump_file)
1828 fprintf (dump_file, " - context independent values, size: %i, "
1829 "time_benefit: %i\n", size, base_time - time);
1831 if (size <= 0
1832 || cgraph_will_be_removed_from_program_if_no_direct_calls (node))
1834 info->clone_for_all_contexts = true;
1835 base_time = time;
1837 if (dump_file)
1838 fprintf (dump_file, " Decided to specialize for all "
1839 "known contexts, code not going to grow.\n");
1841 else if (good_cloning_opportunity_p (node, base_time - time,
1842 stats.freq_sum, stats.count_sum,
1843 size))
1845 if (size + overall_size <= max_new_size)
1847 info->clone_for_all_contexts = true;
1848 base_time = time;
1849 overall_size += size;
1851 if (dump_file)
1852 fprintf (dump_file, " Decided to specialize for all "
1853 "known contexts, growth deemed beneficial.\n");
1855 else if (dump_file && (dump_flags & TDF_DETAILS))
1856 fprintf (dump_file, " Not cloning for all contexts because "
1857 "max_new_size would be reached with %li.\n",
1858 size + overall_size);
1862 for (i = 0; i < count ; i++)
1864 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1865 struct ipcp_lattice *lat = &plats->itself;
1866 struct ipcp_value *val;
1867 int emc;
1869 if (lat->bottom
1870 || !lat->values
1871 || VEC_index (tree, known_csts, i)
1872 || VEC_index (tree, known_binfos, i))
1873 continue;
1875 for (val = lat->values; val; val = val->next)
1877 int time, size, time_benefit;
1878 inline_hints hints;
1880 if (TREE_CODE (val->value) != TREE_BINFO)
1882 VEC_replace (tree, known_csts, i, val->value);
1883 VEC_replace (tree, known_binfos, i, NULL_TREE);
1884 emc = estimate_move_cost (TREE_TYPE (val->value));
1886 else if (plats->virt_call)
1888 VEC_replace (tree, known_csts, i, NULL_TREE);
1889 VEC_replace (tree, known_binfos, i, val->value);
1890 emc = 0;
1892 else
1893 continue;
1895 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1896 known_aggs_ptrs, &size, &time,
1897 &hints);
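/* The benefit of specializing for this value is the estimated drop in
   execution time, increased by the devirtualization and inline-hint
   bonuses, by the cost of passing parameters the clone would no longer
   need, and by the move cost of this particular constant.  */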
1898 time_benefit = base_time - time
1899 + devirtualization_time_bonus (node, known_csts, known_binfos)
1900 + hint_time_bonus (hints)
1901 + removable_params_cost + emc;
1903 gcc_checking_assert (size >= 0);
1904 /* The inliner-heuristics based estimates may think that in certain
1905 contexts some functions do not have any size at all but we want
1906 all specializations to have at least a tiny cost, if only to avoid
1907 dividing by zero. */
1908 if (size == 0)
1909 size = 1;
1911 if (dump_file && (dump_flags & TDF_DETAILS))
1913 fprintf (dump_file, " - estimates for value ");
1914 print_ipcp_constant_value (dump_file, val->value);
1915 fprintf (dump_file, " for parameter ");
1916 print_generic_expr (dump_file, ipa_get_param (info, i), 0);
1917 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
1918 time_benefit, size);
1921 val->local_time_benefit = time_benefit;
1922 val->local_size_cost = size;
1924 VEC_replace (tree, known_binfos, i, NULL_TREE);
1925 VEC_replace (tree, known_csts, i, NULL_TREE);
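/* Do the same for every constant tracked in the aggregate lattices: each
   candidate is temporarily pushed into the corresponding aggregate jump
   function, the clone is re-estimated and the item is popped again.  */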
1928 for (i = 0; i < count ; i++)
1930 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1931 struct ipa_agg_jump_function *ajf;
1932 struct ipcp_agg_lattice *aglat;
1934 if (plats->aggs_bottom || !plats->aggs)
1935 continue;
1937 ajf = &VEC_index (ipa_agg_jump_function_t, known_aggs, i);
1938 for (aglat = plats->aggs; aglat; aglat = aglat->next)
1940 struct ipcp_value *val;
1941 if (aglat->bottom || !aglat->values
1942 /* If the following is true, the one value is in known_aggs. */
1943 || (!plats->aggs_contain_variable
1944 && ipa_lat_is_single_const (aglat)))
1945 continue;
1947 for (val = aglat->values; val; val = val->next)
1949 int time, size, time_benefit;
1950 struct ipa_agg_jf_item item;
1951 inline_hints hints;
1953 item.offset = aglat->offset;
1954 item.value = val->value;
1955 VEC_safe_push (ipa_agg_jf_item_t, gc, ajf->items, item);
1957 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1958 known_aggs_ptrs, &size, &time,
1959 &hints);
1960 time_benefit = base_time - time
1961 + devirtualization_time_bonus (node, known_csts, known_binfos)
1962 + hint_time_bonus (hints);
1963 gcc_checking_assert (size >= 0);
1964 if (size == 0)
1965 size = 1;
1967 if (dump_file && (dump_flags & TDF_DETAILS))
1969 fprintf (dump_file, " - estimates for value ");
1970 print_ipcp_constant_value (dump_file, val->value);
1971 fprintf (dump_file, " for parameter ");
1972 print_generic_expr (dump_file, ipa_get_param (info, i), 0);
1973 fprintf (dump_file, "[%soffset: " HOST_WIDE_INT_PRINT_DEC
1974 "]: time_benefit: %i, size: %i\n",
1975 plats->aggs_by_ref ? "ref " : "",
1976 aglat->offset, time_benefit, size);
1979 val->local_time_benefit = time_benefit;
1980 val->local_size_cost = size;
1981 VEC_pop (ipa_agg_jf_item_t, ajf->items);
1986 for (i = 0; i < count ; i++)
1988 VEC_free (ipa_agg_jf_item_t, gc,
1989 VEC_index (ipa_agg_jump_function_t, known_aggs, i).items);
1990 VEC_index (ipa_agg_jump_function_t, known_aggs, i).items = NULL;
1993 VEC_free (tree, heap, known_csts);
1994 VEC_free (tree, heap, known_binfos);
1995 VEC_free (ipa_agg_jump_function_t, heap, known_aggs);
1996 VEC_free (ipa_agg_jump_function_p, heap, known_aggs_ptrs);
2000 /* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
2001 topological sort of values. */
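/* This is Tarjan's strongly connected component algorithm applied to the
   graph of value dependencies (the sources of each value): dfs and
   low_link numbers are assigned from the static dfs_counter, values are
   kept on an explicit stack, and once an SCC root is found the whole
   component is chained through scc_next and its representative is pushed
   onto values_topo.  */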
2003 static void
2004 add_val_to_toposort (struct ipcp_value *cur_val)
2006 static int dfs_counter = 0;
2007 static struct ipcp_value *stack;
2008 struct ipcp_value_source *src;
2010 if (cur_val->dfs)
2011 return;
2013 dfs_counter++;
2014 cur_val->dfs = dfs_counter;
2015 cur_val->low_link = dfs_counter;
2017 cur_val->topo_next = stack;
2018 stack = cur_val;
2019 cur_val->on_stack = true;
2021 for (src = cur_val->sources; src; src = src->next)
2022 if (src->val)
2024 if (src->val->dfs == 0)
2026 add_val_to_toposort (src->val);
2027 if (src->val->low_link < cur_val->low_link)
2028 cur_val->low_link = src->val->low_link;
2030 else if (src->val->on_stack
2031 && src->val->dfs < cur_val->low_link)
2032 cur_val->low_link = src->val->dfs;
2035 if (cur_val->dfs == cur_val->low_link)
2037 struct ipcp_value *v, *scc_list = NULL;
2041 v = stack;
2042 stack = v->topo_next;
2043 v->on_stack = false;
2045 v->scc_next = scc_list;
2046 scc_list = v;
2048 while (v != cur_val);
2050 cur_val->topo_next = values_topo;
2051 values_topo = cur_val;
2055 /* Add all values in lattices associated with NODE to the topological sort if
2056 they are not there yet. */
2058 static void
2059 add_all_node_vals_to_toposort (struct cgraph_node *node)
2061 struct ipa_node_params *info = IPA_NODE_REF (node);
2062 int i, count = ipa_get_param_count (info);
2064 for (i = 0; i < count ; i++)
2066 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2067 struct ipcp_lattice *lat = &plats->itself;
2068 struct ipcp_agg_lattice *aglat;
2069 struct ipcp_value *val;
2071 if (!lat->bottom)
2072 for (val = lat->values; val; val = val->next)
2073 add_val_to_toposort (val);
2075 if (!plats->aggs_bottom)
2076 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2077 if (!aglat->bottom)
2078 for (val = aglat->values; val; val = val->next)
2079 add_val_to_toposort (val);
2083 /* One pass of constant propagation along the call graph edges, from callers
2084 to callees (requires topological ordering in TOPO), iterating over strongly
2085 connected components. */
2087 static void
2088 propagate_constants_topo (struct topo_info *topo)
2090 int i;
2092 for (i = topo->nnodes - 1; i >= 0; i--)
2094 struct cgraph_node *v, *node = topo->order[i];
2095 struct ipa_dfs_info *node_dfs_info;
2097 if (!cgraph_function_with_gimple_body_p (node))
2098 continue;
2100 node_dfs_info = (struct ipa_dfs_info *) node->symbol.aux;
2101 /* First, iteratively propagate within the strongly connected component
2102 until all lattices stabilize. */
2103 v = node_dfs_info->next_cycle;
2104 while (v)
2106 push_node_to_stack (topo, v);
2107 v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle;
2110 v = node;
2111 while (v)
2113 struct cgraph_edge *cs;
2115 for (cs = v->callees; cs; cs = cs->next_callee)
2116 if (edge_within_scc (cs)
2117 && propagate_constants_accross_call (cs))
2118 push_node_to_stack (topo, cs->callee);
2119 v = pop_node_from_stack (topo);
2122 /* Afterwards, propagate along edges leading out of the SCC, calculate
2123 the local effects of the discovered constants and add all valid values
2124 to their topological sort. */
2125 v = node;
2126 while (v)
2128 struct cgraph_edge *cs;
2130 estimate_local_effects (v);
2131 add_all_node_vals_to_toposort (v);
2132 for (cs = v->callees; cs; cs = cs->next_callee)
2133 if (!edge_within_scc (cs))
2134 propagate_constants_accross_call (cs);
2136 v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle;
2142 /* Return the sum of A and B if neither of them is bigger than INT_MAX/2,
2143 otherwise return the bigger of the two. */
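/* For example, safe_add (3, 4) evaluates to 7, whereas
   safe_add (INT_MAX, 1) simply yields INT_MAX instead of overflowing.  */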
2145 static int
2146 safe_add (int a, int b)
2148 if (a > INT_MAX/2 || b > INT_MAX/2)
2149 return a > b ? a : b;
2150 else
2151 return a + b;
2155 /* Propagate the estimated effects of individual values along the topological
2156 order from the dependent values to those they depend on. */
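/* Within each strongly connected component (linked through scc_next) the
   local and previously propagated benefits and costs are first summed up;
   the sums are then added to the propagated benefit and cost of every
   source value that feeds the component through a maybe-hot call edge, so
   that callers get credit for the savings they enable downstream.  */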
2158 static void
2159 propagate_effects (void)
2161 struct ipcp_value *base;
2163 for (base = values_topo; base; base = base->topo_next)
2165 struct ipcp_value_source *src;
2166 struct ipcp_value *val;
2167 int time = 0, size = 0;
2169 for (val = base; val; val = val->scc_next)
2171 time = safe_add (time,
2172 val->local_time_benefit + val->prop_time_benefit);
2173 size = safe_add (size, val->local_size_cost + val->prop_size_cost);
2176 for (val = base; val; val = val->scc_next)
2177 for (src = val->sources; src; src = src->next)
2178 if (src->val
2179 && cgraph_maybe_hot_edge_p (src->cs))
2181 src->val->prop_time_benefit = safe_add (time,
2182 src->val->prop_time_benefit);
2183 src->val->prop_size_cost = safe_add (size,
2184 src->val->prop_size_cost);
2190 /* Propagate constants, binfos and their effects from the summaries
2191 interprocedurally. */
2193 static void
2194 ipcp_propagate_stage (struct topo_info *topo)
2196 struct cgraph_node *node;
2198 if (dump_file)
2199 fprintf (dump_file, "\n Propagating constants:\n\n");
2201 if (in_lto_p)
2202 ipa_update_after_lto_read ();
2205 FOR_EACH_DEFINED_FUNCTION (node)
2207 struct ipa_node_params *info = IPA_NODE_REF (node);
2209 determine_versionability (node);
2210 if (cgraph_function_with_gimple_body_p (node))
2212 info->lattices = XCNEWVEC (struct ipcp_param_lattices,
2213 ipa_get_param_count (info));
2214 initialize_node_lattices (node);
2216 if (node->count > max_count)
2217 max_count = node->count;
2218 overall_size += inline_summary (node)->self_size;
2221 max_new_size = overall_size;
2222 if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
2223 max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
2224 max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;
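/* For example, assuming overall_size is 20000 units (and above the
   large-unit-insns threshold) and the ipcp-unit-growth parameter is 10,
   cloning may grow the unit up to
   max_new_size = 20000 + 20000 * 10 / 100 + 1 = 22001 units.  */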
2226 if (dump_file)
2227 fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
2228 overall_size, max_new_size);
2230 propagate_constants_topo (topo);
2231 #ifdef ENABLE_CHECKING
2232 ipcp_verify_propagated_values ();
2233 #endif
2234 propagate_effects ();
2236 if (dump_file)
2238 fprintf (dump_file, "\nIPA lattices after all propagation:\n");
2239 print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
2243 /* Discover indirect outgoing edges of NODE, which is a new clone with known
2244 KNOWN_VALS, that can now be resolved and make them direct. */
2246 static void
2247 ipcp_discover_new_direct_edges (struct cgraph_node *node,
2248 VEC (tree, heap) *known_vals)
2250 struct cgraph_edge *ie, *next_ie;
2251 bool found = false;
2253 for (ie = node->indirect_calls; ie; ie = next_ie)
2255 tree target;
2257 next_ie = ie->next_callee;
2258 target = ipa_get_indirect_edge_target (ie, known_vals, NULL, NULL);
2259 if (target)
2261 ipa_make_edge_direct_to_target (ie, target);
2262 found = true;
2265 /* Turning indirect calls into direct calls will improve the overall summary. */
2266 if (found)
2267 inline_update_overall_summary (node);
2270 /* Vector of pointers which form linked lists of clones of an original cgraph
2271 edge. */
2273 static VEC (cgraph_edge_p, heap) *next_edge_clone;
2275 static inline void
2276 grow_next_edge_clone_vector (void)
2278 if (VEC_length (cgraph_edge_p, next_edge_clone)
2279 <= (unsigned) cgraph_edge_max_uid)
2280 VEC_safe_grow_cleared (cgraph_edge_p, heap, next_edge_clone,
2281 cgraph_edge_max_uid + 1);
2284 /* Edge duplication hook that inserts the new clone DST into the linked list
2285 of clones of SRC kept in next_edge_clone. */
2287 static void
2288 ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2289 __attribute__((unused)) void *data)
2291 grow_next_edge_clone_vector ();
2292 VEC_replace (cgraph_edge_p, next_edge_clone, dst->uid,
2293 VEC_index (cgraph_edge_p, next_edge_clone, src->uid));
2294 VEC_replace (cgraph_edge_p, next_edge_clone, src->uid, dst);
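/* In other words, DST takes over the old successor of SRC and becomes
   SRC's immediate successor: a singly linked list insertion keyed by the
   edge uid.  */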
2297 /* See if NODE is a clone with a known aggregate value at a given OFFSET of a
2298 parameter with the given INDEX. */
2300 static tree
2301 get_clone_agg_value (struct cgraph_node *node, HOST_WIDEST_INT offset,
2302 int index)
2304 struct ipa_agg_replacement_value *aggval;
2306 aggval = ipa_get_agg_replacements_for_node (node);
2307 while (aggval)
2309 if (aggval->offset == offset
2310 && aggval->index == index)
2311 return aggval->value;
2312 aggval = aggval->next;
2314 return NULL_TREE;
2317 /* Return true if edge CS does bring about the value described by SRC. */
2319 static bool
2320 cgraph_edge_brings_value_p (struct cgraph_edge *cs,
2321 struct ipcp_value_source *src)
2323 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2325 if (IPA_NODE_REF (cs->callee)->ipcp_orig_node
2326 || caller_info->node_dead)
2327 return false;
2328 if (!src->val)
2329 return true;
2331 if (caller_info->ipcp_orig_node)
2333 tree t;
2334 if (src->offset == -1)
2335 t = VEC_index (tree, caller_info->known_vals, src->index);
2336 else
2337 t = get_clone_agg_value (cs->caller, src->offset, src->index);
2338 return (t != NULL_TREE
2339 && values_equal_for_ipcp_p (src->val->value, t));
2341 else
2343 struct ipcp_agg_lattice *aglat;
2344 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
2345 src->index);
2346 if (src->offset == -1)
2347 return (ipa_lat_is_single_const (&plats->itself)
2348 && values_equal_for_ipcp_p (src->val->value,
2349 plats->itself.values->value));
2350 else
2352 if (plats->aggs_bottom || plats->aggs_contain_variable)
2353 return false;
2354 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2355 if (aglat->offset == src->offset)
2356 return (ipa_lat_is_single_const (aglat)
2357 && values_equal_for_ipcp_p (src->val->value,
2358 aglat->values->value));
2360 return false;
2364 /* Get the next clone in the linked list of clones of an edge. */
2366 static inline struct cgraph_edge *
2367 get_next_cgraph_edge_clone (struct cgraph_edge *cs)
2369 return VEC_index (cgraph_edge_p, next_edge_clone, cs->uid);
2372 /* Given VAL, iterate over all its sources and if they still hold, add their
2373 edge frequencies, counts and their number into *FREQ_SUM, *COUNT_SUM and
2374 *CALLER_COUNT respectively. Return true if any of those edges is hot. */
2376 static bool
2377 get_info_about_necessary_edges (struct ipcp_value *val, int *freq_sum,
2378 gcov_type *count_sum, int *caller_count)
2380 struct ipcp_value_source *src;
2381 int freq = 0, count = 0;
2382 gcov_type cnt = 0;
2383 bool hot = false;
2385 for (src = val->sources; src; src = src->next)
2387 struct cgraph_edge *cs = src->cs;
2388 while (cs)
2390 if (cgraph_edge_brings_value_p (cs, src))
2392 count++;
2393 freq += cs->frequency;
2394 cnt += cs->count;
2395 hot |= cgraph_maybe_hot_edge_p (cs);
2397 cs = get_next_cgraph_edge_clone (cs);
2401 *freq_sum = freq;
2402 *count_sum = cnt;
2403 *caller_count = count;
2404 return hot;
2407 /* Return a vector of incoming edges that do bring value VAL. It is assumed
2408 their number is known and equal to CALLER_COUNT. */
2410 static VEC (cgraph_edge_p,heap) *
2411 gather_edges_for_value (struct ipcp_value *val, int caller_count)
2413 struct ipcp_value_source *src;
2414 VEC (cgraph_edge_p,heap) *ret;
2416 ret = VEC_alloc (cgraph_edge_p, heap, caller_count);
2417 for (src = val->sources; src; src = src->next)
2419 struct cgraph_edge *cs = src->cs;
2420 while (cs)
2422 if (cgraph_edge_brings_value_p (cs, src))
2423 VEC_quick_push (cgraph_edge_p, ret, cs);
2424 cs = get_next_cgraph_edge_clone (cs);
2428 return ret;
2431 /* Construct a replacement map for a known VALUE for a formal parameter PARM.
2432 Return it or NULL if for some reason it cannot be created. */
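/* Note that if the constant's type does not match the parameter type, the
   function below first tries a regular fold conversion and, failing that,
   a VIEW_CONVERT_EXPR when the two types have the same size; otherwise no
   replacement map is created.  */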
2434 static struct ipa_replace_map *
2435 get_replacement_map (tree value, tree parm)
2437 tree req_type = TREE_TYPE (parm);
2438 struct ipa_replace_map *replace_map;
2440 if (!useless_type_conversion_p (req_type, TREE_TYPE (value)))
2442 if (fold_convertible_p (req_type, value))
2443 value = fold_build1 (NOP_EXPR, req_type, value);
2444 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (value)))
2445 value = fold_build1 (VIEW_CONVERT_EXPR, req_type, value);
2446 else
2448 if (dump_file)
2450 fprintf (dump_file, " const ");
2451 print_generic_expr (dump_file, value, 0);
2452 fprintf (dump_file, " can't be converted to param ");
2453 print_generic_expr (dump_file, parm, 0);
2454 fprintf (dump_file, "\n");
2456 return NULL;
2460 replace_map = ggc_alloc_ipa_replace_map ();
2461 if (dump_file)
2463 fprintf (dump_file, " replacing param ");
2464 print_generic_expr (dump_file, parm, 0);
2465 fprintf (dump_file, " with const ");
2466 print_generic_expr (dump_file, value, 0);
2467 fprintf (dump_file, "\n");
2469 replace_map->old_tree = parm;
2470 replace_map->new_tree = value;
2471 replace_map->replace_p = true;
2472 replace_map->ref_p = false;
2474 return replace_map;
2477 /* Dump new profiling counts. */
2479 static void
2480 dump_profile_updates (struct cgraph_node *orig_node,
2481 struct cgraph_node *new_node)
2483 struct cgraph_edge *cs;
2485 fprintf (dump_file, " setting count of the specialized node to "
2486 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) new_node->count);
2487 for (cs = new_node->callees; cs ; cs = cs->next_callee)
2488 fprintf (dump_file, " edge to %s has count "
2489 HOST_WIDE_INT_PRINT_DEC "\n",
2490 cgraph_node_name (cs->callee), (HOST_WIDE_INT) cs->count);
2492 fprintf (dump_file, " setting count of the original node to "
2493 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) orig_node->count);
2494 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
2495 fprintf (dump_file, " edge to %s is left with "
2496 HOST_WIDE_INT_PRINT_DEC "\n",
2497 cgraph_node_name (cs->callee), (HOST_WIDE_INT) cs->count);
2500 /* After a specialized NEW_NODE version of ORIG_NODE has been created, update
2501 their profile information to reflect this. */
2503 static void
2504 update_profiling_info (struct cgraph_node *orig_node,
2505 struct cgraph_node *new_node)
2507 struct cgraph_edge *cs;
2508 struct caller_statistics stats;
2509 gcov_type new_sum, orig_sum;
2510 gcov_type remainder, orig_node_count = orig_node->count;
2512 if (orig_node_count == 0)
2513 return;
2515 init_caller_stats (&stats);
2516 cgraph_for_node_and_aliases (orig_node, gather_caller_stats, &stats, false);
2517 orig_sum = stats.count_sum;
2518 init_caller_stats (&stats);
2519 cgraph_for_node_and_aliases (new_node, gather_caller_stats, &stats, false);
2520 new_sum = stats.count_sum;
2522 if (orig_node_count < orig_sum + new_sum)
2524 if (dump_file)
2525 fprintf (dump_file, " Problem: node %s/%i has too low count "
2526 HOST_WIDE_INT_PRINT_DEC " while the sum of incoming "
2527 "counts is " HOST_WIDE_INT_PRINT_DEC "\n",
2528 cgraph_node_name (orig_node), orig_node->uid,
2529 (HOST_WIDE_INT) orig_node_count,
2530 (HOST_WIDE_INT) (orig_sum + new_sum));
2532 orig_node_count = (orig_sum + new_sum) * 12 / 10;
2533 if (dump_file)
2534 fprintf (dump_file, " proceeding by pretending it was "
2535 HOST_WIDE_INT_PRINT_DEC "\n",
2536 (HOST_WIDE_INT) orig_node_count);
2539 new_node->count = new_sum;
2540 remainder = orig_node_count - new_sum;
2541 orig_node->count = remainder;
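/* Scale the counts of the outgoing edges of both nodes by the share of the
   execution count each node has received; the new_sum/orig_node_count and
   remainder/orig_node_count ratios are applied in REG_BR_PROB_BASE
   fixed-point arithmetic.  */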
2543 for (cs = new_node->callees; cs ; cs = cs->next_callee)
2544 if (cs->frequency)
2545 cs->count = cs->count * (new_sum * REG_BR_PROB_BASE
2546 / orig_node_count) / REG_BR_PROB_BASE;
2547 else
2548 cs->count = 0;
2550 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
2551 cs->count = cs->count * (remainder * REG_BR_PROB_BASE
2552 / orig_node_count) / REG_BR_PROB_BASE;
2554 if (dump_file)
2555 dump_profile_updates (orig_node, new_node);
2558 /* Update the respective profile of specialized NEW_NODE and the original
2559 ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
2560 have been redirected to the specialized version. */
2562 static void
2563 update_specialized_profile (struct cgraph_node *new_node,
2564 struct cgraph_node *orig_node,
2565 gcov_type redirected_sum)
2567 struct cgraph_edge *cs;
2568 gcov_type new_node_count, orig_node_count = orig_node->count;
2570 if (dump_file)
2571 fprintf (dump_file, " the sum of counts of redirected edges is "
2572 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) redirected_sum);
2573 if (orig_node_count == 0)
2574 return;
2576 gcc_assert (orig_node_count >= redirected_sum);
2578 new_node_count = new_node->count;
2579 new_node->count += redirected_sum;
2580 orig_node->count -= redirected_sum;
2582 for (cs = new_node->callees; cs ; cs = cs->next_callee)
2583 if (cs->frequency)
2584 cs->count += cs->count * redirected_sum / new_node_count;
2585 else
2586 cs->count = 0;
2588 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
2590 gcov_type dec = cs->count * (redirected_sum * REG_BR_PROB_BASE
2591 / orig_node_count) / REG_BR_PROB_BASE;
2592 if (dec < cs->count)
2593 cs->count -= dec;
2594 else
2595 cs->count = 0;
2598 if (dump_file)
2599 dump_profile_updates (orig_node, new_node);
2602 /* Create a specialized version of NODE with known constants and types of
2603 parameters in KNOWN_VALS and redirect all edges in CALLERS to it. */
2605 static struct cgraph_node *
2606 create_specialized_node (struct cgraph_node *node,
2607 VEC (tree, heap) *known_vals,
2608 struct ipa_agg_replacement_value *aggvals,
2609 VEC (cgraph_edge_p,heap) *callers)
2611 struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
2612 VEC (ipa_replace_map_p,gc)* replace_trees = NULL;
2613 struct cgraph_node *new_node;
2614 int i, count = ipa_get_param_count (info);
2615 bitmap args_to_skip;
2617 gcc_assert (!info->ipcp_orig_node);
2619 if (node->local.can_change_signature)
2621 args_to_skip = BITMAP_GGC_ALLOC ();
2622 for (i = 0; i < count; i++)
2624 tree t = VEC_index (tree, known_vals, i);
2626 if ((t && TREE_CODE (t) != TREE_BINFO)
2627 || !ipa_is_param_used (info, i))
2628 bitmap_set_bit (args_to_skip, i);
2631 else
2633 args_to_skip = NULL;
2634 if (dump_file && (dump_flags & TDF_DETAILS))
2635 fprintf (dump_file, " cannot change function signature\n");
2638 for (i = 0; i < count ; i++)
2640 tree t = VEC_index (tree, known_vals, i);
2641 if (t && TREE_CODE (t) != TREE_BINFO)
2643 struct ipa_replace_map *replace_map;
2645 replace_map = get_replacement_map (t, ipa_get_param (info, i));
2646 if (replace_map)
2647 VEC_safe_push (ipa_replace_map_p, gc, replace_trees, replace_map);
2651 new_node = cgraph_create_virtual_clone (node, callers, replace_trees,
2652 args_to_skip, "constprop");
2653 ipa_set_node_agg_value_chain (new_node, aggvals);
2654 if (dump_file && (dump_flags & TDF_DETAILS))
2656 fprintf (dump_file, " the new node is %s/%i.\n",
2657 cgraph_node_name (new_node), new_node->uid);
2658 if (aggvals)
2659 ipa_dump_agg_replacement_values (dump_file, aggvals);
2661 gcc_checking_assert (ipa_node_params_vector
2662 && (VEC_length (ipa_node_params_t,
2663 ipa_node_params_vector)
2664 > (unsigned) cgraph_max_uid));
2665 update_profiling_info (node, new_node);
2666 new_info = IPA_NODE_REF (new_node);
2667 new_info->ipcp_orig_node = node;
2668 new_info->known_vals = known_vals;
2670 ipcp_discover_new_direct_edges (new_node, known_vals);
2672 VEC_free (cgraph_edge_p, heap, callers);
2673 return new_node;
2676 /* Given a NODE and a subset of its CALLERS, try to populate blank slots in
2677 KNOWN_VALS with constants and types that are also known for all of the
2678 CALLERS. */
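/* A blank slot is filled only when every edge in CALLERS passes the same
   known constant (or binfo) for that argument; any missing argument or
   disagreement between callers leaves the slot empty.  */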
2680 static void
2681 find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
2682 VEC (tree, heap) *known_vals,
2683 VEC (cgraph_edge_p,heap) *callers)
2685 struct ipa_node_params *info = IPA_NODE_REF (node);
2686 int i, count = ipa_get_param_count (info);
2688 for (i = 0; i < count ; i++)
2690 struct cgraph_edge *cs;
2691 tree newval = NULL_TREE;
2692 int j;
2694 if (ipa_get_scalar_lat (info, i)->bottom
2695 || VEC_index (tree, known_vals, i))
2696 continue;
2698 FOR_EACH_VEC_ELT (cgraph_edge_p, callers, j, cs)
2700 struct ipa_jump_func *jump_func;
2701 tree t;
2703 if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
2705 newval = NULL_TREE;
2706 break;
2708 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
2709 t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func);
2710 if (!t
2711 || (newval
2712 && !values_equal_for_ipcp_p (t, newval)))
2714 newval = NULL_TREE;
2715 break;
2717 else
2718 newval = t;
2721 if (newval)
2723 if (dump_file && (dump_flags & TDF_DETAILS))
2725 fprintf (dump_file, " adding an extra known scalar value ");
2726 print_ipcp_constant_value (dump_file, newval);
2727 fprintf (dump_file, " for parameter ");
2728 print_generic_expr (dump_file, ipa_get_param (info, i), 0);
2729 fprintf (dump_file, "\n");
2732 VEC_replace (tree, known_vals, i, newval);
2737 /* Go through PLATS and create a vector of items consisting of the values and
2738 offsets (minus OFFSET) of lattices that contain only a single value. */
2740 static VEC (ipa_agg_jf_item_t, heap) *
2741 copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
2743 VEC (ipa_agg_jf_item_t, heap) *res = NULL;
2745 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
2746 return NULL;
2748 for (struct ipcp_agg_lattice *aglat = plats->aggs; aglat; aglat = aglat->next)
2749 if (ipa_lat_is_single_const (aglat))
2751 struct ipa_agg_jf_item ti;
2752 ti.offset = aglat->offset - offset;
2753 ti.value = aglat->values->value;
2754 VEC_safe_push (ipa_agg_jf_item_t, heap, res, ti);
2756 return res;
2759 /* Intersect all values in INTER with single value lattices in PLATS (while
2760 subtracting OFFSET). */
2762 static void
2763 intersect_with_plats (struct ipcp_param_lattices *plats,
2764 VEC (ipa_agg_jf_item_t, heap) **inter,
2765 HOST_WIDE_INT offset)
2767 struct ipcp_agg_lattice *aglat;
2768 struct ipa_agg_jf_item *item;
2769 int k;
2771 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
2773 VEC_free (ipa_agg_jf_item_t, heap, *inter);
2774 *inter = NULL;
2775 return;
2778 aglat = plats->aggs;
2779 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, *inter, k, item)
2781 bool found = false;
2782 if (!item->value)
2783 continue;
2784 while (aglat)
2786 if (aglat->offset - offset > item->offset)
2787 break;
2788 if (aglat->offset - offset == item->offset)
2790 gcc_checking_assert (item->value);
2791 if (values_equal_for_ipcp_p (item->value, aglat->values->value))
2792 found = true;
2793 break;
2795 aglat = aglat->next;
2797 if (!found)
2798 item->value = NULL_TREE;
2802 /* Copy aggregate replacement values of NODE (which is an IPA-CP clone) to the
2803 vector result while subtracting OFFSET from the individual value offsets. */
2805 static VEC (ipa_agg_jf_item_t, heap) *
2806 agg_replacements_to_vector (struct cgraph_node *node, HOST_WIDE_INT offset)
2808 struct ipa_agg_replacement_value *av;
2809 VEC (ipa_agg_jf_item_t, heap) *res = NULL;
2811 for (av = ipa_get_agg_replacements_for_node (node); av; av = av->next)
2813 struct ipa_agg_jf_item item;
2814 gcc_checking_assert (av->value);
2815 item.offset = av->offset - offset;
2816 item.value = av->value;
2817 VEC_safe_push (ipa_agg_jf_item_t, heap, res, item);
2820 return res;
2823 /* Intersect all values in INTER with those that we have already scheduled to
2824 be replaced in parameter number INDEX of NODE, which is an IPA-CP clone
2825 (while subtracting OFFSET). */
2827 static void
2828 intersect_with_agg_replacements (struct cgraph_node *node, int index,
2829 VEC (ipa_agg_jf_item_t, heap) **inter,
2830 HOST_WIDE_INT offset)
2832 struct ipa_agg_replacement_value *srcvals;
2833 struct ipa_agg_jf_item *item;
2834 int i;
2836 srcvals = ipa_get_agg_replacements_for_node (node);
2837 if (!srcvals)
2839 VEC_free (ipa_agg_jf_item_t, heap, *inter);
2840 *inter = NULL;
2841 return;
2844 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, *inter, i, item)
2846 struct ipa_agg_replacement_value *av;
2847 bool found = false;
2848 if (!item->value)
2849 continue;
2850 for (av = srcvals; av; av = av->next)
2852 gcc_checking_assert (av->value);
2853 if (av->index == index
2854 && av->offset - offset == item->offset)
2856 if (values_equal_for_ipcp_p (item->value, av->value))
2857 found = true;
2858 break;
2861 if (!found)
2862 item->value = NULL_TREE;
2866 /* Look at edges in CALLERS and collect all known aggregate values that arrive
2867 from all of them. */
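/* The function below starts from the aggregate values known for the first
   caller (either its own aggregate replacements if it is itself a clone,
   the single-value aggregate lattices of its parameter, or the items of
   its aggregate jump function) and intersects them with those of every
   further caller, dropping any item the callers disagree on; whatever
   survives is turned into ipa_agg_replacement_value entries.  */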
2869 static struct ipa_agg_replacement_value *
2870 find_aggregate_values_for_callers_subset (struct cgraph_node *node,
2871 VEC (cgraph_edge_p,heap) *callers)
2873 struct ipa_node_params *info = IPA_NODE_REF (node);
2874 struct ipa_agg_replacement_value *res = NULL;
2875 struct cgraph_edge *cs;
2876 int i, j, count = ipa_get_param_count (info);
2878 FOR_EACH_VEC_ELT (cgraph_edge_p, callers, j, cs)
2880 int c = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
2881 if (c < count)
2882 count = c;
2885 for (i = 0; i < count ; i++)
2887 struct cgraph_edge *cs;
2888 VEC (ipa_agg_jf_item_t, heap) *inter = NULL;
2889 struct ipa_agg_jf_item *item;
2890 int j;
2892 /* Among other things, the following check should deal with all by_ref
2893 mismatches. */
2894 if (ipa_get_parm_lattices (info, i)->aggs_bottom)
2895 continue;
2897 FOR_EACH_VEC_ELT (cgraph_edge_p, callers, j, cs)
2899 struct ipa_jump_func *jfunc;
2900 jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
2901 if (jfunc->type == IPA_JF_PASS_THROUGH
2902 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2904 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2905 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
2907 if (caller_info->ipcp_orig_node)
2909 struct cgraph_node *orig_node = caller_info->ipcp_orig_node;
2910 struct ipcp_param_lattices *orig_plats;
2911 orig_plats = ipa_get_parm_lattices (IPA_NODE_REF (orig_node),
2912 src_idx);
2913 if (agg_pass_through_permissible_p (orig_plats, jfunc))
2915 if (!inter)
2916 inter = agg_replacements_to_vector (cs->caller, 0);
2917 else
2918 intersect_with_agg_replacements (cs->caller, src_idx,
2919 &inter, 0);
2922 else
2924 struct ipcp_param_lattices *src_plats;
2925 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
2926 if (agg_pass_through_permissible_p (src_plats, jfunc))
2928 /* Currently we do not produce clobber aggregate jump
2929 functions, adjust when we do. */
2930 gcc_checking_assert (!jfunc->agg.items);
2931 if (!inter)
2932 inter = copy_plats_to_inter (src_plats, 0);
2933 else
2934 intersect_with_plats (src_plats, &inter, 0);
2938 else if (jfunc->type == IPA_JF_ANCESTOR
2939 && ipa_get_jf_ancestor_agg_preserved (jfunc))
2941 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2942 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
2943 struct ipcp_param_lattices *src_plats;
2944 HOST_WIDE_INT delta = ipa_get_jf_ancestor_offset (jfunc);
2946 if (info->ipcp_orig_node)
2948 if (!inter)
2949 inter = agg_replacements_to_vector (cs->caller, delta);
2950 else
2951 intersect_with_agg_replacements (cs->caller, i, &inter,
2952 delta);
2954 else
2956 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
2957 /* Currently we do not produce clobber aggregate jump
2958 functions, adjust when we do. */
2959 gcc_checking_assert (!src_plats->aggs || !jfunc->agg.items);
2960 if (!inter)
2961 inter = copy_plats_to_inter (src_plats, delta);
2962 else
2963 intersect_with_plats (src_plats, &inter, delta);
2966 else if (jfunc->agg.items)
2968 int k;
2970 if (!inter)
2971 inter = VEC_copy (ipa_agg_jf_item, heap, jfunc->agg.items);
2972 else
2973 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, inter, k, item)
2975 int l = 0;
2976 bool found = false;
2978 if (!item->value)
2979 continue;
2981 while ((unsigned) l < VEC_length (ipa_agg_jf_item_t,
2982 jfunc->agg.items))
2984 struct ipa_agg_jf_item *ti;
2985 ti = &VEC_index (ipa_agg_jf_item_t,
2986 jfunc->agg.items, l);
2987 if (ti->offset > item->offset)
2988 break;
2989 if (ti->offset == item->offset)
2991 gcc_checking_assert (ti->value);
2992 if (values_equal_for_ipcp_p (item->value,
2993 ti->value))
2994 found = true;
2995 break;
2997 l++;
2999 if (!found)
3000 item->value = NULL;
3003 else
3004 goto next_param;
3006 if (!inter)
3007 goto next_param;
3010 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, inter, j, item)
3012 struct ipa_agg_replacement_value *v;
3014 if (!item->value)
3015 continue;
3017 v = ggc_alloc_ipa_agg_replacement_value ();
3018 v->index = i;
3019 v->offset = item->offset;
3020 v->value = item->value;
3021 v->next = res;
3022 res = v;
3025 next_param:
3026 if (inter)
3027 VEC_free (ipa_agg_jf_item, heap, inter);
3029 return res;
3032 /* Turn KNOWN_AGGS into a list of aggregate replacement values. */
3034 static struct ipa_agg_replacement_value *
3035 known_aggs_to_agg_replacement_list (VEC (ipa_agg_jump_function_t,
3036 heap) *known_aggs)
3038 struct ipa_agg_replacement_value *res = NULL;
3039 struct ipa_agg_jump_function *aggjf;
3040 struct ipa_agg_jf_item *item;
3041 int i, j;
3043 FOR_EACH_VEC_ELT (ipa_agg_jump_function_t, known_aggs, i, aggjf)
3044 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, aggjf->items, j, item)
3046 struct ipa_agg_replacement_value *v;
3047 v = ggc_alloc_ipa_agg_replacement_value ();
3048 v->index = i;
3049 v->offset = item->offset;
3050 v->value = item->value;
3051 v->next = res;
3052 res = v;
3054 return res;
3057 /* Determine whether CS also brings all scalar values that NODE is
3058 specialized for. */
3060 static bool
3061 cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
3062 struct cgraph_node *node)
3064 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
3065 int count = ipa_get_param_count (dest_info);
3066 struct ipa_node_params *caller_info;
3067 struct ipa_edge_args *args;
3068 int i;
3070 caller_info = IPA_NODE_REF (cs->caller);
3071 args = IPA_EDGE_REF (cs);
3072 for (i = 0; i < count; i++)
3074 struct ipa_jump_func *jump_func;
3075 tree val, t;
3077 val = VEC_index (tree, dest_info->known_vals, i);
3078 if (!val)
3079 continue;
3081 if (i >= ipa_get_cs_argument_count (args))
3082 return false;
3083 jump_func = ipa_get_ith_jump_func (args, i);
3084 t = ipa_value_from_jfunc (caller_info, jump_func);
3085 if (!t || !values_equal_for_ipcp_p (val, t))
3086 return false;
3088 return true;
3091 /* Determine whether CS also brings all aggregate values that NODE is
3092 specialized for. */
3093 static bool
3094 cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
3095 struct cgraph_node *node)
3097 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
3098 struct ipa_agg_replacement_value *aggval;
3100 aggval = ipa_get_agg_replacements_for_node (node);
3101 while (aggval)
3103 bool found = false;
3104 struct ipcp_param_lattices *plats;
3105 plats = ipa_get_parm_lattices (caller_info, aggval->index);
3106 if (plats->aggs_bottom || plats->aggs_contain_variable)
3107 return false;
3108 for (struct ipcp_agg_lattice *aglat = plats->aggs;
3109 aglat;
3110 aglat = aglat->next)
3111 if (aglat->offset == aggval->offset)
3113 if (ipa_lat_is_single_const (aglat)
3114 && values_equal_for_ipcp_p (aggval->value,
3115 aglat->values->value))
3117 found = true;
3118 break;
3120 else
3121 return false;
3124 if (!found)
3125 return false;
3127 aggval = aggval->next;
3129 return true;
3132 /* Given an original NODE and a VAL for which we have already created a
3133 specialized clone, look whether there are incoming edges that still lead
3134 into the old node but now also bring the requested value and also conform to
3135 all other criteria such that they can be redirected to the specialized node.
3136 This function can therefore redirect the final edge in an SCC. */
3138 static void
3139 perhaps_add_new_callers (struct cgraph_node *node, struct ipcp_value *val)
3141 struct ipcp_value_source *src;
3142 gcov_type redirected_sum = 0;
3144 for (src = val->sources; src; src = src->next)
3146 struct cgraph_edge *cs = src->cs;
3147 while (cs)
3149 enum availability availability;
3151 if (cgraph_function_node (cs->callee, &availability) == node
3152 && availability > AVAIL_OVERWRITABLE
3153 && cgraph_edge_brings_value_p (cs, src))
3155 if (cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
3156 && cgraph_edge_brings_all_agg_vals_for_node (cs,
3157 val->spec_node))
3159 if (dump_file)
3160 fprintf (dump_file, " - adding an extra caller %s/%i"
3161 " of %s/%i\n",
3162 xstrdup (cgraph_node_name (cs->caller)),
3163 cs->caller->uid,
3164 xstrdup (cgraph_node_name (val->spec_node)),
3165 val->spec_node->uid);
3167 cgraph_redirect_edge_callee (cs, val->spec_node);
3168 redirected_sum += cs->count;
3171 cs = get_next_cgraph_edge_clone (cs);
3175 if (redirected_sum)
3176 update_specialized_profile (val->spec_node, node, redirected_sum);
3180 /* Copy KNOWN_BINFOS to KNOWN_VALS. */
3182 static void
3183 move_binfos_to_values (VEC (tree, heap) *known_vals,
3184 VEC (tree, heap) *known_binfos)
3186 tree t;
3187 int i;
3189 for (i = 0; VEC_iterate (tree, known_binfos, i, t); i++)
3190 if (t)
3191 VEC_replace (tree, known_vals, i, t);
3194 /* Return true if there is a replacement equivalent to VALUE, INDEX and OFFSET
3195 among those in the AGGVALS list. */
3197 DEBUG_FUNCTION bool
3198 ipcp_val_in_agg_replacements_p (struct ipa_agg_replacement_value *aggvals,
3199 int index, HOST_WIDE_INT offset, tree value)
3201 while (aggvals)
3203 if (aggvals->index == index
3204 && aggvals->offset == offset
3205 && values_equal_for_ipcp_p (aggvals->value, value))
3206 return true;
3207 aggvals = aggvals->next;
3209 return false;
3212 /* Decide whether to create a special version of NODE for value VAL of parameter
3213 at the given INDEX. If OFFSET is -1, the value is for the parameter itself,
3214 otherwise it is stored at the given OFFSET of the parameter. KNOWN_CSTS,
3215 KNOWN_BINFOS and KNOWN_AGGS describe the other already known values. */
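/* If a clone for VAL already exists, only try to redirect additional
   callers to it. Otherwise bail out when the size budget would be exceeded
   or when no incoming edge actually brings the value, and create the clone
   only if good_cloning_opportunity_p considers either the local benefit
   alone or the local plus propagated benefit worthwhile.  */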
3217 static bool
3218 decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
3219 struct ipcp_value *val, VEC (tree, heap) *known_csts,
3220 VEC (tree, heap) *known_binfos)
3222 struct ipa_agg_replacement_value *aggvals;
3223 int freq_sum, caller_count;
3224 gcov_type count_sum;
3225 VEC (cgraph_edge_p, heap) *callers;
3226 VEC (tree, heap) *kv;
3228 if (val->spec_node)
3230 perhaps_add_new_callers (node, val);
3231 return false;
3233 else if (val->local_size_cost + overall_size > max_new_size)
3235 if (dump_file && (dump_flags & TDF_DETAILS))
3236 fprintf (dump_file, " Ignoring candidate value because "
3237 "max_new_size would be reached with %li.\n",
3238 val->local_size_cost + overall_size);
3239 return false;
3241 else if (!get_info_about_necessary_edges (val, &freq_sum, &count_sum,
3242 &caller_count))
3243 return false;
3245 if (dump_file && (dump_flags & TDF_DETAILS))
3247 fprintf (dump_file, " - considering value ");
3248 print_ipcp_constant_value (dump_file, val->value);
3249 fprintf (dump_file, " for parameter ");
3250 print_generic_expr (dump_file, ipa_get_param (IPA_NODE_REF (node),
3251 index), 0);
3252 if (offset != -1)
3253 fprintf (dump_file, ", offset: " HOST_WIDE_INT_PRINT_DEC, offset);
3254 fprintf (dump_file, " (caller_count: %i)\n", caller_count);
3257 if (!good_cloning_opportunity_p (node, val->local_time_benefit,
3258 freq_sum, count_sum,
3259 val->local_size_cost)
3260 && !good_cloning_opportunity_p (node,
3261 val->local_time_benefit
3262 + val->prop_time_benefit,
3263 freq_sum, count_sum,
3264 val->local_size_cost
3265 + val->prop_size_cost))
3266 return false;
3268 if (dump_file)
3269 fprintf (dump_file, " Creating a specialized node of %s/%i.\n",
3270 cgraph_node_name (node), node->uid);
3272 callers = gather_edges_for_value (val, caller_count);
3273 kv = VEC_copy (tree, heap, known_csts);
3274 move_binfos_to_values (kv, known_binfos);
3275 if (offset == -1)
3276 VEC_replace (tree, kv, index, val->value);
3277 find_more_scalar_values_for_callers_subset (node, kv, callers);
3278 aggvals = find_aggregate_values_for_callers_subset (node, callers);
3279 gcc_checking_assert (offset == -1
3280 || ipcp_val_in_agg_replacements_p (aggvals, index,
3281 offset, val->value));
3282 val->spec_node = create_specialized_node (node, kv, aggvals, callers);
3283 overall_size += val->local_size_cost;
3285 /* TODO: If for some lattice there is only one other known value
3286 left, make a special node for it too. */
3288 return true;
3291 /* Decide whether and what specialized clones of NODE should be created. */
3293 static bool
3294 decide_whether_version_node (struct cgraph_node *node)
3296 struct ipa_node_params *info = IPA_NODE_REF (node);
3297 int i, count = ipa_get_param_count (info);
3298 VEC (tree, heap) *known_csts, *known_binfos;
3299 VEC (ipa_agg_jump_function_t, heap) *known_aggs = NULL;
3300 bool ret = false;
3302 if (count == 0)
3303 return false;
3305 if (dump_file && (dump_flags & TDF_DETAILS))
3306 fprintf (dump_file, "\nEvaluating opportunities for %s/%i.\n",
3307 cgraph_node_name (node), node->uid);
3309 gather_context_independent_values (info, &known_csts, &known_binfos,
3310 info->clone_for_all_contexts ? &known_aggs
3311 : NULL, NULL);
3313 for (i = 0; i < count ;i++)
3315 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
3316 struct ipcp_lattice *lat = &plats->itself;
3317 struct ipcp_value *val;
3319 if (!lat->bottom
3320 && !VEC_index (tree, known_csts, i)
3321 && !VEC_index (tree, known_binfos, i))
3322 for (val = lat->values; val; val = val->next)
3323 ret |= decide_about_value (node, i, -1, val, known_csts,
3324 known_binfos);
3326 if (!plats->aggs_bottom || !plats->aggs)
3328 struct ipcp_agg_lattice *aglat;
3329 struct ipcp_value *val;
3330 for (aglat = plats->aggs; aglat; aglat = aglat->next)
3331 if (!aglat->bottom && aglat->values
3332 /* If the following is false, the one value is in
3333 known_aggs. */
3334 && (plats->aggs_contain_variable
3335 || !ipa_lat_is_single_const (aglat)))
3336 for (val = aglat->values; val; val = val->next)
3337 ret |= decide_about_value (node, i, aglat->offset, val,
3338 known_csts, known_binfos);
3340 info = IPA_NODE_REF (node);
3343 if (info->clone_for_all_contexts)
3345 VEC (cgraph_edge_p, heap) *callers;
3347 if (dump_file)
3348 fprintf (dump_file, " - Creating a specialized node of %s/%i "
3349 "for all known contexts.\n", cgraph_node_name (node),
3350 node->uid);
3352 callers = collect_callers_of_node (node);
3353 move_binfos_to_values (known_csts, known_binfos);
3354 create_specialized_node (node, known_csts,
3355 known_aggs_to_agg_replacement_list (known_aggs),
3356 callers);
3357 info = IPA_NODE_REF (node);
3358 info->clone_for_all_contexts = false;
3359 ret = true;
3361 else
3362 VEC_free (tree, heap, known_csts);
3364 VEC_free (tree, heap, known_binfos);
3365 return ret;
3368 /* Transitively mark all callees of NODE within the same SCC as not dead. */
3370 static void
3371 spread_undeadness (struct cgraph_node *node)
3373 struct cgraph_edge *cs;
3375 for (cs = node->callees; cs; cs = cs->next_callee)
3376 if (edge_within_scc (cs))
3378 struct cgraph_node *callee;
3379 struct ipa_node_params *info;
3381 callee = cgraph_function_node (cs->callee, NULL);
3382 info = IPA_NODE_REF (callee);
3384 if (info->node_dead)
3386 info->node_dead = 0;
3387 spread_undeadness (callee);
3392 /* Return true if NODE has a caller from outside of its SCC that is not
3393 dead. Worker callback for cgraph_for_node_and_aliases. */
3395 static bool
3396 has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
3397 void *data ATTRIBUTE_UNUSED)
3399 struct cgraph_edge *cs;
3401 for (cs = node->callers; cs; cs = cs->next_caller)
3402 if (cs->caller->thunk.thunk_p
3403 && cgraph_for_node_and_aliases (cs->caller,
3404 has_undead_caller_from_outside_scc_p,
3405 NULL, true))
3406 return true;
3407 else if (!edge_within_scc (cs)
3408 && !IPA_NODE_REF (cs->caller)->node_dead)
3409 return true;
3410 return false;
3414 /* Identify nodes within the same SCC as NODE which are no longer needed
3415 because of new clones and will be removed as unreachable. */
3417 static void
3418 identify_dead_nodes (struct cgraph_node *node)
3420 struct cgraph_node *v;
3421 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3422 if (cgraph_will_be_removed_from_program_if_no_direct_calls (v)
3423 && !cgraph_for_node_and_aliases (v,
3424 has_undead_caller_from_outside_scc_p,
3425 NULL, true))
3426 IPA_NODE_REF (v)->node_dead = 1;
3428 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3429 if (!IPA_NODE_REF (v)->node_dead)
3430 spread_undeadness (v);
3432 if (dump_file && (dump_flags & TDF_DETAILS))
3434 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3435 if (IPA_NODE_REF (v)->node_dead)
3436 fprintf (dump_file, " Marking node as dead: %s/%i.\n",
3437 cgraph_node_name (v), v->uid);
3441 /* The decision stage. Iterate over the topological order of call graph nodes
3442 TOPO and make specialized clones if deemed beneficial. */
3444 static void
3445 ipcp_decision_stage (struct topo_info *topo)
3447 int i;
3449 if (dump_file)
3450 fprintf (dump_file, "\nIPA decision stage:\n\n");
3452 for (i = topo->nnodes - 1; i >= 0; i--)
3454 struct cgraph_node *node = topo->order[i];
3455 bool change = false, iterate = true;
3457 while (iterate)
3459 struct cgraph_node *v;
3460 iterate = false;
3461 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3462 if (cgraph_function_with_gimple_body_p (v)
3463 && ipcp_versionable_function_p (v))
3464 iterate |= decide_whether_version_node (v);
3466 change |= iterate;
3468 if (change)
3469 identify_dead_nodes (node);
3473 /* The IPCP driver. */
3475 static unsigned int
3476 ipcp_driver (void)
3478 struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
3479 struct topo_info topo;
3481 ipa_check_create_node_params ();
3482 ipa_check_create_edge_args ();
3483 grow_next_edge_clone_vector ();
3484 edge_duplication_hook_holder =
3485 cgraph_add_edge_duplication_hook (&ipcp_edge_duplication_hook, NULL);
3486 ipcp_values_pool = create_alloc_pool ("IPA-CP values",
3487 sizeof (struct ipcp_value), 32);
3488 ipcp_sources_pool = create_alloc_pool ("IPA-CP value sources",
3489 sizeof (struct ipcp_value_source), 64);
3490 ipcp_agg_lattice_pool = create_alloc_pool ("IPA_CP aggregate lattices",
3491 sizeof (struct ipcp_agg_lattice),
3492 32);
3493 if (dump_file)
3495 fprintf (dump_file, "\nIPA structures before propagation:\n");
3496 if (dump_flags & TDF_DETAILS)
3497 ipa_print_all_params (dump_file);
3498 ipa_print_all_jump_functions (dump_file);
3501 /* Topological sort. */
3502 build_toporder_info (&topo);
3503 /* Do the interprocedural propagation. */
3504 ipcp_propagate_stage (&topo);
3505 /* Decide what constant propagation and cloning should be performed. */
3506 ipcp_decision_stage (&topo);
3508 /* Free all IPCP structures. */
3509 free_toporder_info (&topo);
3510 VEC_free (cgraph_edge_p, heap, next_edge_clone);
3511 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3512 ipa_free_all_structures_after_ipa_cp ();
3513 if (dump_file)
3514 fprintf (dump_file, "\nIPA constant propagation end\n");
3515 return 0;
3518 /* Initialization and computation of IPCP data structures. This is the initial
3519 intraprocedural analysis of functions, which gathers information to be
3520 propagated later on. */
3522 static void
3523 ipcp_generate_summary (void)
3525 struct cgraph_node *node;
3527 if (dump_file)
3528 fprintf (dump_file, "\nIPA constant propagation start:\n");
3529 ipa_register_cgraph_hooks ();
3531 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
3533 node->local.versionable
3534 = tree_versionable_function_p (node->symbol.decl);
3535 ipa_analyze_node (node);
3539 /* Write the ipcp summary (jump functions) for the nodes being streamed out. */
3541 static void
3542 ipcp_write_summary (void)
3544 ipa_prop_write_jump_functions ();
3547 /* Read ipcp summary. */
3549 static void
3550 ipcp_read_summary (void)
3552 ipa_prop_read_jump_functions ();
3555 /* Gate for IPCP optimization. */
3557 static bool
3558 cgraph_gate_cp (void)
3560 /* FIXME: We should remove the optimize check after we ensure we never run
3561 IPA passes when not optimizing. */
3562 return flag_ipa_cp && optimize;
3565 struct ipa_opt_pass_d pass_ipa_cp =
3568 IPA_PASS,
3569 "cp", /* name */
3570 OPTGROUP_NONE, /* optinfo_flags */
3571 cgraph_gate_cp, /* gate */
3572 ipcp_driver, /* execute */
3573 NULL, /* sub */
3574 NULL, /* next */
3575 0, /* static_pass_number */
3576 TV_IPA_CONSTANT_PROP, /* tv_id */
3577 0, /* properties_required */
3578 0, /* properties_provided */
3579 0, /* properties_destroyed */
3580 0, /* todo_flags_start */
3581 TODO_dump_symtab |
3582 TODO_remove_functions | TODO_ggc_collect /* todo_flags_finish */
3584 ipcp_generate_summary, /* generate_summary */
3585 ipcp_write_summary, /* write_summary */
3586 ipcp_read_summary, /* read_summary */
3587 ipa_prop_write_all_agg_replacement, /* write_optimization_summary */
3588 ipa_prop_read_all_agg_replacement, /* read_optimization_summary */
3589 NULL, /* stmt_fixup */
3590 0, /* TODOs */
3591 ipcp_transform_function, /* function_transform */
3592 NULL, /* variable_transform */