[official-gcc.git] / gcc / ipa-cp.c
1 /* Interprocedural constant propagation
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
5 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
6 <mjambor@suse.cz>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 /* Interprocedural constant propagation (IPA-CP).
26 The goal of this transformation is to
28 1) discover functions which are always invoked with some arguments with the
29 same known constant values and modify the functions so that the
30 subsequent optimizations can take advantage of the knowledge, and
32 2) partial specialization - create specialized versions of functions
33 transformed in this way if some parameters are known constants only in
 34 certain contexts but the estimated tradeoff between speedup and code size growth
35 is deemed good.
37 The algorithm also propagates types and attempts to perform type based
38 devirtualization. Types are propagated much like constants.
40 The algorithm basically consists of three stages. In the first, functions
41 are analyzed one at a time and jump functions are constructed for all known
42 call-sites. In the second phase, the pass propagates information from the
43 jump functions across the call to reveal what values are available at what
44 call sites, performs estimations of effects of known values on functions and
45 their callees, and finally decides what specialized extra versions should be
46 created. In the third, the special versions materialize and appropriate
47 calls are redirected.
49 The algorithm used is to a certain extent based on "Interprocedural Constant
50 Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
51 Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
52 Cooper, Mary W. Hall, and Ken Kennedy.
55 First stage - intraprocedural analysis
56 =======================================
58 This phase computes jump_function and modification flags.
 60 A jump function for a call-site represents the values passed as actual
61 arguments of a given call-site. In principle, there are three types of
62 values:
64 Pass through - the caller's formal parameter is passed as an actual
 65 argument, possibly with an operation performed on it.
66 Constant - a constant is passed as an actual argument.
67 Unknown - neither of the above.
69 All jump function types are described in detail in ipa-prop.h, together with
70 the data structures that represent them and methods of accessing them.
72 ipcp_generate_summary() is the main function of the first stage.
74 Second stage - interprocedural analysis
75 ========================================
77 This stage is itself divided into two phases. In the first, we propagate
78 known values over the call graph, in the second, we make cloning decisions.
 79 It uses a different algorithm than the original paper by Callahan et al.
81 First, we traverse the functions topologically from callers to callees and,
82 for each strongly connected component (SCC), we propagate constants
83 according to previously computed jump functions. We also record what known
84 values depend on other known values and estimate local effects. Finally, we
85 propagate cumulative information about these effects from dependent values
86 to those on which they depend.
88 Second, we again traverse the call graph in the same topological order and
89 make clones for functions which we know are called with the same values in
90 all contexts and decide about extra specialized clones of functions just for
91 some contexts - these decisions are based on both local estimates and
92 cumulative estimates propagated from callees.
94 ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
 95 second stage.
97 Third phase - materialization of clones, call statement updates.
98 ============================================
100 This stage is currently performed by call graph code (mainly in cgraphunit.c
101 and tree-inline.c) according to instructions inserted to the call graph by
102 the second stage. */
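/* Purely illustrative sketch (hypothetical code, not part of GCC): a
   translation unit in which all three stages have something to do.

     static int scale (int x, int factor)
     {
       return x * factor;
     }

     int f (int a) { return scale (a, 4); }
     int g (int b) { return scale (b, 4); }

   The first stage builds, for both call sites of scale, a constant jump
   function (value 4) for the second argument and a pass-through jump
   function for the first one.  The second stage observes that factor is 4
   in every known context and estimates the benefit of specializing scale
   for that value.  The third stage materializes the clone with factor
   replaced by 4 and redirects both calls to it, so that subsequent
   optimizations can fold the multiplication.  */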
104 #include "config.h"
105 #include "system.h"
106 #include "coretypes.h"
107 #include "tree.h"
108 #include "target.h"
109 #include "gimple.h"
110 #include "cgraph.h"
111 #include "ipa-prop.h"
112 #include "tree-flow.h"
113 #include "tree-pass.h"
114 #include "flags.h"
115 #include "diagnostic.h"
116 #include "tree-pretty-print.h"
117 #include "tree-inline.h"
118 #include "params.h"
119 #include "ipa-inline.h"
120 #include "ipa-utils.h"
122 struct ipcp_value;
124 /* Describes a particular source for an IPA-CP value. */
126 struct ipcp_value_source
 128 /* Aggregate offset of the source, negative if the source is the scalar value of
129 the argument itself. */
130 HOST_WIDE_INT offset;
131 /* The incoming edge that brought the value. */
132 struct cgraph_edge *cs;
 133 /* If the jump function that resulted in this value was a pass-through or an
134 ancestor, this is the ipcp_value of the caller from which the described
135 value has been derived. Otherwise it is NULL. */
136 struct ipcp_value *val;
137 /* Next pointer in a linked list of sources of a value. */
138 struct ipcp_value_source *next;
 139 /* If the jump function that resulted in this value was a pass-through or an
140 ancestor, this is the index of the parameter of the caller the jump
141 function references. */
142 int index;
145 /* Describes one particular value stored in struct ipcp_lattice. */
147 struct ipcp_value
149 /* The actual value for the given parameter. This is either an IPA invariant
150 or a TREE_BINFO describing a type that can be used for
151 devirtualization. */
152 tree value;
153 /* The list of sources from which this value originates. */
154 struct ipcp_value_source *sources;
155 /* Next pointers in a linked list of all values in a lattice. */
156 struct ipcp_value *next;
157 /* Next pointers in a linked list of values in a strongly connected component
158 of values. */
159 struct ipcp_value *scc_next;
160 /* Next pointers in a linked list of SCCs of values sorted topologically
 161 according to their sources. */
162 struct ipcp_value *topo_next;
163 /* A specialized node created for this value, NULL if none has been (so far)
164 created. */
165 struct cgraph_node *spec_node;
166 /* Depth first search number and low link for topological sorting of
167 values. */
168 int dfs, low_link;
169 /* Time benefit and size cost that specializing the function for this value
170 would bring about in this function alone. */
171 int local_time_benefit, local_size_cost;
172 /* Time benefit and size cost that specializing the function for this value
 173 can bring about in its callees (transitively). */
174 int prop_time_benefit, prop_size_cost;
 175 /* True if this value is currently on the topo-sort stack. */
176 bool on_stack;
179 /* Lattice describing potential values of a formal parameter of a function, or
 180 a part of an aggregate. TOP is represented by a lattice with zero values
181 and with contains_variable and bottom flags cleared. BOTTOM is represented
182 by a lattice with the bottom flag set. In that case, values and
183 contains_variable flag should be disregarded. */
185 struct ipcp_lattice
187 /* The list of known values and types in this lattice. Note that values are
188 not deallocated if a lattice is set to bottom because there may be value
189 sources referencing them. */
190 struct ipcp_value *values;
191 /* Number of known values and types in this lattice. */
192 int values_count;
193 /* The lattice contains a variable component (in addition to values). */
194 bool contains_variable;
195 /* The value of the lattice is bottom (i.e. variable and unusable for any
196 propagation). */
197 bool bottom;
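/* Illustrative example (hypothetical callers, not taken from GCC sources):
   given

     extern void callee (int);

     void a (void) { callee (2); }
     void b (void) { callee (3); }
     void c (int i) { callee (i); }

   the scalar lattice of callee's parameter ends up holding the two
   ipcp_value entries 2 and 3 and has contains_variable set because of the
   call from c.  If the number of distinct constants reached
   PARAM_IPA_CP_VALUE_LIST_SIZE, the lattice would instead be dropped to
   BOTTOM (see add_value_to_lattice below).  */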
200 /* Lattice with an offset to describe a part of an aggregate. */
202 struct ipcp_agg_lattice : public ipcp_lattice
204 /* Offset that is being described by this lattice. */
205 HOST_WIDE_INT offset;
206 /* Size so that we don't have to re-compute it every time we traverse the
207 list. Must correspond to TYPE_SIZE of all lat values. */
208 HOST_WIDE_INT size;
209 /* Next element of the linked list. */
210 struct ipcp_agg_lattice *next;
213 /* Structure containing lattices for a parameter itself and for pieces of
 214 aggregates that are passed in the parameter or by reference through the
 215 parameter, plus some other useful flags. */
217 struct ipcp_param_lattices
219 /* Lattice describing the value of the parameter itself. */
220 struct ipcp_lattice itself;
221 /* Lattices describing aggregate parts. */
222 struct ipcp_agg_lattice *aggs;
223 /* Number of aggregate lattices */
224 int aggs_count;
225 /* True if aggregate data were passed by reference (as opposed to by
226 value). */
227 bool aggs_by_ref;
228 /* All aggregate lattices contain a variable component (in addition to
229 values). */
230 bool aggs_contain_variable;
231 /* The value of all aggregate lattices is bottom (i.e. variable and unusable
232 for any propagation). */
233 bool aggs_bottom;
235 /* There is a virtual call based on this parameter. */
236 bool virt_call;
239 /* Allocation pools for values and their sources in ipa-cp. */
241 alloc_pool ipcp_values_pool;
242 alloc_pool ipcp_sources_pool;
243 alloc_pool ipcp_agg_lattice_pool;
245 /* Maximal count found in program. */
247 static gcov_type max_count;
249 /* Original overall size of the program. */
251 static long overall_size, max_new_size;
253 /* Head of the linked list of topologically sorted values. */
255 static struct ipcp_value *values_topo;
257 /* Return the param lattices structure corresponding to the Ith formal
258 parameter of the function described by INFO. */
259 static inline struct ipcp_param_lattices *
260 ipa_get_parm_lattices (struct ipa_node_params *info, int i)
262 gcc_assert (i >= 0 && i < ipa_get_param_count (info));
263 gcc_checking_assert (!info->ipcp_orig_node);
264 gcc_checking_assert (info->lattices);
265 return &(info->lattices[i]);
268 /* Return the lattice corresponding to the scalar value of the Ith formal
269 parameter of the function described by INFO. */
270 static inline struct ipcp_lattice *
271 ipa_get_scalar_lat (struct ipa_node_params *info, int i)
273 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
274 return &plats->itself;
277 /* Return whether LAT is a lattice with a single constant and without an
278 undefined value. */
280 static inline bool
281 ipa_lat_is_single_const (struct ipcp_lattice *lat)
283 if (lat->bottom
284 || lat->contains_variable
285 || lat->values_count != 1)
286 return false;
287 else
288 return true;
291 /* Return true iff the CS is an edge within a strongly connected component as
292 computed by ipa_reduced_postorder. */
294 static inline bool
295 edge_within_scc (struct cgraph_edge *cs)
297 struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->symbol.aux;
298 struct ipa_dfs_info *callee_dfs;
299 struct cgraph_node *callee = cgraph_function_node (cs->callee, NULL);
301 callee_dfs = (struct ipa_dfs_info *) callee->symbol.aux;
302 return (caller_dfs
303 && callee_dfs
304 && caller_dfs->scc_no == callee_dfs->scc_no);
307 /* Print V which is extracted from a value in a lattice to F. */
309 static void
310 print_ipcp_constant_value (FILE * f, tree v)
312 if (TREE_CODE (v) == TREE_BINFO)
314 fprintf (f, "BINFO ");
315 print_generic_expr (f, BINFO_TYPE (v), 0);
317 else if (TREE_CODE (v) == ADDR_EXPR
318 && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
320 fprintf (f, "& ");
321 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)), 0);
323 else
324 print_generic_expr (f, v, 0);
327 /* Print a lattice LAT to F. */
329 static void
330 print_lattice (FILE * f, struct ipcp_lattice *lat,
331 bool dump_sources, bool dump_benefits)
333 struct ipcp_value *val;
334 bool prev = false;
336 if (lat->bottom)
338 fprintf (f, "BOTTOM\n");
339 return;
342 if (!lat->values_count && !lat->contains_variable)
344 fprintf (f, "TOP\n");
345 return;
348 if (lat->contains_variable)
350 fprintf (f, "VARIABLE");
351 prev = true;
352 if (dump_benefits)
353 fprintf (f, "\n");
356 for (val = lat->values; val; val = val->next)
358 if (dump_benefits && prev)
359 fprintf (f, " ");
360 else if (!dump_benefits && prev)
361 fprintf (f, ", ");
362 else
363 prev = true;
365 print_ipcp_constant_value (f, val->value);
367 if (dump_sources)
369 struct ipcp_value_source *s;
371 fprintf (f, " [from:");
372 for (s = val->sources; s; s = s->next)
373 fprintf (f, " %i(%i)", s->cs->caller->uid,s->cs->frequency);
374 fprintf (f, "]");
377 if (dump_benefits)
378 fprintf (f, " [loc_time: %i, loc_size: %i, "
379 "prop_time: %i, prop_size: %i]\n",
380 val->local_time_benefit, val->local_size_cost,
381 val->prop_time_benefit, val->prop_size_cost);
383 if (!dump_benefits)
384 fprintf (f, "\n");
387 /* Print all ipcp_lattices of all functions to F. */
389 static void
390 print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
392 struct cgraph_node *node;
393 int i, count;
395 fprintf (f, "\nLattices:\n");
396 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
398 struct ipa_node_params *info;
400 info = IPA_NODE_REF (node);
401 fprintf (f, " Node: %s/%i:\n", cgraph_node_name (node), node->uid);
402 count = ipa_get_param_count (info);
403 for (i = 0; i < count; i++)
405 struct ipcp_agg_lattice *aglat;
406 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
407 fprintf (f, " param [%d]: ", i);
408 print_lattice (f, &plats->itself, dump_sources, dump_benefits);
410 if (plats->virt_call)
411 fprintf (f, " virt_call flag set\n");
413 if (plats->aggs_bottom)
415 fprintf (f, " AGGS BOTTOM\n");
416 continue;
418 if (plats->aggs_contain_variable)
419 fprintf (f, " AGGS VARIABLE\n");
420 for (aglat = plats->aggs; aglat; aglat = aglat->next)
422 fprintf (f, " %soffset " HOST_WIDE_INT_PRINT_DEC ": ",
423 plats->aggs_by_ref ? "ref " : "", aglat->offset);
424 print_lattice (f, aglat, dump_sources, dump_benefits);
430 /* Determine whether it is at all technically possible to create clones of NODE
431 and store this information in the ipa_node_params structure associated
432 with NODE. */
434 static void
435 determine_versionability (struct cgraph_node *node)
437 const char *reason = NULL;
439 /* There are a number of generic reasons functions cannot be versioned. We
440 also cannot remove parameters if there are type attributes such as fnspec
441 present. */
442 if (node->alias || node->thunk.thunk_p)
443 reason = "alias or thunk";
444 else if (!node->local.versionable)
445 reason = "not a tree_versionable_function";
446 else if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
447 reason = "insufficient body availability";
449 if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
450 fprintf (dump_file, "Function %s/%i is not versionable, reason: %s.\n",
451 cgraph_node_name (node), node->uid, reason);
453 node->local.versionable = (reason == NULL);
456 /* Return true if it is at all technically possible to create clones of a
457 NODE. */
459 static bool
460 ipcp_versionable_function_p (struct cgraph_node *node)
462 return node->local.versionable;
465 /* Structure holding accumulated information about callers of a node. */
467 struct caller_statistics
469 gcov_type count_sum;
470 int n_calls, n_hot_calls, freq_sum;
473 /* Initialize fields of STAT to zeroes. */
475 static inline void
476 init_caller_stats (struct caller_statistics *stats)
478 stats->count_sum = 0;
479 stats->n_calls = 0;
480 stats->n_hot_calls = 0;
481 stats->freq_sum = 0;
484 /* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
485 non-thunk incoming edges to NODE. */
487 static bool
488 gather_caller_stats (struct cgraph_node *node, void *data)
490 struct caller_statistics *stats = (struct caller_statistics *) data;
491 struct cgraph_edge *cs;
493 for (cs = node->callers; cs; cs = cs->next_caller)
494 if (cs->caller->thunk.thunk_p)
495 cgraph_for_node_and_aliases (cs->caller, gather_caller_stats,
496 stats, false);
497 else
499 stats->count_sum += cs->count;
500 stats->freq_sum += cs->frequency;
501 stats->n_calls++;
502 if (cgraph_maybe_hot_edge_p (cs))
503 stats->n_hot_calls ++;
505 return false;
 509 /* Return true if this NODE is a viable candidate for cloning. */
511 static bool
512 ipcp_cloning_candidate_p (struct cgraph_node *node)
514 struct caller_statistics stats;
516 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
518 if (!flag_ipa_cp_clone)
520 if (dump_file)
521 fprintf (dump_file, "Not considering %s for cloning; "
522 "-fipa-cp-clone disabled.\n",
523 cgraph_node_name (node));
524 return false;
527 if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->symbol.decl)))
529 if (dump_file)
530 fprintf (dump_file, "Not considering %s for cloning; "
531 "optimizing it for size.\n",
532 cgraph_node_name (node));
533 return false;
536 init_caller_stats (&stats);
537 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
539 if (inline_summary (node)->self_size < stats.n_calls)
541 if (dump_file)
542 fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
543 cgraph_node_name (node));
544 return true;
547 /* When profile is available and function is hot, propagate into it even if
 548 calls seem cold; constant propagation can improve the function's speed
549 significantly. */
550 if (max_count)
552 if (stats.count_sum > node->count * 90 / 100)
554 if (dump_file)
555 fprintf (dump_file, "Considering %s for cloning; "
556 "usually called directly.\n",
557 cgraph_node_name (node));
558 return true;
561 if (!stats.n_hot_calls)
563 if (dump_file)
564 fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
565 cgraph_node_name (node));
566 return false;
568 if (dump_file)
569 fprintf (dump_file, "Considering %s for cloning.\n",
570 cgraph_node_name (node));
571 return true;
574 /* Arrays representing a topological ordering of call graph nodes and a stack
 575 of nodes used during constant propagation. */
577 struct topo_info
579 struct cgraph_node **order;
580 struct cgraph_node **stack;
581 int nnodes, stack_top;
584 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
586 static void
587 build_toporder_info (struct topo_info *topo)
589 topo->order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
590 topo->stack = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
591 topo->stack_top = 0;
592 topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
595 /* Free information about strongly connected components and the arrays in
596 TOPO. */
598 static void
599 free_toporder_info (struct topo_info *topo)
601 ipa_free_postorder_info ();
602 free (topo->order);
603 free (topo->stack);
606 /* Add NODE to the stack in TOPO, unless it is already there. */
608 static inline void
609 push_node_to_stack (struct topo_info *topo, struct cgraph_node *node)
611 struct ipa_node_params *info = IPA_NODE_REF (node);
612 if (info->node_enqueued)
613 return;
614 info->node_enqueued = 1;
615 topo->stack[topo->stack_top++] = node;
618 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
619 is empty. */
621 static struct cgraph_node *
622 pop_node_from_stack (struct topo_info *topo)
624 if (topo->stack_top)
626 struct cgraph_node *node;
627 topo->stack_top--;
628 node = topo->stack[topo->stack_top];
629 IPA_NODE_REF (node)->node_enqueued = 0;
630 return node;
632 else
633 return NULL;
636 /* Set lattice LAT to bottom and return true if it previously was not set as
637 such. */
639 static inline bool
640 set_lattice_to_bottom (struct ipcp_lattice *lat)
642 bool ret = !lat->bottom;
643 lat->bottom = true;
644 return ret;
647 /* Mark lattice as containing an unknown value and return true if it previously
648 was not marked as such. */
650 static inline bool
651 set_lattice_contains_variable (struct ipcp_lattice *lat)
653 bool ret = !lat->contains_variable;
654 lat->contains_variable = true;
655 return ret;
 658 /* Set all aggregate lattices in PLATS to bottom and return true if they were
659 not previously set as such. */
661 static inline bool
662 set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
664 bool ret = !plats->aggs_bottom;
665 plats->aggs_bottom = true;
666 return ret;
 669 /* Mark all aggregate lattices in PLATS as containing an unknown value and
670 return true if they were not previously marked as such. */
672 static inline bool
673 set_agg_lats_contain_variable (struct ipcp_param_lattices *plats)
675 bool ret = !plats->aggs_contain_variable;
676 plats->aggs_contain_variable = true;
677 return ret;
 680 /* Mark both aggregate and scalar lattices as containing an unknown value and
 681 return true if any of them has not been marked as such so far. */
683 static inline bool
684 set_all_contains_variable (struct ipcp_param_lattices *plats)
686 bool ret = !plats->itself.contains_variable || !plats->aggs_contain_variable;
687 plats->itself.contains_variable = true;
688 plats->aggs_contain_variable = true;
689 return ret;
692 /* Initialize ipcp_lattices. */
694 static void
695 initialize_node_lattices (struct cgraph_node *node)
697 struct ipa_node_params *info = IPA_NODE_REF (node);
698 struct cgraph_edge *ie;
699 bool disable = false, variable = false;
700 int i;
702 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
703 if (!node->local.local)
705 /* When cloning is allowed, we can assume that externally visible
 706 functions are not called. We will compensate for this by cloning
707 later. */
708 if (ipcp_versionable_function_p (node)
709 && ipcp_cloning_candidate_p (node))
710 variable = true;
711 else
712 disable = true;
715 if (disable || variable)
717 for (i = 0; i < ipa_get_param_count (info) ; i++)
719 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
720 if (disable)
722 set_lattice_to_bottom (&plats->itself);
723 set_agg_lats_to_bottom (plats);
725 else
726 set_all_contains_variable (plats);
728 if (dump_file && (dump_flags & TDF_DETAILS)
 729 && !node->alias && !node->thunk.thunk_p)
730 fprintf (dump_file, "Marking all lattices of %s/%i as %s\n",
731 cgraph_node_name (node), node->uid,
732 disable ? "BOTTOM" : "VARIABLE");
735 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
736 if (ie->indirect_info->polymorphic)
738 gcc_checking_assert (ie->indirect_info->param_index >= 0);
739 ipa_get_parm_lattices (info,
740 ie->indirect_info->param_index)->virt_call = 1;
744 /* Return the result of a (possibly arithmetic) pass through jump function
745 JFUNC on the constant value INPUT. Return NULL_TREE if that cannot be
 746 determined or cannot be considered an interprocedural invariant. */
748 static tree
749 ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
751 tree restype, res;
753 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
754 return input;
755 else if (TREE_CODE (input) == TREE_BINFO)
756 return NULL_TREE;
758 gcc_checking_assert (is_gimple_ip_invariant (input));
759 if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc))
760 == tcc_comparison)
761 restype = boolean_type_node;
762 else
763 restype = TREE_TYPE (input);
764 res = fold_binary (ipa_get_jf_pass_through_operation (jfunc), restype,
765 input, ipa_get_jf_pass_through_operand (jfunc));
767 if (res && !is_gimple_ip_invariant (res))
768 return NULL_TREE;
770 return res;
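/* Example (hypothetical caller, not taken from GCC sources): in

     extern void callee (int);
     void caller (int x) { callee (x + 1); }

   the argument of callee is described by a pass-through jump function with
   operation PLUS_EXPR and operand 1.  If propagation later determines that
   x is the constant 7 in some context, the function above folds 7 + 1 and
   yields the interprocedural invariant 8 for that context.  */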
773 /* Return the result of an ancestor jump function JFUNC on the constant value
774 INPUT. Return NULL_TREE if that cannot be determined. */
776 static tree
777 ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
779 if (TREE_CODE (input) == TREE_BINFO)
780 return get_binfo_at_offset (input,
781 ipa_get_jf_ancestor_offset (jfunc),
782 ipa_get_jf_ancestor_type (jfunc));
783 else if (TREE_CODE (input) == ADDR_EXPR)
785 tree t = TREE_OPERAND (input, 0);
786 t = build_ref_for_offset (EXPR_LOCATION (t), t,
787 ipa_get_jf_ancestor_offset (jfunc),
788 ipa_get_jf_ancestor_type (jfunc), NULL, false);
789 return build_fold_addr_expr (t);
791 else
792 return NULL_TREE;
 795 /* Extract the actual BINFO being described by JFUNC which must be a known type
796 jump function. */
798 static tree
799 ipa_value_from_known_type_jfunc (struct ipa_jump_func *jfunc)
801 tree base_binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
802 if (!base_binfo)
803 return NULL_TREE;
804 return get_binfo_at_offset (base_binfo,
805 ipa_get_jf_known_type_offset (jfunc),
806 ipa_get_jf_known_type_component_type (jfunc));
809 /* Determine whether JFUNC evaluates to a known value (that is either a
810 constant or a binfo) and if so, return it. Otherwise return NULL. INFO
811 describes the caller node so that pass-through jump functions can be
812 evaluated. */
814 tree
815 ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
817 if (jfunc->type == IPA_JF_CONST)
818 return ipa_get_jf_constant (jfunc);
819 else if (jfunc->type == IPA_JF_KNOWN_TYPE)
820 return ipa_value_from_known_type_jfunc (jfunc);
821 else if (jfunc->type == IPA_JF_PASS_THROUGH
822 || jfunc->type == IPA_JF_ANCESTOR)
824 tree input;
825 int idx;
827 if (jfunc->type == IPA_JF_PASS_THROUGH)
828 idx = ipa_get_jf_pass_through_formal_id (jfunc);
829 else
830 idx = ipa_get_jf_ancestor_formal_id (jfunc);
832 if (info->ipcp_orig_node)
833 input = VEC_index (tree, info->known_vals, idx);
834 else
836 struct ipcp_lattice *lat;
838 if (!info->lattices)
840 gcc_checking_assert (!flag_ipa_cp);
841 return NULL_TREE;
843 lat = ipa_get_scalar_lat (info, idx);
844 if (!ipa_lat_is_single_const (lat))
845 return NULL_TREE;
846 input = lat->values->value;
849 if (!input)
850 return NULL_TREE;
852 if (jfunc->type == IPA_JF_PASS_THROUGH)
853 return ipa_get_jf_pass_through_result (jfunc, input);
854 else
855 return ipa_get_jf_ancestor_result (jfunc, input);
857 else
858 return NULL_TREE;
862 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
863 bottom, not containing a variable component and without any known value at
864 the same time. */
866 DEBUG_FUNCTION void
867 ipcp_verify_propagated_values (void)
869 struct cgraph_node *node;
871 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
873 struct ipa_node_params *info = IPA_NODE_REF (node);
874 int i, count = ipa_get_param_count (info);
876 for (i = 0; i < count; i++)
878 struct ipcp_lattice *lat = ipa_get_scalar_lat (info, i);
880 if (!lat->bottom
881 && !lat->contains_variable
882 && lat->values_count == 0)
884 if (dump_file)
886 fprintf (dump_file, "\nIPA lattices after constant "
887 "propagation:\n");
888 print_all_lattices (dump_file, true, false);
891 gcc_unreachable ();
897 /* Return true iff X and Y should be considered equal values by IPA-CP. */
899 static bool
900 values_equal_for_ipcp_p (tree x, tree y)
902 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
904 if (x == y)
905 return true;
907 if (TREE_CODE (x) == TREE_BINFO || TREE_CODE (y) == TREE_BINFO)
908 return false;
910 if (TREE_CODE (x) == ADDR_EXPR
911 && TREE_CODE (y) == ADDR_EXPR
912 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
913 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
914 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
915 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
916 else
917 return operand_equal_p (x, y, 0);
920 /* Add a new value source to VAL, marking that a value comes from edge CS and
921 (if the underlying jump function is a pass-through or an ancestor one) from
922 a caller value SRC_VAL of a caller parameter described by SRC_INDEX. OFFSET
 923 is negative if the source was the scalar value of the parameter itself,
 924 otherwise it is the offset within an aggregate. */
926 static void
927 add_value_source (struct ipcp_value *val, struct cgraph_edge *cs,
928 struct ipcp_value *src_val, int src_idx, HOST_WIDE_INT offset)
930 struct ipcp_value_source *src;
932 src = (struct ipcp_value_source *) pool_alloc (ipcp_sources_pool);
933 src->offset = offset;
934 src->cs = cs;
935 src->val = src_val;
936 src->index = src_idx;
938 src->next = val->sources;
939 val->sources = src;
942 /* Try to add NEWVAL to LAT, potentially creating a new struct ipcp_value for
 943 it. CS, SRC_VAL, SRC_INDEX and OFFSET are meant for add_value_source and
944 have the same meaning. */
946 static bool
947 add_value_to_lattice (struct ipcp_lattice *lat, tree newval,
948 struct cgraph_edge *cs, struct ipcp_value *src_val,
949 int src_idx, HOST_WIDE_INT offset)
951 struct ipcp_value *val;
953 if (lat->bottom)
954 return false;
956 for (val = lat->values; val; val = val->next)
957 if (values_equal_for_ipcp_p (val->value, newval))
959 if (edge_within_scc (cs))
961 struct ipcp_value_source *s;
962 for (s = val->sources; s ; s = s->next)
963 if (s->cs == cs)
964 break;
965 if (s)
966 return false;
969 add_value_source (val, cs, src_val, src_idx, offset);
970 return false;
973 if (lat->values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
975 /* We can only free sources, not the values themselves, because sources
 976 of other values in this SCC might point to them. */
977 for (val = lat->values; val; val = val->next)
979 while (val->sources)
981 struct ipcp_value_source *src = val->sources;
982 val->sources = src->next;
983 pool_free (ipcp_sources_pool, src);
987 lat->values = NULL;
988 return set_lattice_to_bottom (lat);
991 lat->values_count++;
992 val = (struct ipcp_value *) pool_alloc (ipcp_values_pool);
993 memset (val, 0, sizeof (*val));
995 add_value_source (val, cs, src_val, src_idx, offset);
996 val->value = newval;
997 val->next = lat->values;
998 lat->values = val;
999 return true;
1002 /* Like above but passes a special value of offset to distinguish that the
1003 origin is the scalar value of the parameter rather than a part of an
1004 aggregate. */
1006 static inline bool
1007 add_scalar_value_to_lattice (struct ipcp_lattice *lat, tree newval,
1008 struct cgraph_edge *cs,
1009 struct ipcp_value *src_val, int src_idx)
1011 return add_value_to_lattice (lat, newval, cs, src_val, src_idx, -1);
1014 /* Propagate values through a pass-through jump function JFUNC associated with
1015 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1016 is the index of the source parameter. */
1018 static bool
1019 propagate_vals_accross_pass_through (struct cgraph_edge *cs,
1020 struct ipa_jump_func *jfunc,
1021 struct ipcp_lattice *src_lat,
1022 struct ipcp_lattice *dest_lat,
1023 int src_idx)
1025 struct ipcp_value *src_val;
1026 bool ret = false;
1028 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
1029 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1030 ret |= add_scalar_value_to_lattice (dest_lat, src_val->value, cs,
1031 src_val, src_idx);
1032 /* Do not create new values when propagating within an SCC because if there
 1033 are arithmetic functions with circular dependencies, there is an infinite
1034 number of them and we would just make lattices bottom. */
1035 else if (edge_within_scc (cs))
1036 ret = set_lattice_contains_variable (dest_lat);
1037 else
1038 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1040 tree cstval = src_val->value;
1042 if (TREE_CODE (cstval) == TREE_BINFO)
1044 ret |= set_lattice_contains_variable (dest_lat);
1045 continue;
1047 cstval = ipa_get_jf_pass_through_result (jfunc, cstval);
1049 if (cstval)
1050 ret |= add_scalar_value_to_lattice (dest_lat, cstval, cs, src_val,
1051 src_idx);
1052 else
1053 ret |= set_lattice_contains_variable (dest_lat);
1056 return ret;
1059 /* Propagate values through an ancestor jump function JFUNC associated with
1060 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1061 is the index of the source parameter. */
1063 static bool
1064 propagate_vals_accross_ancestor (struct cgraph_edge *cs,
1065 struct ipa_jump_func *jfunc,
1066 struct ipcp_lattice *src_lat,
1067 struct ipcp_lattice *dest_lat,
1068 int src_idx)
1070 struct ipcp_value *src_val;
1071 bool ret = false;
1073 if (edge_within_scc (cs))
1074 return set_lattice_contains_variable (dest_lat);
1076 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1078 tree t = ipa_get_jf_ancestor_result (jfunc, src_val->value);
1080 if (t)
1081 ret |= add_scalar_value_to_lattice (dest_lat, t, cs, src_val, src_idx);
1082 else
1083 ret |= set_lattice_contains_variable (dest_lat);
1086 return ret;
1089 /* Propagate scalar values across jump function JFUNC that is associated with
1090 edge CS and put the values into DEST_LAT. */
1092 static bool
1093 propagate_scalar_accross_jump_function (struct cgraph_edge *cs,
1094 struct ipa_jump_func *jfunc,
1095 struct ipcp_lattice *dest_lat)
1097 if (dest_lat->bottom)
1098 return false;
1100 if (jfunc->type == IPA_JF_CONST
1101 || jfunc->type == IPA_JF_KNOWN_TYPE)
1103 tree val;
1105 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1107 val = ipa_value_from_known_type_jfunc (jfunc);
1108 if (!val)
1109 return set_lattice_contains_variable (dest_lat);
1111 else
1112 val = ipa_get_jf_constant (jfunc);
1113 return add_scalar_value_to_lattice (dest_lat, val, cs, NULL, 0);
1115 else if (jfunc->type == IPA_JF_PASS_THROUGH
1116 || jfunc->type == IPA_JF_ANCESTOR)
1118 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1119 struct ipcp_lattice *src_lat;
1120 int src_idx;
1121 bool ret;
1123 if (jfunc->type == IPA_JF_PASS_THROUGH)
1124 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1125 else
1126 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1128 src_lat = ipa_get_scalar_lat (caller_info, src_idx);
1129 if (src_lat->bottom)
1130 return set_lattice_contains_variable (dest_lat);
1132 /* If we would need to clone the caller and cannot, do not propagate. */
1133 if (!ipcp_versionable_function_p (cs->caller)
1134 && (src_lat->contains_variable
1135 || (src_lat->values_count > 1)))
1136 return set_lattice_contains_variable (dest_lat);
1138 if (jfunc->type == IPA_JF_PASS_THROUGH)
1139 ret = propagate_vals_accross_pass_through (cs, jfunc, src_lat,
1140 dest_lat, src_idx);
1141 else
1142 ret = propagate_vals_accross_ancestor (cs, jfunc, src_lat, dest_lat,
1143 src_idx);
1145 if (src_lat->contains_variable)
1146 ret |= set_lattice_contains_variable (dest_lat);
1148 return ret;
1151 /* TODO: We currently do not handle member method pointers in IPA-CP (we only
 1152 use them for indirect inlining); we should propagate them too. */
1153 return set_lattice_contains_variable (dest_lat);
1156 /* If DEST_PLATS already has aggregate items, check that aggs_by_ref matches
1157 NEW_AGGS_BY_REF and if not, mark all aggs as bottoms and return true (in all
1158 other cases, return false). If there are no aggregate items, set
1159 aggs_by_ref to NEW_AGGS_BY_REF. */
1161 static bool
1162 set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
1163 bool new_aggs_by_ref)
1165 if (dest_plats->aggs)
1167 if (dest_plats->aggs_by_ref != new_aggs_by_ref)
1169 set_agg_lats_to_bottom (dest_plats);
1170 return true;
1173 else
1174 dest_plats->aggs_by_ref = new_aggs_by_ref;
1175 return false;
 1178 /* Walk aggregate lattices in DEST_PLATS from ***AGLAT on, until ***AGLAT is an
 1179 already existing lattice for the given OFFSET and VAL_SIZE, marking all skipped
 1180 lattices as containing variable and checking for overlaps. If there is no
 1181 already existing lattice for the OFFSET and VAL_SIZE, create one, initialize
 1182 it with offset, size and contains_variable set to PRE_EXISTING, and return true,
 1183 unless there are too many already. If there are too many, return false. If
 1184 there are overlaps, turn the whole DEST_PLATS to bottom and return false. If any
 1185 skipped lattices were newly marked as containing variable, set *CHANGE to
 1186 true. */
1188 static bool
1189 merge_agg_lats_step (struct ipcp_param_lattices *dest_plats,
1190 HOST_WIDE_INT offset, HOST_WIDE_INT val_size,
1191 struct ipcp_agg_lattice ***aglat,
1192 bool pre_existing, bool *change)
1194 gcc_checking_assert (offset >= 0);
1196 while (**aglat && (**aglat)->offset < offset)
1198 if ((**aglat)->offset + (**aglat)->size > offset)
1200 set_agg_lats_to_bottom (dest_plats);
1201 return false;
1203 *change |= set_lattice_contains_variable (**aglat);
1204 *aglat = &(**aglat)->next;
1207 if (**aglat && (**aglat)->offset == offset)
1209 if ((**aglat)->size != val_size
1210 || ((**aglat)->next
1211 && (**aglat)->next->offset < offset + val_size))
1213 set_agg_lats_to_bottom (dest_plats);
1214 return false;
1216 gcc_checking_assert (!(**aglat)->next
1217 || (**aglat)->next->offset >= offset + val_size);
1218 return true;
1220 else
1222 struct ipcp_agg_lattice *new_al;
1224 if (**aglat && (**aglat)->offset < offset + val_size)
1226 set_agg_lats_to_bottom (dest_plats);
1227 return false;
1229 if (dest_plats->aggs_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1230 return false;
1231 dest_plats->aggs_count++;
1232 new_al = (struct ipcp_agg_lattice *) pool_alloc (ipcp_agg_lattice_pool);
1233 memset (new_al, 0, sizeof (*new_al));
1235 new_al->offset = offset;
1236 new_al->size = val_size;
1237 new_al->contains_variable = pre_existing;
1239 new_al->next = **aglat;
1240 **aglat = new_al;
1241 return true;
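/* Worked example for merge_agg_lats_step above (made-up offsets): if
   DEST_PLATS already has lattices at bit offsets 0 and 64, each 32 bits in
   size, a request for offset 64 and val_size 32 skips the lattice at offset
   0 (marking it as containing a variable) and returns true, leaving **AGLAT
   pointing at the existing lattice at offset 64.  A request for offset 16
   would overlap the lattice at offset 0, so all aggregate lattices are set
   to bottom and false is returned.  */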
1245 /* Set all AGLAT and all other aggregate lattices reachable by next pointers as
1246 containing an unknown value. */
1248 static bool
1249 set_chain_of_aglats_contains_variable (struct ipcp_agg_lattice *aglat)
1251 bool ret = false;
1252 while (aglat)
1254 ret |= set_lattice_contains_variable (aglat);
1255 aglat = aglat->next;
1257 return ret;
1260 /* Merge existing aggregate lattices in SRC_PLATS to DEST_PLATS, subtracting
 1261 OFFSET_DELTA. CS is the call graph edge and SRC_IDX the index of the source
1262 parameter used for lattice value sources. Return true if DEST_PLATS changed
1263 in any way. */
1265 static bool
1266 merge_aggregate_lattices (struct cgraph_edge *cs,
1267 struct ipcp_param_lattices *dest_plats,
1268 struct ipcp_param_lattices *src_plats,
1269 int src_idx, HOST_WIDE_INT offset_delta)
1271 bool pre_existing = dest_plats->aggs != NULL;
1272 struct ipcp_agg_lattice **dst_aglat;
1273 bool ret = false;
1275 if (set_check_aggs_by_ref (dest_plats, src_plats->aggs_by_ref))
1276 return true;
1277 if (src_plats->aggs_bottom)
1278 return set_agg_lats_contain_variable (dest_plats);
1279 dst_aglat = &dest_plats->aggs;
1281 for (struct ipcp_agg_lattice *src_aglat = src_plats->aggs;
1282 src_aglat;
1283 src_aglat = src_aglat->next)
1285 HOST_WIDE_INT new_offset = src_aglat->offset - offset_delta;
1287 if (new_offset < 0)
1288 continue;
1289 if (merge_agg_lats_step (dest_plats, new_offset, src_aglat->size,
1290 &dst_aglat, pre_existing, &ret))
1292 struct ipcp_agg_lattice *new_al = *dst_aglat;
1294 dst_aglat = &(*dst_aglat)->next;
1295 if (src_aglat->bottom)
1297 ret |= set_lattice_contains_variable (new_al);
1298 continue;
1300 if (src_aglat->contains_variable)
1301 ret |= set_lattice_contains_variable (new_al);
1302 for (struct ipcp_value *val = src_aglat->values;
1303 val;
1304 val = val->next)
1305 ret |= add_value_to_lattice (new_al, val->value, cs, val, src_idx,
1306 src_aglat->offset);
1308 else if (dest_plats->aggs_bottom)
1309 return true;
1311 ret |= set_chain_of_aglats_contains_variable (*dst_aglat);
1312 return ret;
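/* Worked example for merge_aggregate_lattices above (made-up numbers): with
   an OFFSET_DELTA of 64, e.g. coming from an ancestor jump function that
   moves the pointer 64 bits into the object, a source lattice at offset 96
   is merged into the destination at offset 32, while a source lattice at
   offset 0 would map to a negative offset and is therefore skipped.  */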
 1315 /* Propagate aggregate values across jump function JFUNC that is associated with
 1316 edge CS and put the values into DEST_PLATS. */
1318 static bool
1319 propagate_aggs_accross_jump_function (struct cgraph_edge *cs,
1320 struct ipa_jump_func *jfunc,
1321 struct ipcp_param_lattices *dest_plats)
1323 bool ret = false;
1325 if (dest_plats->aggs_bottom)
1326 return false;
1328 if (jfunc->type == IPA_JF_PASS_THROUGH
1329 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
1331 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1332 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1333 struct ipcp_param_lattices *src_plats;
1335 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1336 if (src_plats->aggs
1337 && (!src_plats->aggs_by_ref
1338 || ipa_get_jf_pass_through_agg_preserved (jfunc)))
1340 /* Currently we do not produce clobber aggregate jump
1341 functions, replace with merging when we do. */
1342 gcc_assert (!jfunc->agg.items);
1343 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats,
1344 src_idx, 0);
1346 else
1347 ret |= set_agg_lats_contain_variable (dest_plats);
1349 else if (jfunc->type == IPA_JF_ANCESTOR
1350 && ipa_get_jf_ancestor_agg_preserved (jfunc))
1352 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1353 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1354 struct ipcp_param_lattices *src_plats;
1356 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1357 if (src_plats->aggs && src_plats->aggs_by_ref)
1359 /* Currently we do not produce clobber aggregate jump
1360 functions, replace with merging when we do. */
1361 gcc_assert (!jfunc->agg.items);
1362 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats, src_idx,
1363 ipa_get_jf_ancestor_offset (jfunc));
1365 else if (!src_plats->aggs_by_ref)
1366 ret |= set_agg_lats_to_bottom (dest_plats);
1367 else
1368 ret |= set_agg_lats_contain_variable (dest_plats);
1370 else if (jfunc->agg.items)
1372 bool pre_existing = dest_plats->aggs != NULL;
1373 struct ipcp_agg_lattice **aglat = &dest_plats->aggs;
1374 struct ipa_agg_jf_item *item;
1375 int i;
1377 if (set_check_aggs_by_ref (dest_plats, jfunc->agg.by_ref))
1378 return true;
1380 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jfunc->agg.items, i, item)
1382 HOST_WIDE_INT val_size;
1384 if (item->offset < 0)
1385 continue;
1386 gcc_checking_assert (is_gimple_ip_invariant (item->value));
1387 val_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (item->value)), 1);
1389 if (merge_agg_lats_step (dest_plats, item->offset, val_size,
1390 &aglat, pre_existing, &ret))
1392 ret |= add_value_to_lattice (*aglat, item->value, cs, NULL, 0, 0);
1393 aglat = &(*aglat)->next;
1395 else if (dest_plats->aggs_bottom)
1396 return true;
1399 ret |= set_chain_of_aglats_contains_variable (*aglat);
1401 else
1402 ret |= set_agg_lats_contain_variable (dest_plats);
1404 return ret;
 1407 /* Propagate constants from the caller to the callee of CS. */
1410 static bool
1411 propagate_constants_accross_call (struct cgraph_edge *cs)
1413 struct ipa_node_params *callee_info;
1414 enum availability availability;
1415 struct cgraph_node *callee, *alias_or_thunk;
1416 struct ipa_edge_args *args;
1417 bool ret = false;
1418 int i, args_count, parms_count;
1420 callee = cgraph_function_node (cs->callee, &availability);
1421 if (!callee->analyzed)
1422 return false;
1423 gcc_checking_assert (cgraph_function_with_gimple_body_p (callee));
1424 callee_info = IPA_NODE_REF (callee);
1426 args = IPA_EDGE_REF (cs);
1427 args_count = ipa_get_cs_argument_count (args);
1428 parms_count = ipa_get_param_count (callee_info);
1430 /* If this call goes through a thunk we must not propagate to the first (0th)
1431 parameter. However, we might need to uncover a thunk from below a series
1432 of aliases first. */
1433 alias_or_thunk = cs->callee;
1434 while (alias_or_thunk->alias)
1435 alias_or_thunk = cgraph_alias_aliased_node (alias_or_thunk);
1436 if (alias_or_thunk->thunk.thunk_p)
1438 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
1439 0));
1440 i = 1;
1442 else
1443 i = 0;
1445 for (; (i < args_count) && (i < parms_count); i++)
1447 struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
1448 struct ipcp_param_lattices *dest_plats;
1450 dest_plats = ipa_get_parm_lattices (callee_info, i);
1451 if (availability == AVAIL_OVERWRITABLE)
1452 ret |= set_all_contains_variable (dest_plats);
1453 else
1455 ret |= propagate_scalar_accross_jump_function (cs, jump_func,
1456 &dest_plats->itself);
1457 ret |= propagate_aggs_accross_jump_function (cs, jump_func,
1458 dest_plats);
1461 for (; i < parms_count; i++)
1462 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info, i));
1464 return ret;
1467 /* If an indirect edge IE can be turned into a direct one based on KNOWN_VALS
1468 (which can contain both constants and binfos) or KNOWN_BINFOS (which can be
1469 NULL) return the destination. */
1471 tree
1472 ipa_get_indirect_edge_target (struct cgraph_edge *ie,
1473 VEC (tree, heap) *known_vals,
1474 VEC (tree, heap) *known_binfos,
1475 VEC (ipa_agg_jump_function_p, heap) *known_aggs)
1477 int param_index = ie->indirect_info->param_index;
1478 HOST_WIDE_INT token, anc_offset;
1479 tree otr_type;
1480 tree t;
1482 if (param_index == -1)
1483 return NULL_TREE;
1485 if (!ie->indirect_info->polymorphic)
1487 tree t;
1489 if (ie->indirect_info->agg_contents)
1491 if (VEC_length (ipa_agg_jump_function_p, known_aggs)
1492 > (unsigned int) param_index)
1494 struct ipa_agg_jump_function *agg;
1495 agg = VEC_index (ipa_agg_jump_function_p, known_aggs,
1496 param_index);
1497 t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
1498 ie->indirect_info->by_ref);
1500 else
1501 t = NULL;
1503 else
1504 t = (VEC_length (tree, known_vals) > (unsigned int) param_index
1505 ? VEC_index (tree, known_vals, param_index) : NULL);
1507 if (t &&
1508 TREE_CODE (t) == ADDR_EXPR
1509 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
1510 return TREE_OPERAND (t, 0);
1511 else
1512 return NULL_TREE;
1515 gcc_assert (!ie->indirect_info->agg_contents);
1516 token = ie->indirect_info->otr_token;
1517 anc_offset = ie->indirect_info->offset;
1518 otr_type = ie->indirect_info->otr_type;
1520 t = VEC_index (tree, known_vals, param_index);
1521 if (!t && known_binfos
1522 && VEC_length (tree, known_binfos) > (unsigned int) param_index)
1523 t = VEC_index (tree, known_binfos, param_index);
1524 if (!t)
1525 return NULL_TREE;
1527 if (TREE_CODE (t) != TREE_BINFO)
1529 tree binfo;
1530 binfo = gimple_extract_devirt_binfo_from_cst (t);
1531 if (!binfo)
1532 return NULL_TREE;
1533 binfo = get_binfo_at_offset (binfo, anc_offset, otr_type);
1534 if (!binfo)
1535 return NULL_TREE;
1536 return gimple_get_virt_method_for_binfo (token, binfo);
1538 else
1540 tree binfo;
1542 binfo = get_binfo_at_offset (t, anc_offset, otr_type);
1543 if (!binfo)
1544 return NULL_TREE;
1545 return gimple_get_virt_method_for_binfo (token, binfo);
1549 /* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
1550 and KNOWN_BINFOS. */
1552 static int
1553 devirtualization_time_bonus (struct cgraph_node *node,
1554 VEC (tree, heap) *known_csts,
1555 VEC (tree, heap) *known_binfos)
1557 struct cgraph_edge *ie;
1558 int res = 0;
1560 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1562 struct cgraph_node *callee;
1563 struct inline_summary *isummary;
1564 tree target;
1566 target = ipa_get_indirect_edge_target (ie, known_csts, known_binfos,
1567 NULL);
1568 if (!target)
1569 continue;
1571 /* Only bare minimum benefit for clearly un-inlineable targets. */
1572 res += 1;
1573 callee = cgraph_get_node (target);
1574 if (!callee || !callee->analyzed)
1575 continue;
1576 isummary = inline_summary (callee);
1577 if (!isummary->inlinable)
1578 continue;
1580 /* FIXME: The values below need re-considering and perhaps also
 1581 integrating into the cost metrics, at least in some very basic way. */
1582 if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
1583 res += 31;
1584 else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
1585 res += 15;
1586 else if (isummary->size <= MAX_INLINE_INSNS_AUTO
1587 || DECL_DECLARED_INLINE_P (callee->symbol.decl))
1588 res += 7;
1591 return res;
1594 /* Return time bonus incurred because of HINTS. */
1596 static int
1597 hint_time_bonus (inline_hints hints)
1599 if (hints & (INLINE_HINT_loop_iterations | INLINE_HINT_loop_stride))
1600 return PARAM_VALUE (PARAM_IPA_CP_LOOP_HINT_BONUS);
1601 return 0;
1604 /* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
1605 and SIZE_COST and with the sum of frequencies of incoming edges to the
 1606 potential new clone in FREQ_SUM. */
1608 static bool
1609 good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
1610 int freq_sum, gcov_type count_sum, int size_cost)
1612 if (time_benefit == 0
1613 || !flag_ipa_cp_clone
1614 || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->symbol.decl)))
1615 return false;
1617 gcc_assert (size_cost > 0);
1619 if (max_count)
1621 int factor = (count_sum * 1000) / max_count;
1622 HOST_WIDEST_INT evaluation = (((HOST_WIDEST_INT) time_benefit * factor)
1623 / size_cost);
1625 if (dump_file && (dump_flags & TDF_DETAILS))
1626 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1627 "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
1628 ") -> evaluation: " HOST_WIDEST_INT_PRINT_DEC
1629 ", threshold: %i\n",
1630 time_benefit, size_cost, (HOST_WIDE_INT) count_sum,
1631 evaluation, 500);
1633 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
1635 else
1637 HOST_WIDEST_INT evaluation = (((HOST_WIDEST_INT) time_benefit * freq_sum)
1638 / size_cost);
1640 if (dump_file && (dump_flags & TDF_DETAILS))
1641 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1642 "size: %i, freq_sum: %i) -> evaluation: "
1643 HOST_WIDEST_INT_PRINT_DEC ", threshold: %i\n",
1644 time_benefit, size_cost, freq_sum, evaluation,
1645 CGRAPH_FREQ_BASE /2);
1647 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
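/* Worked example for good_cloning_opportunity_p above (made-up numbers):
   without profile feedback, a time_benefit of 80, a freq_sum of 1000 and a
   size_cost of 10 give an evaluation of 80 * 1000 / 10 = 8000, which is then
   compared against PARAM_IPA_CP_EVAL_THRESHOLD.  With profile feedback,
   count_sum is first scaled to the range 0..1000 relative to max_count and
   that factor is used in place of freq_sum.  */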
1651 /* Return all context independent values from aggregate lattices in PLATS in a
1652 vector. Return NULL if there are none. */
1654 static VEC (ipa_agg_jf_item_t, gc) *
1655 context_independent_aggregate_values (struct ipcp_param_lattices *plats)
1657 VEC (ipa_agg_jf_item_t, gc) *res = NULL;
1659 if (plats->aggs_bottom
1660 || plats->aggs_contain_variable
1661 || plats->aggs_count == 0)
1662 return NULL;
1664 for (struct ipcp_agg_lattice *aglat = plats->aggs;
1665 aglat;
1666 aglat = aglat->next)
1667 if (ipa_lat_is_single_const (aglat))
1669 struct ipa_agg_jf_item item;
1670 item.offset = aglat->offset;
1671 item.value = aglat->values->value;
1672 VEC_safe_push (ipa_agg_jf_item_t, gc, res, item);
1674 return res;
1677 /* Allocate KNOWN_CSTS, KNOWN_BINFOS and, if non-NULL, KNOWN_AGGS and populate
1678 them with values of parameters that are known independent of the context.
1679 INFO describes the function. If REMOVABLE_PARAMS_COST is non-NULL, the
1680 movement cost of all removable parameters will be stored in it. */
1682 static bool
1683 gather_context_independent_values (struct ipa_node_params *info,
1684 VEC (tree, heap) **known_csts,
1685 VEC (tree, heap) **known_binfos,
1686 VEC (ipa_agg_jump_function_t, heap) **known_aggs,
1687 int *removable_params_cost)
1689 int i, count = ipa_get_param_count (info);
1690 bool ret = false;
1692 *known_csts = NULL;
1693 *known_binfos = NULL;
1694 VEC_safe_grow_cleared (tree, heap, *known_csts, count);
1695 VEC_safe_grow_cleared (tree, heap, *known_binfos, count);
1696 if (known_aggs)
1698 *known_aggs = NULL;
1699 VEC_safe_grow_cleared (ipa_agg_jump_function_t, heap, *known_aggs, count);
1702 if (removable_params_cost)
1703 *removable_params_cost = 0;
1705 for (i = 0; i < count ; i++)
1707 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1708 struct ipcp_lattice *lat = &plats->itself;
1710 if (ipa_lat_is_single_const (lat))
1712 struct ipcp_value *val = lat->values;
1713 if (TREE_CODE (val->value) != TREE_BINFO)
1715 VEC_replace (tree, *known_csts, i, val->value);
1716 if (removable_params_cost)
1717 *removable_params_cost
1718 += estimate_move_cost (TREE_TYPE (val->value));
1719 ret = true;
1721 else if (plats->virt_call)
1723 VEC_replace (tree, *known_binfos, i, val->value);
1724 ret = true;
1726 else if (removable_params_cost
1727 && !ipa_is_param_used (info, i))
1728 *removable_params_cost
1729 += estimate_move_cost (TREE_TYPE (ipa_get_param (info, i)));
1731 else if (removable_params_cost
1732 && !ipa_is_param_used (info, i))
1733 *removable_params_cost
1734 += estimate_move_cost (TREE_TYPE (ipa_get_param (info, i)));
1736 if (known_aggs)
1738 VEC (ipa_agg_jf_item_t, gc) *agg_items;
1739 struct ipa_agg_jump_function *ajf;
1741 agg_items = context_independent_aggregate_values (plats);
1742 ajf = &VEC_index (ipa_agg_jump_function_t, *known_aggs, i);
1743 ajf->items = agg_items;
1744 ajf->by_ref = plats->aggs_by_ref;
1745 ret |= agg_items != NULL;
1749 return ret;
1752 /* The current interface in ipa-inline-analysis requires a pointer vector.
1753 Create it.
1755 FIXME: That interface should be re-worked, this is slightly silly. Still,
1756 I'd like to discuss how to change it first and this demonstrates the
1757 issue. */
1759 static VEC (ipa_agg_jump_function_p, heap) *
1760 agg_jmp_p_vec_for_t_vec (VEC (ipa_agg_jump_function_t, heap) *known_aggs)
1762 VEC (ipa_agg_jump_function_p, heap) *ret;
1763 struct ipa_agg_jump_function *ajf;
1764 int i;
1766 ret = VEC_alloc (ipa_agg_jump_function_p, heap,
1767 VEC_length (ipa_agg_jump_function_t, known_aggs));
1768 FOR_EACH_VEC_ELT (ipa_agg_jump_function_t, known_aggs, i, ajf)
1769 VEC_quick_push (ipa_agg_jump_function_p, ret, ajf);
1770 return ret;
1773 /* Iterate over known values of parameters of NODE and estimate the local
1774 effects in terms of time and size they have. */
1776 static void
1777 estimate_local_effects (struct cgraph_node *node)
1779 struct ipa_node_params *info = IPA_NODE_REF (node);
1780 int i, count = ipa_get_param_count (info);
1781 VEC (tree, heap) *known_csts, *known_binfos;
1782 VEC (ipa_agg_jump_function_t, heap) *known_aggs;
1783 VEC (ipa_agg_jump_function_p, heap) *known_aggs_ptrs;
1784 bool always_const;
1785 int base_time = inline_summary (node)->time;
1786 int removable_params_cost;
1788 if (!count || !ipcp_versionable_function_p (node))
1789 return;
1791 if (dump_file && (dump_flags & TDF_DETAILS))
1792 fprintf (dump_file, "\nEstimating effects for %s/%i, base_time: %i.\n",
1793 cgraph_node_name (node), node->uid, base_time);
1795 always_const = gather_context_independent_values (info, &known_csts,
1796 &known_binfos, &known_aggs,
1797 &removable_params_cost);
1798 known_aggs_ptrs = agg_jmp_p_vec_for_t_vec (known_aggs);
1799 if (always_const)
1801 struct caller_statistics stats;
1802 inline_hints hints;
1803 int time, size;
1805 init_caller_stats (&stats);
1806 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
1807 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1808 known_aggs_ptrs, &size, &time, &hints);
1809 time -= devirtualization_time_bonus (node, known_csts, known_binfos);
1810 time -= hint_time_bonus (hints);
1811 time -= removable_params_cost;
1812 size -= stats.n_calls * removable_params_cost;
1814 if (dump_file)
1815 fprintf (dump_file, " - context independent values, size: %i, "
1816 "time_benefit: %i\n", size, base_time - time);
1818 if (size <= 0
1819 || cgraph_will_be_removed_from_program_if_no_direct_calls (node))
1821 info->clone_for_all_contexts = true;
1822 base_time = time;
1824 if (dump_file)
1825 fprintf (dump_file, " Decided to specialize for all "
1826 "known contexts, code not going to grow.\n");
1828 else if (good_cloning_opportunity_p (node, base_time - time,
1829 stats.freq_sum, stats.count_sum,
1830 size))
1832 if (size + overall_size <= max_new_size)
1834 info->clone_for_all_contexts = true;
1835 base_time = time;
1836 overall_size += size;
1838 if (dump_file)
1839 fprintf (dump_file, " Decided to specialize for all "
1840 "known contexts, growth deemed beneficial.\n");
1842 else if (dump_file && (dump_flags & TDF_DETAILS))
1843 fprintf (dump_file, " Not cloning for all contexts because "
1844 "max_new_size would be reached with %li.\n",
1845 size + overall_size);
1849 for (i = 0; i < count ; i++)
1851 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1852 struct ipcp_lattice *lat = &plats->itself;
1853 struct ipcp_value *val;
1854 int emc;
1856 if (lat->bottom
1857 || !lat->values
1858 || VEC_index (tree, known_csts, i)
1859 || VEC_index (tree, known_binfos, i))
1860 continue;
1862 for (val = lat->values; val; val = val->next)
1864 int time, size, time_benefit;
1865 inline_hints hints;
1867 if (TREE_CODE (val->value) != TREE_BINFO)
1869 VEC_replace (tree, known_csts, i, val->value);
1870 VEC_replace (tree, known_binfos, i, NULL_TREE);
1871 emc = estimate_move_cost (TREE_TYPE (val->value));
1873 else if (plats->virt_call)
1875 VEC_replace (tree, known_csts, i, NULL_TREE);
1876 VEC_replace (tree, known_binfos, i, val->value);
1877 emc = 0;
1879 else
1880 continue;
1882 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1883 known_aggs_ptrs, &size, &time,
1884 &hints);
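/* The estimated benefit is the time saved relative to BASE_TIME, plus the
devirtualization and inline-hint bonuses, plus the savings from no longer
having to pass the removable parameters and this particular argument (EMC). */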
1885 time_benefit = base_time - time
1886 + devirtualization_time_bonus (node, known_csts, known_binfos)
1887 + hint_time_bonus (hints)
1888 + removable_params_cost + emc;
1890 gcc_checking_assert (size >= 0);
1891 /* The inliner-heuristics based estimates may think that in certain
1892 contexts some functions do not have any size at all but we want
1893 all specializations to have at least a tiny cost, not least so that
1894 we do not end up dividing by zero. */
1895 if (size == 0)
1896 size = 1;
1898 if (dump_file && (dump_flags & TDF_DETAILS))
1900 fprintf (dump_file, " - estimates for value ");
1901 print_ipcp_constant_value (dump_file, val->value);
1902 fprintf (dump_file, " for parameter ");
1903 print_generic_expr (dump_file, ipa_get_param (info, i), 0);
1904 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
1905 time_benefit, size);
1908 val->local_time_benefit = time_benefit;
1909 val->local_size_cost = size;
1911 VEC_replace (tree, known_binfos, i, NULL_TREE);
1912 VEC_replace (tree, known_csts, i, NULL_TREE);
1915 for (i = 0; i < count ; i++)
1917 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1918 struct ipa_agg_jump_function *ajf;
1919 struct ipcp_agg_lattice *aglat;
1921 if (plats->aggs_bottom || !plats->aggs)
1922 continue;
1924 ajf = &VEC_index (ipa_agg_jump_function_t, known_aggs, i);
1925 for (aglat = plats->aggs; aglat; aglat = aglat->next)
1927 struct ipcp_value *val;
1928 if (aglat->bottom || !aglat->values
1929 /* If the following is true, the one value is in known_aggs. */
1930 || (!plats->aggs_contain_variable
1931 && ipa_lat_is_single_const (aglat)))
1932 continue;
1934 for (val = aglat->values; val; val = val->next)
1936 int time, size, time_benefit;
1937 struct ipa_agg_jf_item item;
1938 inline_hints hints;
1940 item.offset = aglat->offset;
1941 item.value = val->value;
1942 VEC_safe_push (ipa_agg_jf_item_t, gc, ajf->items, item);
1944 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1945 known_aggs_ptrs, &size, &time,
1946 &hints);
1947 time_benefit = base_time - time
1948 + devirtualization_time_bonus (node, known_csts, known_binfos)
1949 + hint_time_bonus (hints);
1950 gcc_checking_assert (size >= 0);
1951 if (size == 0)
1952 size = 1;
1954 if (dump_file && (dump_flags & TDF_DETAILS))
1956 fprintf (dump_file, " - estimates for value ");
1957 print_ipcp_constant_value (dump_file, val->value);
1958 fprintf (dump_file, " for parameter ");
1959 print_generic_expr (dump_file, ipa_get_param (info, i), 0);
1960 fprintf (dump_file, "[%soffset: " HOST_WIDE_INT_PRINT_DEC
1961 "]: time_benefit: %i, size: %i\n",
1962 plats->aggs_by_ref ? "ref " : "",
1963 aglat->offset, time_benefit, size);
1966 val->local_time_benefit = time_benefit;
1967 val->local_size_cost = size;
1968 VEC_pop (ipa_agg_jf_item_t, ajf->items);
1973 for (i = 0; i < count ; i++)
1975 VEC_free (ipa_agg_jf_item_t, gc,
1976 VEC_index (ipa_agg_jump_function_t, known_aggs, i).items);
1977 VEC_index (ipa_agg_jump_function_t, known_aggs, i).items = NULL;
1980 VEC_free (tree, heap, known_csts);
1981 VEC_free (tree, heap, known_binfos);
1982 VEC_free (ipa_agg_jump_function_t, heap, known_aggs);
1983 VEC_free (ipa_agg_jump_function_p, heap, known_aggs_ptrs);
1987 /* Add value CUR_VAL and all as yet unsorted values it depends on to the
1988 topological sort of values. */
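/* This is essentially Tarjan's strongly connected components algorithm: DFS
numbers and low-links identify SCCs of mutually dependent values, which are
chained together through scc_next and prepended to the global values_topo
list. */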
1990 static void
1991 add_val_to_toposort (struct ipcp_value *cur_val)
1993 static int dfs_counter = 0;
1994 static struct ipcp_value *stack;
1995 struct ipcp_value_source *src;
1997 if (cur_val->dfs)
1998 return;
2000 dfs_counter++;
2001 cur_val->dfs = dfs_counter;
2002 cur_val->low_link = dfs_counter;
2004 cur_val->topo_next = stack;
2005 stack = cur_val;
2006 cur_val->on_stack = true;
2008 for (src = cur_val->sources; src; src = src->next)
2009 if (src->val)
2011 if (src->val->dfs == 0)
2013 add_val_to_toposort (src->val);
2014 if (src->val->low_link < cur_val->low_link)
2015 cur_val->low_link = src->val->low_link;
2017 else if (src->val->on_stack
2018 && src->val->dfs < cur_val->low_link)
2019 cur_val->low_link = src->val->dfs;
2022 if (cur_val->dfs == cur_val->low_link)
2024 struct ipcp_value *v, *scc_list = NULL;
2028 v = stack;
2029 stack = v->topo_next;
2030 v->on_stack = false;
2032 v->scc_next = scc_list;
2033 scc_list = v;
2035 while (v != cur_val);
2037 cur_val->topo_next = values_topo;
2038 values_topo = cur_val;
2042 /* Add all values in lattices associated with NODE to the topological sort if
2043 they are not there yet. */
2045 static void
2046 add_all_node_vals_to_toposort (struct cgraph_node *node)
2048 struct ipa_node_params *info = IPA_NODE_REF (node);
2049 int i, count = ipa_get_param_count (info);
2051 for (i = 0; i < count ; i++)
2053 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2054 struct ipcp_lattice *lat = &plats->itself;
2055 struct ipcp_agg_lattice *aglat;
2056 struct ipcp_value *val;
2058 if (!lat->bottom)
2059 for (val = lat->values; val; val = val->next)
2060 add_val_to_toposort (val);
2062 if (!plats->aggs_bottom)
2063 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2064 if (!aglat->bottom)
2065 for (val = aglat->values; val; val = val->next)
2066 add_val_to_toposort (val);
2070 /* One pass of constant propagation along the call graph edges, from callers
2071 to callees (requires topological ordering in TOPO), iterating over strongly
2072 connected components. */
2074 static void
2075 propagate_constants_topo (struct topo_info *topo)
2077 int i;
2079 for (i = topo->nnodes - 1; i >= 0; i--)
2081 struct cgraph_node *v, *node = topo->order[i];
2082 struct ipa_dfs_info *node_dfs_info;
2084 if (!cgraph_function_with_gimple_body_p (node))
2085 continue;
2087 node_dfs_info = (struct ipa_dfs_info *) node->symbol.aux;
2088 /* First, iteratively propagate within the strongly connected component
2089 until all lattices stabilize. */
2090 v = node_dfs_info->next_cycle;
2091 while (v)
2093 push_node_to_stack (topo, v);
2094 v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle;
2097 v = node;
2098 while (v)
2100 struct cgraph_edge *cs;
2102 for (cs = v->callees; cs; cs = cs->next_callee)
2103 if (edge_within_scc (cs)
2104 && propagate_constants_accross_call (cs))
2105 push_node_to_stack (topo, cs->callee);
2106 v = pop_node_from_stack (topo);
2109 /* Afterwards, propagate along edges leading out of the SCC, calculate
2110 the local effects of the discovered constants and add all valid values to
2111 the topological sort. */
2112 v = node;
2113 while (v)
2115 struct cgraph_edge *cs;
2117 estimate_local_effects (v);
2118 add_all_node_vals_to_toposort (v);
2119 for (cs = v->callees; cs; cs = cs->next_callee)
2120 if (!edge_within_scc (cs))
2121 propagate_constants_accross_call (cs);
2123 v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle;
2129 /* Return the sum of A and B if neither of them is bigger than INT_MAX/2,
2130 otherwise return the bigger of the two. */
2132 static int
2133 safe_add (int a, int b)
2135 if (a > INT_MAX/2 || b > INT_MAX/2)
2136 return a > b ? a : b;
2137 else
2138 return a + b;
2142 /* Propagate the estimated effects of individual values along the topological
2143 order, from the dependent values to those they depend on. */
2145 static void
2146 propagate_effects (void)
2148 struct ipcp_value *base;
2150 for (base = values_topo; base; base = base->topo_next)
2152 struct ipcp_value_source *src;
2153 struct ipcp_value *val;
2154 int time = 0, size = 0;
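/* First sum the local and already propagated time benefits and size costs
over the whole SCC of interdependent values, then credit the totals to
every source value that feeds this SCC through a maybe-hot edge. */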
2156 for (val = base; val; val = val->scc_next)
2158 time = safe_add (time,
2159 val->local_time_benefit + val->prop_time_benefit);
2160 size = safe_add (size, val->local_size_cost + val->prop_size_cost);
2163 for (val = base; val; val = val->scc_next)
2164 for (src = val->sources; src; src = src->next)
2165 if (src->val
2166 && cgraph_maybe_hot_edge_p (src->cs))
2168 src->val->prop_time_benefit = safe_add (time,
2169 src->val->prop_time_benefit);
2170 src->val->prop_size_cost = safe_add (size,
2171 src->val->prop_size_cost);
2177 /* Propagate constants, binfos and their effects from the summaries
2178 interprocedurally. */
2180 static void
2181 ipcp_propagate_stage (struct topo_info *topo)
2183 struct cgraph_node *node;
2185 if (dump_file)
2186 fprintf (dump_file, "\n Propagating constants:\n\n");
2188 if (in_lto_p)
2189 ipa_update_after_lto_read ();
2192 FOR_EACH_DEFINED_FUNCTION (node)
2194 struct ipa_node_params *info = IPA_NODE_REF (node);
2196 determine_versionability (node);
2197 if (cgraph_function_with_gimple_body_p (node))
2199 info->lattices = XCNEWVEC (struct ipcp_param_lattices,
2200 ipa_get_param_count (info));
2201 initialize_node_lattices (node);
2203 if (node->count > max_count)
2204 max_count = node->count;
2205 overall_size += inline_summary (node)->self_size;
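/* The maximum allowed unit size is the larger of the current overall size
and PARAM_LARGE_UNIT_INSNS, increased by PARAM_IPCP_UNIT_GROWTH percent. */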
2208 max_new_size = overall_size;
2209 if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
2210 max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
2211 max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;
2213 if (dump_file)
2214 fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
2215 overall_size, max_new_size);
2217 propagate_constants_topo (topo);
2218 #ifdef ENABLE_CHECKING
2219 ipcp_verify_propagated_values ();
2220 #endif
2221 propagate_effects ();
2223 if (dump_file)
2225 fprintf (dump_file, "\nIPA lattices after all propagation:\n");
2226 print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
2230 /* Discover newly direct outgoing edges from NODE which is a new clone with
2231 known KNOWN_VALS and make them direct. */
2233 static void
2234 ipcp_discover_new_direct_edges (struct cgraph_node *node,
2235 VEC (tree, heap) *known_vals)
2237 struct cgraph_edge *ie, *next_ie;
2238 bool found = false;
2240 for (ie = node->indirect_calls; ie; ie = next_ie)
2242 tree target;
2244 next_ie = ie->next_callee;
2245 target = ipa_get_indirect_edge_target (ie, known_vals, NULL, NULL);
2246 if (target)
2248 ipa_make_edge_direct_to_target (ie, target);
2249 found = true;
2252 /* Turning indirect calls into direct calls will improve the overall summary. */
2253 if (found)
2254 inline_update_overall_summary (node);
2257 /* Vector of pointers which form linked lists of clones of an original cgraph
2258 edge. */
2260 static VEC (cgraph_edge_p, heap) *next_edge_clone;
2262 static inline void
2263 grow_next_edge_clone_vector (void)
2265 if (VEC_length (cgraph_edge_p, next_edge_clone)
2266 <= (unsigned) cgraph_edge_max_uid)
2267 VEC_safe_grow_cleared (cgraph_edge_p, heap, next_edge_clone,
2268 cgraph_edge_max_uid + 1);
2271 /* Edge duplication hook to grow the appropriate linked list in
2272 next_edge_clone. */
2274 static void
2275 ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2276 __attribute__((unused)) void *data)
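/* Record DST as the next clone of SRC: DST takes over whatever used to follow
SRC and SRC now points to DST, both indexed by edge uid in next_edge_clone. */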
2278 grow_next_edge_clone_vector ();
2279 VEC_replace (cgraph_edge_p, next_edge_clone, dst->uid,
2280 VEC_index (cgraph_edge_p, next_edge_clone, src->uid));
2281 VEC_replace (cgraph_edge_p, next_edge_clone, src->uid, dst);
2284 /* See if NODE is a clone with a known aggregate value at a given OFFSET of a
2285 parameter with the given INDEX. */
2287 static tree
2288 get_clone_agg_value (struct cgraph_node *node, HOST_WIDEST_INT offset,
2289 int index)
2291 struct ipa_agg_replacement_value *aggval;
2293 aggval = ipa_get_agg_replacements_for_node (node);
2294 while (aggval)
2296 if (aggval->offset == offset
2297 && aggval->index == index)
2298 return aggval->value;
2299 aggval = aggval->next;
2301 return NULL_TREE;
2304 /* Return true if edge CS does bring about the value described by SRC. */
2306 static bool
2307 cgraph_edge_brings_value_p (struct cgraph_edge *cs,
2308 struct ipcp_value_source *src)
2310 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2312 if (IPA_NODE_REF (cs->callee)->ipcp_orig_node
2313 || caller_info->node_dead)
2314 return false;
2315 if (!src->val)
2316 return true;
2318 if (caller_info->ipcp_orig_node)
2320 tree t;
2321 if (src->offset == -1)
2322 t = VEC_index (tree, caller_info->known_vals, src->index);
2323 else
2324 t = get_clone_agg_value (cs->caller, src->offset, src->index);
2325 return (t != NULL_TREE
2326 && values_equal_for_ipcp_p (src->val->value, t));
2328 else
2330 struct ipcp_agg_lattice *aglat;
2331 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
2332 src->index);
2333 if (src->offset == -1)
2334 return (ipa_lat_is_single_const (&plats->itself)
2335 && values_equal_for_ipcp_p (src->val->value,
2336 plats->itself.values->value));
2337 else
2339 if (plats->aggs_bottom || plats->aggs_contain_variable)
2340 return false;
2341 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2342 if (aglat->offset == src->offset)
2343 return (ipa_lat_is_single_const (aglat)
2344 && values_equal_for_ipcp_p (src->val->value,
2345 aglat->values->value));
2347 return false;
2351 /* Get the next clone in the linked list of clones of an edge. */
2353 static inline struct cgraph_edge *
2354 get_next_cgraph_edge_clone (struct cgraph_edge *cs)
2356 return VEC_index (cgraph_edge_p, next_edge_clone, cs->uid);
2359 /* Given VAL, iterate over all its sources and if they still hold, add their
2360 edge frequencies, their profile counts and their number into *FREQ_SUM,
2361 *COUNT_SUM and *CALLER_COUNT respectively. */
2363 static bool
2364 get_info_about_necessary_edges (struct ipcp_value *val, int *freq_sum,
2365 gcov_type *count_sum, int *caller_count)
2367 struct ipcp_value_source *src;
2368 int freq = 0, count = 0;
2369 gcov_type cnt = 0;
2370 bool hot = false;
2372 for (src = val->sources; src; src = src->next)
2374 struct cgraph_edge *cs = src->cs;
2375 while (cs)
2377 if (cgraph_edge_brings_value_p (cs, src))
2379 count++;
2380 freq += cs->frequency;
2381 cnt += cs->count;
2382 hot |= cgraph_maybe_hot_edge_p (cs);
2384 cs = get_next_cgraph_edge_clone (cs);
2388 *freq_sum = freq;
2389 *count_sum = cnt;
2390 *caller_count = count;
2391 return hot;
2394 /* Return a vector of incoming edges that do bring value VAL. It is assumed
2395 their number is known and equal to CALLER_COUNT. */
2397 static VEC (cgraph_edge_p,heap) *
2398 gather_edges_for_value (struct ipcp_value *val, int caller_count)
2400 struct ipcp_value_source *src;
2401 VEC (cgraph_edge_p,heap) *ret;
2403 ret = VEC_alloc (cgraph_edge_p, heap, caller_count);
2404 for (src = val->sources; src; src = src->next)
2406 struct cgraph_edge *cs = src->cs;
2407 while (cs)
2409 if (cgraph_edge_brings_value_p (cs, src))
2410 VEC_quick_push (cgraph_edge_p, ret, cs);
2411 cs = get_next_cgraph_edge_clone (cs);
2415 return ret;
2418 /* Construct a replacement map for a known VALUE of a formal parameter PARM.
2419 Return it, or NULL if for some reason it cannot be created. */
2421 static struct ipa_replace_map *
2422 get_replacement_map (tree value, tree parm)
2424 tree req_type = TREE_TYPE (parm);
2425 struct ipa_replace_map *replace_map;
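/* If the type of the constant does not match that of the parameter, try a
value-preserving conversion first and fall back to a bit-for-bit
VIEW_CONVERT_EXPR when only the sizes agree; otherwise give up. */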
2427 if (!useless_type_conversion_p (req_type, TREE_TYPE (value)))
2429 if (fold_convertible_p (req_type, value))
2430 value = fold_build1 (NOP_EXPR, req_type, value);
2431 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (value)))
2432 value = fold_build1 (VIEW_CONVERT_EXPR, req_type, value);
2433 else
2435 if (dump_file)
2437 fprintf (dump_file, " const ");
2438 print_generic_expr (dump_file, value, 0);
2439 fprintf (dump_file, " can't be converted to param ");
2440 print_generic_expr (dump_file, parm, 0);
2441 fprintf (dump_file, "\n");
2443 return NULL;
2447 replace_map = ggc_alloc_ipa_replace_map ();
2448 if (dump_file)
2450 fprintf (dump_file, " replacing param ");
2451 print_generic_expr (dump_file, parm, 0);
2452 fprintf (dump_file, " with const ");
2453 print_generic_expr (dump_file, value, 0);
2454 fprintf (dump_file, "\n");
2456 replace_map->old_tree = parm;
2457 replace_map->new_tree = value;
2458 replace_map->replace_p = true;
2459 replace_map->ref_p = false;
2461 return replace_map;
2464 /* Dump new profiling counts. */
2466 static void
2467 dump_profile_updates (struct cgraph_node *orig_node,
2468 struct cgraph_node *new_node)
2470 struct cgraph_edge *cs;
2472 fprintf (dump_file, " setting count of the specialized node to "
2473 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) new_node->count);
2474 for (cs = new_node->callees; cs ; cs = cs->next_callee)
2475 fprintf (dump_file, " edge to %s has count "
2476 HOST_WIDE_INT_PRINT_DEC "\n",
2477 cgraph_node_name (cs->callee), (HOST_WIDE_INT) cs->count);
2479 fprintf (dump_file, " setting count of the original node to "
2480 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) orig_node->count);
2481 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
2482 fprintf (dump_file, " edge to %s is left with "
2483 HOST_WIDE_INT_PRINT_DEC "\n",
2484 cgraph_node_name (cs->callee), (HOST_WIDE_INT) cs->count);
2487 /* After a specialized NEW_NODE version of ORIG_NODE has been created, update
2488 their profile information to reflect this. */
2490 static void
2491 update_profiling_info (struct cgraph_node *orig_node,
2492 struct cgraph_node *new_node)
2494 struct cgraph_edge *cs;
2495 struct caller_statistics stats;
2496 gcov_type new_sum, orig_sum;
2497 gcov_type remainder, orig_node_count = orig_node->count;
2499 if (orig_node_count == 0)
2500 return;
2502 init_caller_stats (&stats);
2503 cgraph_for_node_and_aliases (orig_node, gather_caller_stats, &stats, false);
2504 orig_sum = stats.count_sum;
2505 init_caller_stats (&stats);
2506 cgraph_for_node_and_aliases (new_node, gather_caller_stats, &stats, false);
2507 new_sum = stats.count_sum;
2509 if (orig_node_count < orig_sum + new_sum)
2511 if (dump_file)
2512 fprintf (dump_file, " Problem: node %s/%i has too low count "
2513 HOST_WIDE_INT_PRINT_DEC " while the sum of incoming "
2514 "counts is " HOST_WIDE_INT_PRINT_DEC "\n",
2515 cgraph_node_name (orig_node), orig_node->uid,
2516 (HOST_WIDE_INT) orig_node_count,
2517 (HOST_WIDE_INT) (orig_sum + new_sum));
2519 orig_node_count = (orig_sum + new_sum) * 12 / 10;
2520 if (dump_file)
2521 fprintf (dump_file, " proceeding by pretending it was "
2522 HOST_WIDE_INT_PRINT_DEC "\n",
2523 (HOST_WIDE_INT) orig_node_count);
2526 new_node->count = new_sum;
2527 remainder = orig_node_count - new_sum;
2528 orig_node->count = remainder;
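/* Scale the counts on the outgoing edges of both nodes in proportion to the
share of the execution count each node keeps; the ratio is computed in
REG_BR_PROB_BASE fixed-point arithmetic. */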
2530 for (cs = new_node->callees; cs ; cs = cs->next_callee)
2531 if (cs->frequency)
2532 cs->count = cs->count * (new_sum * REG_BR_PROB_BASE
2533 / orig_node_count) / REG_BR_PROB_BASE;
2534 else
2535 cs->count = 0;
2537 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
2538 cs->count = cs->count * (remainder * REG_BR_PROB_BASE
2539 / orig_node_count) / REG_BR_PROB_BASE;
2541 if (dump_file)
2542 dump_profile_updates (orig_node, new_node);
2545 /* Update the respective profile of specialized NEW_NODE and the original
2546 ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
2547 have been redirected to the specialized version. */
2549 static void
2550 update_specialized_profile (struct cgraph_node *new_node,
2551 struct cgraph_node *orig_node,
2552 gcov_type redirected_sum)
2554 struct cgraph_edge *cs;
2555 gcov_type new_node_count, orig_node_count = orig_node->count;
2557 if (dump_file)
2558 fprintf (dump_file, " the sum of counts of redirected edges is "
2559 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) redirected_sum);
2560 if (orig_node_count == 0)
2561 return;
2563 gcc_assert (orig_node_count >= redirected_sum);
2565 new_node_count = new_node->count;
2566 new_node->count += redirected_sum;
2567 orig_node->count -= redirected_sum;
2569 for (cs = new_node->callees; cs ; cs = cs->next_callee)
2570 if (cs->frequency)
2571 cs->count += cs->count * redirected_sum / new_node_count;
2572 else
2573 cs->count = 0;
2575 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
2577 gcov_type dec = cs->count * (redirected_sum * REG_BR_PROB_BASE
2578 / orig_node_count) / REG_BR_PROB_BASE;
2579 if (dec < cs->count)
2580 cs->count -= dec;
2581 else
2582 cs->count = 0;
2585 if (dump_file)
2586 dump_profile_updates (orig_node, new_node);
2589 /* Create a specialized version of NODE with known constants and types of
2590 parameters in KNOWN_VALS and redirect all edges in CALLERS to it. */
2592 static struct cgraph_node *
2593 create_specialized_node (struct cgraph_node *node,
2594 VEC (tree, heap) *known_vals,
2595 struct ipa_agg_replacement_value *aggvals,
2596 VEC (cgraph_edge_p,heap) *callers)
2598 struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
2599 VEC (ipa_replace_map_p,gc)* replace_trees = NULL;
2600 struct cgraph_node *new_node;
2601 int i, count = ipa_get_param_count (info);
2602 bitmap args_to_skip;
2604 gcc_assert (!info->ipcp_orig_node);
2606 if (node->local.can_change_signature)
2608 args_to_skip = BITMAP_GGC_ALLOC ();
2609 for (i = 0; i < count; i++)
2611 tree t = VEC_index (tree, known_vals, i);
2613 if ((t && TREE_CODE (t) != TREE_BINFO)
2614 || !ipa_is_param_used (info, i))
2615 bitmap_set_bit (args_to_skip, i);
2618 else
2620 args_to_skip = NULL;
2621 if (dump_file && (dump_flags & TDF_DETAILS))
2622 fprintf (dump_file, " cannot change function signature\n");
2625 for (i = 0; i < count ; i++)
2627 tree t = VEC_index (tree, known_vals, i);
2628 if (t && TREE_CODE (t) != TREE_BINFO)
2630 struct ipa_replace_map *replace_map;
2632 replace_map = get_replacement_map (t, ipa_get_param (info, i));
2633 if (replace_map)
2634 VEC_safe_push (ipa_replace_map_p, gc, replace_trees, replace_map);
2638 new_node = cgraph_create_virtual_clone (node, callers, replace_trees,
2639 args_to_skip, "constprop");
2640 ipa_set_node_agg_value_chain (new_node, aggvals);
2641 if (dump_file && (dump_flags & TDF_DETAILS))
2643 fprintf (dump_file, " the new node is %s/%i.\n",
2644 cgraph_node_name (new_node), new_node->uid);
2645 if (aggvals)
2646 ipa_dump_agg_replacement_values (dump_file, aggvals);
2648 gcc_checking_assert (ipa_node_params_vector
2649 && (VEC_length (ipa_node_params_t,
2650 ipa_node_params_vector)
2651 > (unsigned) cgraph_max_uid));
2652 update_profiling_info (node, new_node);
2653 new_info = IPA_NODE_REF (new_node);
2654 new_info->ipcp_orig_node = node;
2655 new_info->known_vals = known_vals;
2657 ipcp_discover_new_direct_edges (new_node, known_vals);
2659 VEC_free (cgraph_edge_p, heap, callers);
2660 return new_node;
2663 /* Given a NODE and a subset of its CALLERS, try to populate blank slots in
2664 KNOWN_VALS with constants and types that are also known for all of the
2665 CALLERS. */
2667 static void
2668 find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
2669 VEC (tree, heap) *known_vals,
2670 VEC (cgraph_edge_p,heap) *callers)
2672 struct ipa_node_params *info = IPA_NODE_REF (node);
2673 int i, count = ipa_get_param_count (info);
2675 for (i = 0; i < count ; i++)
2677 struct cgraph_edge *cs;
2678 tree newval = NULL_TREE;
2679 int j;
2681 if (ipa_get_scalar_lat (info, i)->bottom
2682 || VEC_index (tree, known_vals, i))
2683 continue;
2685 FOR_EACH_VEC_ELT (cgraph_edge_p, callers, j, cs)
2687 struct ipa_jump_func *jump_func;
2688 tree t;
2690 if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
2692 newval = NULL_TREE;
2693 break;
2695 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
2696 t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func);
2697 if (!t
2698 || (newval
2699 && !values_equal_for_ipcp_p (t, newval)))
2701 newval = NULL_TREE;
2702 break;
2704 else
2705 newval = t;
2708 if (newval)
2710 if (dump_file && (dump_flags & TDF_DETAILS))
2712 fprintf (dump_file, " adding an extra known scalar value ");
2713 print_ipcp_constant_value (dump_file, newval);
2714 fprintf (dump_file, " for parameter ");
2715 print_generic_expr (dump_file, ipa_get_param (info, i), 0);
2716 fprintf (dump_file, "\n");
2719 VEC_replace (tree, known_vals, i, newval);
2724 /* Go through PLATS and create a vector of value/offset pairs (with OFFSET
2725 subtracted from each offset) from lattices that contain only a single value. */
2727 static VEC (ipa_agg_jf_item_t, heap) *
2728 copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
2730 VEC (ipa_agg_jf_item_t, heap) *res = NULL;
2732 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
2733 return NULL;
2735 for (struct ipcp_agg_lattice *aglat = plats->aggs; aglat; aglat = aglat->next)
2736 if (ipa_lat_is_single_const (aglat))
2738 struct ipa_agg_jf_item ti;
2739 ti.offset = aglat->offset - offset;
2740 ti.value = aglat->values->value;
2741 VEC_safe_push (ipa_agg_jf_item_t, heap, res, ti);
2743 return res;
2746 /* Intersect all values in INTER with single value lattices in PLATS (while
2747 subtracting OFFSET). */
2749 static void
2750 intersect_with_plats (struct ipcp_param_lattices *plats,
2751 VEC (ipa_agg_jf_item_t, heap) **inter,
2752 HOST_WIDE_INT offset)
2754 struct ipcp_agg_lattice *aglat;
2755 struct ipa_agg_jf_item *item;
2756 int k;
2758 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
2760 VEC_free (ipa_agg_jf_item_t, heap, *inter);
2761 *inter = NULL;
2762 return;
2765 aglat = plats->aggs;
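/* Walk the offset-sorted lattice list in lockstep with the items in *INTER,
clearing the value of every item for which no equal value is found at the
same (adjusted) offset. */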
2766 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, *inter, k, item)
2768 bool found = false;
2769 if (!item->value)
2770 continue;
2771 while (aglat)
2773 if (aglat->offset - offset > item->offset)
2774 break;
2775 if (aglat->offset - offset == item->offset)
2777 gcc_checking_assert (item->value);
2778 if (values_equal_for_ipcp_p (item->value, aglat->values->value))
2779 found = true;
2780 break;
2782 aglat = aglat->next;
2784 if (!found)
2785 item->value = NULL_TREE;
2789 /* Copy aggregate replacement values of NODE (which is an IPA-CP clone) to the
2790 vector result while subtracting OFFSET from the individual value offsets. */
2792 static VEC (ipa_agg_jf_item_t, heap) *
2793 agg_replacements_to_vector (struct cgraph_node *node, HOST_WIDE_INT offset)
2795 struct ipa_agg_replacement_value *av;
2796 VEC (ipa_agg_jf_item_t, heap) *res = NULL;
2798 for (av = ipa_get_agg_replacements_for_node (node); av; av = av->next)
2800 struct ipa_agg_jf_item item;
2801 gcc_checking_assert (av->value);
2802 item.offset = av->offset - offset;
2803 item.value = av->value;
2804 VEC_safe_push (ipa_agg_jf_item_t, heap, res, item);
2807 return res;
2810 /* Intersect all values in INTER with those that we have already scheduled to
2811 be replaced in parameter number INDEX of NODE, which is an IPA-CP clone
2812 (while subtracting OFFSET). */
2814 static void
2815 intersect_with_agg_replacements (struct cgraph_node *node, int index,
2816 VEC (ipa_agg_jf_item_t, heap) **inter,
2817 HOST_WIDE_INT offset)
2819 struct ipa_agg_replacement_value *srcvals;
2820 struct ipa_agg_jf_item *item;
2821 int i;
2823 srcvals = ipa_get_agg_replacements_for_node (node);
2824 if (!srcvals)
2826 VEC_free (ipa_agg_jf_item_t, heap, *inter);
2827 *inter = NULL;
2828 return;
2831 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, *inter, i, item)
2833 struct ipa_agg_replacement_value *av;
2834 bool found = false;
2835 if (!item->value)
2836 continue;
2837 for (av = srcvals; av; av = av->next)
2839 gcc_checking_assert (av->value);
2840 if (av->index == index
2841 && av->offset - offset == item->offset)
2843 if (values_equal_for_ipcp_p (item->value, av->value))
2844 found = true;
2845 break;
2848 if (!found)
2849 item->value = NULL_TREE;
2853 /* Look at edges in CALLERS and collect all known aggregate values that arrive
2854 from all of them. */
2856 static struct ipa_agg_replacement_value *
2857 find_aggregate_values_for_callers_subset (struct cgraph_node *node,
2858 VEC (cgraph_edge_p,heap) *callers)
2860 struct ipa_node_params *info = IPA_NODE_REF (node);
2861 struct ipa_agg_replacement_value *res = NULL;
2862 struct cgraph_edge *cs;
2863 int i, j, count = ipa_get_param_count (info);
2865 FOR_EACH_VEC_ELT (cgraph_edge_p, callers, j, cs)
2867 int c = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
2868 if (c < count)
2869 count = c;
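/* Only parameters for which every caller passes an argument can receive
aggregate replacements, so COUNT is clamped to the smallest argument count
among CALLERS. */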
2872 for (i = 0; i < count ; i++)
2874 struct cgraph_edge *cs;
2875 VEC (ipa_agg_jf_item_t, heap) *inter = NULL;
2876 struct ipa_agg_jf_item *item;
2877 int j;
2879 /* Among other things, the following check should deal with all by_ref
2880 mismatches. */
2881 if (ipa_get_parm_lattices (info, i)->aggs_bottom)
2882 continue;
2884 FOR_EACH_VEC_ELT (cgraph_edge_p, callers, j, cs)
2886 struct ipa_jump_func *jfunc;
2887 jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
2888 if (jfunc->type == IPA_JF_PASS_THROUGH
2889 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2891 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2892 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
2894 if (caller_info->ipcp_orig_node)
2896 if (!inter)
2897 inter = agg_replacements_to_vector (cs->caller, 0);
2898 else
2899 intersect_with_agg_replacements (cs->caller, src_idx,
2900 &inter, 0);
2902 else
2904 struct ipcp_param_lattices *src_plats;
2905 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
2906 /* Currently we do not produce clobber aggregate jump
2907 functions, adjust when we do. */
2908 gcc_checking_assert (!jfunc->agg.items);
2909 if (!inter)
2910 inter = copy_plats_to_inter (src_plats, 0);
2911 else
2912 intersect_with_plats (src_plats, &inter, 0);
2915 else if (jfunc->type == IPA_JF_ANCESTOR
2916 && ipa_get_jf_ancestor_agg_preserved (jfunc))
2918 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2919 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
2920 struct ipcp_param_lattices *src_plats;
2921 HOST_WIDE_INT delta = ipa_get_jf_ancestor_offset (jfunc);
2923 if (info->ipcp_orig_node)
2925 if (!inter)
2926 inter = agg_replacements_to_vector (cs->caller, delta);
2927 else
2928 intersect_with_agg_replacements (cs->caller, i, &inter,
2929 delta);
2931 else
2933 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
2934 /* Currently we do not produce clobber aggregate jump
2935 functions, adjust when we do. */
2936 gcc_checking_assert (!jfunc->agg.items);
2937 if (!inter)
2938 inter = copy_plats_to_inter (src_plats, delta);
2939 else
2940 intersect_with_plats (src_plats, &inter, delta);
2943 else if (jfunc->agg.items)
2945 int k;
2947 if (!inter)
2948 inter = VEC_copy (ipa_agg_jf_item, heap, jfunc->agg.items);
2949 else
2950 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, inter, k, item)
2952 int l = 0;
2953 bool found = false;
2955 if (!item->value)
2956 continue;
2958 while ((unsigned) l < VEC_length (ipa_agg_jf_item_t,
2959 jfunc->agg.items))
2961 struct ipa_agg_jf_item *ti;
2962 ti = &VEC_index (ipa_agg_jf_item_t,
2963 jfunc->agg.items, l);
2964 if (ti->offset > item->offset)
2965 break;
2966 if (ti->offset == item->offset)
2968 gcc_checking_assert (ti->value);
2969 if (values_equal_for_ipcp_p (item->value,
2970 ti->value))
2971 found = true;
2972 break;
2974 l++;
2976 if (!found)
2977 item->value = NULL;
2980 else
2981 goto next_param;
2983 if (!inter)
2984 goto next_param;
2987 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, inter, j, item)
2989 struct ipa_agg_replacement_value *v;
2991 if (!item->value)
2992 continue;
2994 v = ggc_alloc_ipa_agg_replacement_value ();
2995 v->index = i;
2996 v->offset = item->offset;
2997 v->value = item->value;
2998 v->next = res;
2999 res = v;
3002 next_param:
3003 if (inter)
3004 VEC_free (ipa_agg_jf_item, heap, inter);
3006 return res;
3009 /* Turn KNOWN_AGGS into a list of aggregate replacement values. */
3011 static struct ipa_agg_replacement_value *
3012 known_aggs_to_agg_replacement_list (VEC (ipa_agg_jump_function_t,
3013 heap) *known_aggs)
3015 struct ipa_agg_replacement_value *res = NULL;
3016 struct ipa_agg_jump_function *aggjf;
3017 struct ipa_agg_jf_item *item;
3018 int i, j;
3020 FOR_EACH_VEC_ELT (ipa_agg_jump_function_t, known_aggs, i, aggjf)
3021 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, aggjf->items, j, item)
3023 struct ipa_agg_replacement_value *v;
3024 v = ggc_alloc_ipa_agg_replacement_value ();
3025 v->index = i;
3026 v->offset = item->offset;
3027 v->value = item->value;
3028 v->next = res;
3029 res = v;
3031 return res;
3034 /* Determine whether CS also brings all scalar values that NODE is
3035 specialized for. */
3037 static bool
3038 cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
3039 struct cgraph_node *node)
3041 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
3042 int count = ipa_get_param_count (dest_info);
3043 struct ipa_node_params *caller_info;
3044 struct ipa_edge_args *args;
3045 int i;
3047 caller_info = IPA_NODE_REF (cs->caller);
3048 args = IPA_EDGE_REF (cs);
3049 for (i = 0; i < count; i++)
3051 struct ipa_jump_func *jump_func;
3052 tree val, t;
3054 val = VEC_index (tree, dest_info->known_vals, i);
3055 if (!val)
3056 continue;
3058 if (i >= ipa_get_cs_argument_count (args))
3059 return false;
3060 jump_func = ipa_get_ith_jump_func (args, i);
3061 t = ipa_value_from_jfunc (caller_info, jump_func);
3062 if (!t || !values_equal_for_ipcp_p (val, t))
3063 return false;
3065 return true;
3068 /* Determine whether CS also brings all aggregate values that NODE is
3069 specialized for. */
3070 static bool
3071 cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
3072 struct cgraph_node *node)
3074 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
3075 struct ipa_agg_replacement_value *aggval;
3077 aggval = ipa_get_agg_replacements_for_node (node);
3078 while (aggval)
3080 bool found = false;
3081 struct ipcp_param_lattices *plats;
3082 plats = ipa_get_parm_lattices (caller_info, aggval->index);
3083 if (plats->aggs_bottom || plats->aggs_contain_variable)
3084 return false;
3085 for (struct ipcp_agg_lattice *aglat = plats->aggs;
3086 aglat;
3087 aglat = aglat->next)
3088 if (aglat->offset == aggval->offset)
3090 if (ipa_lat_is_single_const (aglat)
3091 && values_equal_for_ipcp_p (aggval->value,
3092 aglat->values->value))
3094 found = true;
3095 break;
3097 else
3098 return false;
3101 if (!found)
3102 return false;
3104 aggval = aggval->next;
3106 return true;
3109 /* Given an original NODE and a VAL for which we have already created a
3110 specialized clone, check whether there are incoming edges that still lead
3111 into the old node but now also bring the requested value and also conform to
3112 all other criteria such that they can be redirected to the specialized node.
3113 This function can therefore redirect the final edge in an SCC. */
3115 static void
3116 perhaps_add_new_callers (struct cgraph_node *node, struct ipcp_value *val)
3118 struct ipcp_value_source *src;
3119 gcov_type redirected_sum = 0;
3121 for (src = val->sources; src; src = src->next)
3123 struct cgraph_edge *cs = src->cs;
3124 while (cs)
3126 enum availability availability;
3128 if (cgraph_function_node (cs->callee, &availability) == node
3129 && availability > AVAIL_OVERWRITABLE
3130 && cgraph_edge_brings_value_p (cs, src))
3132 if (cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
3133 && cgraph_edge_brings_all_agg_vals_for_node (cs,
3134 val->spec_node))
3136 if (dump_file)
3137 fprintf (dump_file, " - adding an extra caller %s/%i"
3138 " of %s/%i\n",
3139 xstrdup (cgraph_node_name (cs->caller)),
3140 cs->caller->uid,
3141 xstrdup (cgraph_node_name (val->spec_node)),
3142 val->spec_node->uid);
3144 cgraph_redirect_edge_callee (cs, val->spec_node);
3145 redirected_sum += cs->count;
3148 cs = get_next_cgraph_edge_clone (cs);
3152 if (redirected_sum)
3153 update_specialized_profile (val->spec_node, node, redirected_sum);
3157 /* Copy KNOWN_BINFOS to KNOWN_VALS. */
3159 static void
3160 move_binfos_to_values (VEC (tree, heap) *known_vals,
3161 VEC (tree, heap) *known_binfos)
3163 tree t;
3164 int i;
3166 for (i = 0; VEC_iterate (tree, known_binfos, i, t); i++)
3167 if (t)
3168 VEC_replace (tree, known_vals, i, t);
3171 /* Return true if there is a replacement equivalent to VALUE, INDEX and OFFSET
3172 among those in the AGGVALS list. */
3174 DEBUG_FUNCTION bool
3175 ipcp_val_in_agg_replacements_p (struct ipa_agg_replacement_value *aggvals,
3176 int index, HOST_WIDE_INT offset, tree value)
3178 while (aggvals)
3180 if (aggvals->index == index
3181 && aggvals->offset == offset
3182 && values_equal_for_ipcp_p (aggvals->value, value))
3183 return true;
3184 aggvals = aggvals->next;
3186 return false;
3189 /* Decide whether to create a specialized version of NODE for value VAL of
3190 parameter at the given INDEX. If OFFSET is -1, the value is for the parameter
3191 itself, otherwise it is stored at the given OFFSET of the parameter. KNOWN_CSTS
3192 and KNOWN_BINFOS describe the other already known values. */
3194 static bool
3195 decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
3196 struct ipcp_value *val, VEC (tree, heap) *known_csts,
3197 VEC (tree, heap) *known_binfos)
3199 struct ipa_agg_replacement_value *aggvals;
3200 int freq_sum, caller_count;
3201 gcov_type count_sum;
3202 VEC (cgraph_edge_p, heap) *callers;
3203 VEC (tree, heap) *kv;
3205 if (val->spec_node)
3207 perhaps_add_new_callers (node, val);
3208 return false;
3210 else if (val->local_size_cost + overall_size > max_new_size)
3212 if (dump_file && (dump_flags & TDF_DETAILS))
3213 fprintf (dump_file, " Ignoring candidate value because "
3214 "max_new_size would be reached with %li.\n",
3215 val->local_size_cost + overall_size);
3216 return false;
3218 else if (!get_info_about_necessary_edges (val, &freq_sum, &count_sum,
3219 &caller_count))
3220 return false;
3222 if (dump_file && (dump_flags & TDF_DETAILS))
3224 fprintf (dump_file, " - considering value ");
3225 print_ipcp_constant_value (dump_file, val->value);
3226 fprintf (dump_file, " for parameter ");
3227 print_generic_expr (dump_file, ipa_get_param (IPA_NODE_REF (node),
3228 index), 0);
3229 if (offset != -1)
3230 fprintf (dump_file, ", offset: " HOST_WIDE_INT_PRINT_DEC, offset);
3231 fprintf (dump_file, " (caller_count: %i)\n", caller_count);
3234 if (!good_cloning_opportunity_p (node, val->local_time_benefit,
3235 freq_sum, count_sum,
3236 val->local_size_cost)
3237 && !good_cloning_opportunity_p (node,
3238 val->local_time_benefit
3239 + val->prop_time_benefit,
3240 freq_sum, count_sum,
3241 val->local_size_cost
3242 + val->prop_size_cost))
3243 return false;
3245 if (dump_file)
3246 fprintf (dump_file, " Creating a specialized node of %s/%i.\n",
3247 cgraph_node_name (node), node->uid);
3249 callers = gather_edges_for_value (val, caller_count);
3250 kv = VEC_copy (tree, heap, known_csts);
3251 move_binfos_to_values (kv, known_binfos);
3252 if (offset == -1)
3253 VEC_replace (tree, kv, index, val->value);
3254 find_more_scalar_values_for_callers_subset (node, kv, callers);
3255 aggvals = find_aggregate_values_for_callers_subset (node, callers);
3256 gcc_checking_assert (offset == -1
3257 || ipcp_val_in_agg_replacements_p (aggvals, index,
3258 offset, val->value));
3259 val->spec_node = create_specialized_node (node, kv, aggvals, callers);
3260 overall_size += val->local_size_cost;
3262 /* TODO: If for some lattice there is only one other known value
3263 left, make a special node for it too. */
3265 return true;
3268 /* Decide whether and what specialized clones of NODE should be created. */
3270 static bool
3271 decide_whether_version_node (struct cgraph_node *node)
3273 struct ipa_node_params *info = IPA_NODE_REF (node);
3274 int i, count = ipa_get_param_count (info);
3275 VEC (tree, heap) *known_csts, *known_binfos;
3276 VEC (ipa_agg_jump_function_t, heap) *known_aggs = NULL;
3277 bool ret = false;
3279 if (count == 0)
3280 return false;
3282 if (dump_file && (dump_flags & TDF_DETAILS))
3283 fprintf (dump_file, "\nEvaluating opportunities for %s/%i.\n",
3284 cgraph_node_name (node), node->uid);
3286 gather_context_independent_values (info, &known_csts, &known_binfos,
3287 info->clone_for_all_contexts ? &known_aggs
3288 : NULL, NULL);
3290 for (i = 0; i < count; i++)
3292 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
3293 struct ipcp_lattice *lat = &plats->itself;
3294 struct ipcp_value *val;
3296 if (!lat->bottom
3297 && !VEC_index (tree, known_csts, i)
3298 && !VEC_index (tree, known_binfos, i))
3299 for (val = lat->values; val; val = val->next)
3300 ret |= decide_about_value (node, i, -1, val, known_csts,
3301 known_binfos);
3303 if (!plats->aggs_bottom || !plats->aggs)
3305 struct ipcp_agg_lattice *aglat;
3306 struct ipcp_value *val;
3307 for (aglat = plats->aggs; aglat; aglat = aglat->next)
3308 if (!aglat->bottom && aglat->values
3309 /* If the following is false, the one value is in
3310 known_aggs. */
3311 && (plats->aggs_contain_variable
3312 || !ipa_lat_is_single_const (aglat)))
3313 for (val = aglat->values; val; val = val->next)
3314 ret |= decide_about_value (node, i, aglat->offset, val,
3315 known_csts, known_binfos);
3317 info = IPA_NODE_REF (node);
3320 if (info->clone_for_all_contexts)
3322 VEC (cgraph_edge_p, heap) *callers;
3324 if (dump_file)
3325 fprintf (dump_file, " - Creating a specialized node of %s/%i "
3326 "for all known contexts.\n", cgraph_node_name (node),
3327 node->uid);
3329 callers = collect_callers_of_node (node);
3330 move_binfos_to_values (known_csts, known_binfos);
3331 create_specialized_node (node, known_csts,
3332 known_aggs_to_agg_replacement_list (known_aggs),
3333 callers);
3334 info = IPA_NODE_REF (node);
3335 info->clone_for_all_contexts = false;
3336 ret = true;
3338 else
3339 VEC_free (tree, heap, known_csts);
3341 VEC_free (tree, heap, known_binfos);
3342 return ret;
3345 /* Transitively mark all callees of NODE within the same SCC as not dead. */
3347 static void
3348 spread_undeadness (struct cgraph_node *node)
3350 struct cgraph_edge *cs;
3352 for (cs = node->callees; cs; cs = cs->next_callee)
3353 if (edge_within_scc (cs))
3355 struct cgraph_node *callee;
3356 struct ipa_node_params *info;
3358 callee = cgraph_function_node (cs->callee, NULL);
3359 info = IPA_NODE_REF (callee);
3361 if (info->node_dead)
3363 info->node_dead = 0;
3364 spread_undeadness (callee);
3369 /* Return true if NODE has a caller from outside of its SCC that is not
3370 dead. Worker callback for cgraph_for_node_and_aliases. */
3372 static bool
3373 has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
3374 void *data ATTRIBUTE_UNUSED)
3376 struct cgraph_edge *cs;
3378 for (cs = node->callers; cs; cs = cs->next_caller)
3379 if (cs->caller->thunk.thunk_p
3380 && cgraph_for_node_and_aliases (cs->caller,
3381 has_undead_caller_from_outside_scc_p,
3382 NULL, true))
3383 return true;
3384 else if (!edge_within_scc (cs)
3385 && !IPA_NODE_REF (cs->caller)->node_dead)
3386 return true;
3387 return false;
3391 /* Identify nodes within the same SCC as NODE which are no longer needed
3392 because of new clones and will be removed as unreachable. */
3394 static void
3395 identify_dead_nodes (struct cgraph_node *node)
3397 struct cgraph_node *v;
3398 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3399 if (cgraph_will_be_removed_from_program_if_no_direct_calls (v)
3400 && !cgraph_for_node_and_aliases (v,
3401 has_undead_caller_from_outside_scc_p,
3402 NULL, true))
3403 IPA_NODE_REF (v)->node_dead = 1;
3405 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3406 if (!IPA_NODE_REF (v)->node_dead)
3407 spread_undeadness (v);
3409 if (dump_file && (dump_flags & TDF_DETAILS))
3411 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3412 if (IPA_NODE_REF (v)->node_dead)
3413 fprintf (dump_file, " Marking node as dead: %s/%i.\n",
3414 cgraph_node_name (v), v->uid);
3418 /* The decision stage. Iterate over the topological order of call graph nodes
3419 TOPO and make specialized clones if deemed beneficial. */
3421 static void
3422 ipcp_decision_stage (struct topo_info *topo)
3424 int i;
3426 if (dump_file)
3427 fprintf (dump_file, "\nIPA decision stage:\n\n");
3429 for (i = topo->nnodes - 1; i >= 0; i--)
3431 struct cgraph_node *node = topo->order[i];
3432 bool change = false, iterate = true;
3434 while (iterate)
3436 struct cgraph_node *v;
3437 iterate = false;
3438 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3439 if (cgraph_function_with_gimple_body_p (v)
3440 && ipcp_versionable_function_p (v))
3441 iterate |= decide_whether_version_node (v);
3443 change |= iterate;
3445 if (change)
3446 identify_dead_nodes (node);
3450 /* The IPCP driver. */
3452 static unsigned int
3453 ipcp_driver (void)
3455 struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
3456 struct topo_info topo;
3458 ipa_check_create_node_params ();
3459 ipa_check_create_edge_args ();
3460 grow_next_edge_clone_vector ();
3461 edge_duplication_hook_holder =
3462 cgraph_add_edge_duplication_hook (&ipcp_edge_duplication_hook, NULL);
3463 ipcp_values_pool = create_alloc_pool ("IPA-CP values",
3464 sizeof (struct ipcp_value), 32);
3465 ipcp_sources_pool = create_alloc_pool ("IPA-CP value sources",
3466 sizeof (struct ipcp_value_source), 64);
3467 ipcp_agg_lattice_pool = create_alloc_pool ("IPA_CP aggregate lattices",
3468 sizeof (struct ipcp_agg_lattice),
3469 32);
3470 if (dump_file)
3472 fprintf (dump_file, "\nIPA structures before propagation:\n");
3473 if (dump_flags & TDF_DETAILS)
3474 ipa_print_all_params (dump_file);
3475 ipa_print_all_jump_functions (dump_file);
3478 /* Topological sort. */
3479 build_toporder_info (&topo);
3480 /* Do the interprocedural propagation. */
3481 ipcp_propagate_stage (&topo);
3482 /* Decide what constant propagation and cloning should be performed. */
3483 ipcp_decision_stage (&topo);
3485 /* Free all IPCP structures. */
3486 free_toporder_info (&topo);
3487 VEC_free (cgraph_edge_p, heap, next_edge_clone);
3488 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3489 ipa_free_all_structures_after_ipa_cp ();
3490 if (dump_file)
3491 fprintf (dump_file, "\nIPA constant propagation end\n");
3492 return 0;
3495 /* Initialization and computation of IPCP data structures. This is the initial
3496 intraprocedural analysis of functions, which gathers information to be
3497 propagated later on. */
3499 static void
3500 ipcp_generate_summary (void)
3502 struct cgraph_node *node;
3504 if (dump_file)
3505 fprintf (dump_file, "\nIPA constant propagation start:\n");
3506 ipa_register_cgraph_hooks ();
3508 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
3510 node->local.versionable
3511 = tree_versionable_function_p (node->symbol.decl);
3512 ipa_analyze_node (node);
3516 /* Write ipcp summary. */
3518 static void
3519 ipcp_write_summary (void)
3521 ipa_prop_write_jump_functions ();
3524 /* Read ipcp summary. */
3526 static void
3527 ipcp_read_summary (void)
3529 ipa_prop_read_jump_functions ();
3532 /* Gate for IPCP optimization. */
3534 static bool
3535 cgraph_gate_cp (void)
3537 /* FIXME: We should remove the optimize check after we ensure we never run
3538 IPA passes when not optimizing. */
3539 return flag_ipa_cp && optimize;
3542 struct ipa_opt_pass_d pass_ipa_cp =
3545 IPA_PASS,
3546 "cp", /* name */
3547 OPTGROUP_NONE, /* optinfo_flags */
3548 cgraph_gate_cp, /* gate */
3549 ipcp_driver, /* execute */
3550 NULL, /* sub */
3551 NULL, /* next */
3552 0, /* static_pass_number */
3553 TV_IPA_CONSTANT_PROP, /* tv_id */
3554 0, /* properties_required */
3555 0, /* properties_provided */
3556 0, /* properties_destroyed */
3557 0, /* todo_flags_start */
3558 TODO_dump_symtab |
3559 TODO_remove_functions | TODO_ggc_collect /* todo_flags_finish */
3561 ipcp_generate_summary, /* generate_summary */
3562 ipcp_write_summary, /* write_summary */
3563 ipcp_read_summary, /* read_summary */
3564 ipa_prop_write_all_agg_replacement, /* write_optimization_summary */
3565 ipa_prop_read_all_agg_replacement, /* read_optimization_summary */
3566 NULL, /* stmt_fixup */
3567 0, /* TODOs */
3568 ipcp_transform_function, /* function_transform */
3569 NULL, /* variable_transform */