gcc-4_8 / gcc / ipa-cp.c (official-gcc.git)
1 /* Interprocedural constant propagation
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
4 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
5 <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Interprocedural constant propagation (IPA-CP).
25 The goal of this transformation is to
27 1) discover functions which are always invoked with some arguments with the
28 same known constant values and modify the functions so that the
29 subsequent optimizations can take advantage of the knowledge, and
31 2) partial specialization - create specialized versions of functions
32 transformed in this way if some parameters are known constants only in
33 certain contexts but the estimated tradeoff between speedup and code size
34 growth is deemed good (see the example sketched below).
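   For illustration (a made-up example, not taken from this file):

     static int mul (int x, int y) { return x * y; }

   If every caller invokes mul (a, 8), case 1) above lets the pass substitute
   8 for y directly in the body; if only some call sites pass 8, case 2) may
   create a specialized clone (named along the lines of mul.constprop.0) for
   those call sites and redirect them to it.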
36 The algorithm also propagates types and attempts to perform type based
37 devirtualization. Types are propagated much like constants.
39 The algorithm basically consists of three stages. In the first, functions
40 are analyzed one at a time and jump functions are constructed for all known
41 call-sites. In the second phase, the pass propagates information from the
42 jump functions across the call to reveal what values are available at what
43 call sites, performs estimations of effects of known values on functions and
44 their callees, and finally decides what specialized extra versions should be
45 created. In the third, the special versions materialize and appropriate
46 calls are redirected.
48 The algorithm used is to a certain extent based on "Interprocedural Constant
49 Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
50 Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
51 Cooper, Mary W. Hall, and Ken Kennedy.
54 First stage - intraprocedural analysis
55 =======================================
57 This phase computes jump_function and modification flags.
59 A jump function for a call-site represents the values passed as actual
60 arguments of a given call-site. In principle, there are three types of
61 values (illustrated by the example below):
63 Pass through - the caller's formal parameter is passed as an actual
64 argument, plus an operation on it can be performed.
65 Constant - a constant is passed as an actual argument.
66 Unknown - neither of the above.
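   For example (an illustrative call, not taken from the sources), in

     void caller (int a, int *p) { callee (a + 4, 7, *p); }

   the first argument is described by an arithmetic pass-through jump
   function (the formal a plus 4), the second by a constant jump function
   and the third, loaded from memory, typically by an unknown one.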
68 All jump function types are described in detail in ipa-prop.h, together with
69 the data structures that represent them and methods of accessing them.
71 ipcp_generate_summary() is the main function of the first stage.
73 Second stage - interprocedural analysis
74 ========================================
76 This stage is itself divided into two phases. In the first, we propagate
77 known values over the call graph, in the second, we make cloning decisions.
78 It uses a different algorithm than the one in the original paper by Callahan et al.
80 First, we traverse the functions topologically from callers to callees and,
81 for each strongly connected component (SCC), we propagate constants
82 according to previously computed jump functions. We also record what known
83 values depend on other known values and estimate local effects. Finally, we
84 propagate cumulative information about these effects from dependent values
85 to those on which they depend.
87 Second, we again traverse the call graph in the same topological order and
88 make clones for functions which we know are called with the same values in
89 all contexts and decide about extra specialized clones of functions just for
90 some contexts - these decisions are based on both local estimates and
91 cumulative estimates propagated from callees.
93 ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
94 second stage.
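   As a small, hypothetical example (not taken from this file): if main ()
   calls f (5) and f in turn passes its parameter on as g (x + 1), the
   propagation phase records the value 5 in the lattice of f's parameter and,
   through the arithmetic pass-through jump function built for the call to g,
   the value 6 in g's lattice; the decision phase can then specialize both
   f and g for those values.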
96 Third phase - materialization of clones, call statement updates.
97 ============================================
99 This stage is currently performed by call graph code (mainly in cgraphunit.c
100 and tree-inline.c) according to instructions inserted to the call graph by
101 the second stage. */
103 #include "config.h"
104 #include "system.h"
105 #include "coretypes.h"
106 #include "tree.h"
107 #include "target.h"
108 #include "gimple.h"
109 #include "cgraph.h"
110 #include "ipa-prop.h"
111 #include "tree-flow.h"
112 #include "tree-pass.h"
113 #include "flags.h"
114 #include "diagnostic.h"
115 #include "tree-pretty-print.h"
116 #include "tree-inline.h"
117 #include "params.h"
118 #include "dbgcnt.h"
119 #include "ipa-inline.h"
120 #include "ipa-utils.h"
121 #include "l-ipo.h"
123 struct ipcp_value;
125 /* Describes a particular source for an IPA-CP value. */
127 struct ipcp_value_source
129 /* Aggregate offset of the source, negative if the source is the scalar value of
130 the argument itself. */
131 HOST_WIDE_INT offset;
132 /* The incoming edge that brought the value. */
133 struct cgraph_edge *cs;
134 /* If the jump function that resulted in this value was a pass-through or an
135 ancestor, this is the ipcp_value of the caller from which the described
136 value has been derived. Otherwise it is NULL. */
137 struct ipcp_value *val;
138 /* Next pointer in a linked list of sources of a value. */
139 struct ipcp_value_source *next;
140 /* If the jump function that resulted in this value was a pass-through or an
141 ancestor, this is the index of the parameter of the caller the jump
142 function references. */
143 int index;
146 /* Describes one particular value stored in struct ipcp_lattice. */
148 struct ipcp_value
150 /* The actual value for the given parameter. This is either an IPA invariant
151 or a TREE_BINFO describing a type that can be used for
152 devirtualization. */
153 tree value;
154 /* The list of sources from which this value originates. */
155 struct ipcp_value_source *sources;
156 /* Next pointers in a linked list of all values in a lattice. */
157 struct ipcp_value *next;
158 /* Next pointers in a linked list of values in a strongly connected component
159 of values. */
160 struct ipcp_value *scc_next;
161 /* Next pointers in a linked list of SCCs of values sorted topologically
162 according to their sources. */
163 struct ipcp_value *topo_next;
164 /* A specialized node created for this value, NULL if none has been (so far)
165 created. */
166 struct cgraph_node *spec_node;
167 /* Depth first search number and low link for topological sorting of
168 values. */
169 int dfs, low_link;
170 /* Time benefit and size cost that specializing the function for this value
171 would bring about in this function alone. */
172 int local_time_benefit, local_size_cost;
173 /* Time benefit and size cost that specializing the function for this value
174 can bring about in its callees (transitively). */
175 int prop_time_benefit, prop_size_cost;
176 True if this value is currently on the topo-sort stack. */
177 bool on_stack;
180 /* Lattice describing potential values of a formal parameter of a function, or
181 a part of an aggregate. TOP is represented by a lattice with zero values
182 and with contains_variable and bottom flags cleared. BOTTOM is represented
183 by a lattice with the bottom flag set. In that case, values and
184 contains_variable flag should be disregarded. */
186 struct ipcp_lattice
188 /* The list of known values and types in this lattice. Note that values are
189 not deallocated if a lattice is set to bottom because there may be value
190 sources referencing them. */
191 struct ipcp_value *values;
192 /* Number of known values and types in this lattice. */
193 int values_count;
194 /* The lattice contains a variable component (in addition to values). */
195 bool contains_variable;
196 /* The value of the lattice is bottom (i.e. variable and unusable for any
197 propagation). */
198 bool bottom;
201 /* Lattice with an offset to describe a part of an aggregate. */
203 struct ipcp_agg_lattice : public ipcp_lattice
205 /* Offset that is being described by this lattice. */
206 HOST_WIDE_INT offset;
207 /* Size so that we don't have to re-compute it every time we traverse the
208 list. Must correspond to TYPE_SIZE of all lat values. */
209 HOST_WIDE_INT size;
210 /* Next element of the linked list. */
211 struct ipcp_agg_lattice *next;
214 /* Structure containing lattices for a parameter itself and for pieces of
215 aggregates that are passed in the parameter or by a reference in a parameter
216 plus some other useful flags. */
218 struct ipcp_param_lattices
220 /* Lattice describing the value of the parameter itself. */
221 struct ipcp_lattice itself;
222 /* Lattices describing aggregate parts. */
223 struct ipcp_agg_lattice *aggs;
224 /* Number of aggregate lattices */
225 int aggs_count;
226 /* True if aggregate data were passed by reference (as opposed to by
227 value). */
228 bool aggs_by_ref;
229 /* All aggregate lattices contain a variable component (in addition to
230 values). */
231 bool aggs_contain_variable;
232 /* The value of all aggregate lattices is bottom (i.e. variable and unusable
233 for any propagation). */
234 bool aggs_bottom;
236 /* There is a virtual call based on this parameter. */
237 bool virt_call;
240 /* Allocation pools for values and their sources in ipa-cp. */
242 alloc_pool ipcp_values_pool;
243 alloc_pool ipcp_sources_pool;
244 alloc_pool ipcp_agg_lattice_pool;
246 /* Maximal count found in program. */
248 static gcov_type max_count;
250 /* Original overall size of the program. */
252 static long overall_size, max_new_size;
254 /* Head of the linked list of topologically sorted values. */
256 static struct ipcp_value *values_topo;
258 /* Return the param lattices structure corresponding to the Ith formal
259 parameter of the function described by INFO. */
260 static inline struct ipcp_param_lattices *
261 ipa_get_parm_lattices (struct ipa_node_params *info, int i)
263 gcc_assert (i >= 0 && i < ipa_get_param_count (info));
264 gcc_checking_assert (!info->ipcp_orig_node);
265 gcc_checking_assert (info->lattices);
266 return &(info->lattices[i]);
269 /* Return the lattice corresponding to the scalar value of the Ith formal
270 parameter of the function described by INFO. */
271 static inline struct ipcp_lattice *
272 ipa_get_scalar_lat (struct ipa_node_params *info, int i)
274 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
275 return &plats->itself;
278 /* Return whether LAT is a lattice with a single constant and without an
279 undefined value. */
281 static inline bool
282 ipa_lat_is_single_const (struct ipcp_lattice *lat)
284 if (lat->bottom
285 || lat->contains_variable
286 || lat->values_count != 1)
287 return false;
288 else
289 return true;
292 /* Return true iff the CS is an edge within a strongly connected component as
293 computed by ipa_reduced_postorder. */
295 static inline bool
296 edge_within_scc (struct cgraph_edge *cs)
298 struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->symbol.aux;
299 struct ipa_dfs_info *callee_dfs;
300 struct cgraph_node *callee = cgraph_function_node (cs->callee, NULL);
302 callee_dfs = (struct ipa_dfs_info *) callee->symbol.aux;
303 return (caller_dfs
304 && callee_dfs
305 && caller_dfs->scc_no == callee_dfs->scc_no);
308 /* Print V which is extracted from a value in a lattice to F. */
310 static void
311 print_ipcp_constant_value (FILE * f, tree v)
313 if (TREE_CODE (v) == TREE_BINFO)
315 fprintf (f, "BINFO ");
316 print_generic_expr (f, BINFO_TYPE (v), 0);
318 else if (TREE_CODE (v) == ADDR_EXPR
319 && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
321 fprintf (f, "& ");
322 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)), 0);
324 else
325 print_generic_expr (f, v, 0);
328 /* Print a lattice LAT to F. */
330 static void
331 print_lattice (FILE * f, struct ipcp_lattice *lat,
332 bool dump_sources, bool dump_benefits)
334 struct ipcp_value *val;
335 bool prev = false;
337 if (lat->bottom)
339 fprintf (f, "BOTTOM\n");
340 return;
343 if (!lat->values_count && !lat->contains_variable)
345 fprintf (f, "TOP\n");
346 return;
349 if (lat->contains_variable)
351 fprintf (f, "VARIABLE");
352 prev = true;
353 if (dump_benefits)
354 fprintf (f, "\n");
357 for (val = lat->values; val; val = val->next)
359 if (dump_benefits && prev)
360 fprintf (f, " ");
361 else if (!dump_benefits && prev)
362 fprintf (f, ", ");
363 else
364 prev = true;
366 print_ipcp_constant_value (f, val->value);
368 if (dump_sources)
370 struct ipcp_value_source *s;
372 fprintf (f, " [from:");
373 for (s = val->sources; s; s = s->next)
374 fprintf (f, " %i(%i)", s->cs->caller->uid,s->cs->frequency);
375 fprintf (f, "]");
378 if (dump_benefits)
379 fprintf (f, " [loc_time: %i, loc_size: %i, "
380 "prop_time: %i, prop_size: %i]\n",
381 val->local_time_benefit, val->local_size_cost,
382 val->prop_time_benefit, val->prop_size_cost);
384 if (!dump_benefits)
385 fprintf (f, "\n");
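/* For instance (an illustrative sketch of print_lattice's output, not a
   captured dump): a lattice that contains a variable component plus the
   constants 4 and 7 is printed roughly as "VARIABLE, 4, 7" when benefits are
   not dumped, and with each value on its own line followed by its loc_time,
   loc_size, prop_time and prop_size figures when they are.  */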
388 /* Print all ipcp_lattices of all functions to F. */
390 static void
391 print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
393 struct cgraph_node *node;
394 int i, count;
396 fprintf (f, "\nLattices:\n");
397 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
399 struct ipa_node_params *info;
401 info = IPA_NODE_REF (node);
402 fprintf (f, " Node: %s/%i:\n", cgraph_node_name (node), node->uid);
403 count = ipa_get_param_count (info);
404 for (i = 0; i < count; i++)
406 struct ipcp_agg_lattice *aglat;
407 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
408 fprintf (f, " param [%d]: ", i);
409 print_lattice (f, &plats->itself, dump_sources, dump_benefits);
411 if (plats->virt_call)
412 fprintf (f, " virt_call flag set\n");
414 if (plats->aggs_bottom)
416 fprintf (f, " AGGS BOTTOM\n");
417 continue;
419 if (plats->aggs_contain_variable)
420 fprintf (f, " AGGS VARIABLE\n");
421 for (aglat = plats->aggs; aglat; aglat = aglat->next)
423 fprintf (f, " %soffset " HOST_WIDE_INT_PRINT_DEC ": ",
424 plats->aggs_by_ref ? "ref " : "", aglat->offset);
425 print_lattice (f, aglat, dump_sources, dump_benefits);
431 /* Determine whether it is at all technically possible to create clones of NODE
432 and store this information in the ipa_node_params structure associated
433 with NODE. */
435 static void
436 determine_versionability (struct cgraph_node *node)
438 const char *reason = NULL;
440 /* There are a number of generic reasons functions cannot be versioned. We
441 also cannot remove parameters if there are type attributes such as fnspec
442 present. */
443 if (node->alias || node->thunk.thunk_p)
444 reason = "alias or thunk";
445 else if (!node->local.versionable)
446 reason = "not a tree_versionable_function";
447 else if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
448 reason = "insufficient body availability";
450 if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
451 fprintf (dump_file, "Function %s/%i is not versionable, reason: %s.\n",
452 cgraph_node_name (node), node->uid, reason);
454 node->local.versionable = (reason == NULL);
457 /* Return true if it is at all technically possible to create clones of a
458 NODE. */
460 static bool
461 ipcp_versionable_function_p (struct cgraph_node *node)
463 return node->local.versionable;
466 /* Structure holding accumulated information about callers of a node. */
468 struct caller_statistics
470 gcov_type count_sum;
471 int n_calls, n_hot_calls, freq_sum;
474 /* Initialize fields of STAT to zeroes. */
476 static inline void
477 init_caller_stats (struct caller_statistics *stats)
479 stats->count_sum = 0;
480 stats->n_calls = 0;
481 stats->n_hot_calls = 0;
482 stats->freq_sum = 0;
485 /* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
486 non-thunk incoming edges to NODE. */
488 static bool
489 gather_caller_stats (struct cgraph_node *node, void *data)
491 struct caller_statistics *stats = (struct caller_statistics *) data;
492 struct cgraph_edge *cs;
494 for (cs = node->callers; cs; cs = cs->next_caller)
495 if (cs->caller->thunk.thunk_p)
496 cgraph_for_node_and_aliases (cs->caller, gather_caller_stats,
497 stats, false);
498 else
500 stats->count_sum += cs->count;
501 stats->freq_sum += cs->frequency;
502 stats->n_calls++;
503 if (cgraph_maybe_hot_edge_p (cs))
504 stats->n_hot_calls ++;
506 return false;
510 /* Return true if this NODE is a viable candidate for cloning. */
512 static bool
513 ipcp_cloning_candidate_p (struct cgraph_node *node)
515 struct caller_statistics stats;
517 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
519 if (!flag_ipa_cp_clone)
521 if (dump_file)
522 fprintf (dump_file, "Not considering %s for cloning; "
523 "-fipa-cp-clone disabled.\n",
524 cgraph_node_name (node));
525 return false;
528 if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->symbol.decl)))
530 if (dump_file)
531 fprintf (dump_file, "Not considering %s for cloning; "
532 "optimizing it for size.\n",
533 cgraph_node_name (node));
534 return false;
537 init_caller_stats (&stats);
538 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
540 if (inline_summary (node)->self_size < stats.n_calls)
542 if (dump_file)
543 fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
544 cgraph_node_name (node));
545 return true;
548 /* When a profile is available and the function is hot, propagate into it
549 even if its calls seem cold; constant propagation can improve the
550 function's speed significantly. */
551 if (max_count)
553 if (stats.count_sum > node->count * 90 / 100)
555 if (dump_file)
556 fprintf (dump_file, "Considering %s for cloning; "
557 "usually called directly.\n",
558 cgraph_node_name (node));
559 return true;
562 if (!stats.n_hot_calls)
564 if (dump_file)
565 fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
566 cgraph_node_name (node));
567 return false;
569 if (dump_file)
570 fprintf (dump_file, "Considering %s for cloning.\n",
571 cgraph_node_name (node));
572 return true;
575 /* Arrays representing a topological ordering of call graph nodes and a stack
576 of nodes used during constant propagation. */
578 struct topo_info
580 struct cgraph_node **order;
581 struct cgraph_node **stack;
582 int nnodes, stack_top;
585 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
587 static void
588 build_toporder_info (struct topo_info *topo)
590 topo->order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
591 topo->stack = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
592 topo->stack_top = 0;
593 topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
596 /* Free information about strongly connected components and the arrays in
597 TOPO. */
599 static void
600 free_toporder_info (struct topo_info *topo)
602 ipa_free_postorder_info ();
603 free (topo->order);
604 free (topo->stack);
607 /* Add NODE to the stack in TOPO, unless it is already there. */
609 static inline void
610 push_node_to_stack (struct topo_info *topo, struct cgraph_node *node)
612 struct ipa_node_params *info = IPA_NODE_REF (node);
613 if (info->node_enqueued)
614 return;
615 info->node_enqueued = 1;
616 topo->stack[topo->stack_top++] = node;
619 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
620 is empty. */
622 static struct cgraph_node *
623 pop_node_from_stack (struct topo_info *topo)
625 if (topo->stack_top)
627 struct cgraph_node *node;
628 topo->stack_top--;
629 node = topo->stack[topo->stack_top];
630 IPA_NODE_REF (node)->node_enqueued = 0;
631 return node;
633 else
634 return NULL;
637 /* Set lattice LAT to bottom and return true if it previously was not set as
638 such. */
640 static inline bool
641 set_lattice_to_bottom (struct ipcp_lattice *lat)
643 bool ret = !lat->bottom;
644 lat->bottom = true;
645 return ret;
648 /* Mark lattice as containing an unknown value and return true if it previously
649 was not marked as such. */
651 static inline bool
652 set_lattice_contains_variable (struct ipcp_lattice *lat)
654 bool ret = !lat->contains_variable;
655 lat->contains_variable = true;
656 return ret;
659 /* Set all aggregate lattices in PLATS to bottom and return true if they were
660 not previously set as such. */
662 static inline bool
663 set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
665 bool ret = !plats->aggs_bottom;
666 plats->aggs_bottom = true;
667 return ret;
670 /* Mark all aggregate lattices in PLATS as containing an unknown value and
671 return true if they were not previously marked as such. */
673 static inline bool
674 set_agg_lats_contain_variable (struct ipcp_param_lattices *plats)
676 bool ret = !plats->aggs_contain_variable;
677 plats->aggs_contain_variable = true;
678 return ret;
681 /* Mark both aggregate and scalar lattices as containing an unknown variable,
682 return true if any of them has not been marked as such so far. */
684 static inline bool
685 set_all_contains_variable (struct ipcp_param_lattices *plats)
687 bool ret = !plats->itself.contains_variable || !plats->aggs_contain_variable;
688 plats->itself.contains_variable = true;
689 plats->aggs_contain_variable = true;
690 return ret;
693 /* Initialize ipcp_lattices. */
695 static void
696 initialize_node_lattices (struct cgraph_node *node)
698 struct ipa_node_params *info = IPA_NODE_REF (node);
699 struct cgraph_edge *ie;
700 bool disable = false, variable = false;
701 int i;
703 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
704 if (!node->local.local)
706 /* When cloning is allowed, we can assume that externally visible
707 functions are not called. We will compensate for this by cloning
708 later. */
709 if (ipcp_versionable_function_p (node)
710 && ipcp_cloning_candidate_p (node))
711 variable = true;
712 else
713 disable = true;
716 if (disable || variable)
718 for (i = 0; i < ipa_get_param_count (info) ; i++)
720 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
721 if (disable)
723 set_lattice_to_bottom (&plats->itself);
724 set_agg_lats_to_bottom (plats);
726 else
727 set_all_contains_variable (plats);
729 if (dump_file && (dump_flags & TDF_DETAILS)
730 && !node->alias && !node->thunk.thunk_p)
731 fprintf (dump_file, "Marking all lattices of %s/%i as %s\n",
732 cgraph_node_name (node), node->uid,
733 disable ? "BOTTOM" : "VARIABLE");
735 if (!disable)
736 for (i = 0; i < ipa_get_param_count (info) ; i++)
738 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
739 tree t = TREE_TYPE (ipa_get_param(info, i));
741 if (POINTER_TYPE_P (t) && TYPE_RESTRICT (t)
742 && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE)
744 set_lattice_to_bottom (&plats->itself);
745 if (dump_file && (dump_flags & TDF_DETAILS)
746 && !node->alias && !node->thunk.thunk_p)
747 fprintf (dump_file, "Going to ignore param %i of %s/%i.\n",
748 i, cgraph_node_name (node), node->uid);
752 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
753 if (ie->indirect_info->polymorphic
754 && ie->indirect_info->param_index >= 0)
756 gcc_checking_assert (ie->indirect_info->param_index >= 0);
757 ipa_get_parm_lattices (info,
758 ie->indirect_info->param_index)->virt_call = 1;
762 /* Return the result of a (possibly arithmetic) pass through jump function
763 JFUNC on the constant value INPUT. Return NULL_TREE if that cannot be
764 determined or is not itself an interprocedural invariant. */
766 static tree
767 ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
769 tree restype, res;
771 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
772 return input;
773 else if (TREE_CODE (input) == TREE_BINFO)
774 return NULL_TREE;
776 gcc_checking_assert (is_gimple_ip_invariant (input));
777 if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc))
778 == tcc_comparison)
779 restype = boolean_type_node;
780 else
781 restype = TREE_TYPE (input);
782 res = fold_binary (ipa_get_jf_pass_through_operation (jfunc), restype,
783 input, ipa_get_jf_pass_through_operand (jfunc));
785 if (res && !is_gimple_ip_invariant (res))
786 return NULL_TREE;
788 return res;
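/* For example (an illustrative sketch): a pass-through jump function built
   for a call like callee (x + 1), applied by ipa_get_jf_pass_through_result
   above to the constant 7, folds to 8; one built for callee (x < 3) has a
   tcc_comparison operation, so the result type is boolean_type_node and the
   folded value for the input 7 is false.  */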
791 /* Return the result of an ancestor jump function JFUNC on the constant value
792 INPUT. Return NULL_TREE if that cannot be determined. */
794 static tree
795 ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
797 if (TREE_CODE (input) == TREE_BINFO)
798 return get_binfo_at_offset (input,
799 ipa_get_jf_ancestor_offset (jfunc),
800 ipa_get_jf_ancestor_type (jfunc));
801 else if (TREE_CODE (input) == ADDR_EXPR)
803 tree t = TREE_OPERAND (input, 0);
804 t = build_ref_for_offset (EXPR_LOCATION (t), t,
805 ipa_get_jf_ancestor_offset (jfunc),
806 ipa_get_jf_ancestor_type (jfunc), NULL, false);
807 return build_fold_addr_expr (t);
809 else
810 return NULL_TREE;
813 /* Extract the actual BINFO being described by JFUNC which must be a known type
814 jump function. */
816 static tree
817 ipa_value_from_known_type_jfunc (struct ipa_jump_func *jfunc)
819 tree base_binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
820 if (!base_binfo)
821 return NULL_TREE;
822 return get_binfo_at_offset (base_binfo,
823 ipa_get_jf_known_type_offset (jfunc),
824 ipa_get_jf_known_type_component_type (jfunc));
827 /* Determine whether JFUNC evaluates to a known value (that is either a
828 constant or a binfo) and if so, return it. Otherwise return NULL. INFO
829 describes the caller node so that pass-through jump functions can be
830 evaluated. */
832 tree
833 ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
835 if (jfunc->type == IPA_JF_CONST)
836 return ipa_get_jf_constant (jfunc);
837 else if (jfunc->type == IPA_JF_KNOWN_TYPE)
838 return ipa_value_from_known_type_jfunc (jfunc);
839 else if (jfunc->type == IPA_JF_PASS_THROUGH
840 || jfunc->type == IPA_JF_ANCESTOR)
842 tree input;
843 int idx;
845 if (jfunc->type == IPA_JF_PASS_THROUGH)
846 idx = ipa_get_jf_pass_through_formal_id (jfunc);
847 else
848 idx = ipa_get_jf_ancestor_formal_id (jfunc);
850 if (info->ipcp_orig_node)
851 input = info->known_vals[idx];
852 else
854 struct ipcp_lattice *lat;
856 if (!info->lattices)
858 gcc_checking_assert (!flag_ipa_cp);
859 return NULL_TREE;
861 lat = ipa_get_scalar_lat (info, idx);
862 if (!ipa_lat_is_single_const (lat))
863 return NULL_TREE;
864 input = lat->values->value;
867 if (!input)
868 return NULL_TREE;
870 if (jfunc->type == IPA_JF_PASS_THROUGH)
871 return ipa_get_jf_pass_through_result (jfunc, input);
872 else
873 return ipa_get_jf_ancestor_result (jfunc, input);
875 else
876 return NULL_TREE;
880 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
881 bottom, not containing a variable component and without any known value at
882 the same time. */
884 DEBUG_FUNCTION void
885 ipcp_verify_propagated_values (void)
887 struct cgraph_node *node;
889 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
891 struct ipa_node_params *info = IPA_NODE_REF (node);
892 int i, count = ipa_get_param_count (info);
894 for (i = 0; i < count; i++)
896 struct ipcp_lattice *lat = ipa_get_scalar_lat (info, i);
898 if (!lat->bottom
899 && !lat->contains_variable
900 && lat->values_count == 0)
902 if (dump_file)
904 fprintf (dump_file, "\nIPA lattices after constant "
905 "propagation:\n");
906 print_all_lattices (dump_file, true, false);
909 gcc_unreachable ();
915 /* Return true iff X and Y should be considered equal values by IPA-CP. */
917 static bool
918 values_equal_for_ipcp_p (tree x, tree y)
920 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
922 if (x == y)
923 return true;
925 if (TREE_CODE (x) == TREE_BINFO || TREE_CODE (y) == TREE_BINFO)
926 return false;
928 if (TREE_CODE (x) == ADDR_EXPR
929 && TREE_CODE (y) == ADDR_EXPR
930 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
931 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
932 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
933 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
934 else
935 return operand_equal_p (x, y, 0);
938 /* Add a new value source to VAL, marking that a value comes from edge CS and
939 (if the underlying jump function is a pass-through or an ancestor one) from
940 a caller value SRC_VAL of a caller parameter described by SRC_INDEX. OFFSET
941 is negative if the source was the scalar value of the parameter itself,
942 otherwise it is the offset within an aggregate. */
944 static void
945 add_value_source (struct ipcp_value *val, struct cgraph_edge *cs,
946 struct ipcp_value *src_val, int src_idx, HOST_WIDE_INT offset)
948 struct ipcp_value_source *src;
950 src = (struct ipcp_value_source *) pool_alloc (ipcp_sources_pool);
951 src->offset = offset;
952 src->cs = cs;
953 src->val = src_val;
954 src->index = src_idx;
956 src->next = val->sources;
957 val->sources = src;
960 /* Try to add NEWVAL to LAT, potentially creating a new struct ipcp_value for
961 it. CS, SRC_VAL, SRC_INDEX and OFFSET are meant for add_value_source and
962 have the same meaning. */
964 static bool
965 add_value_to_lattice (struct ipcp_lattice *lat, tree newval,
966 struct cgraph_edge *cs, struct ipcp_value *src_val,
967 int src_idx, HOST_WIDE_INT offset)
969 struct ipcp_value *val;
971 if (lat->bottom)
972 return false;
974 for (val = lat->values; val; val = val->next)
975 if (values_equal_for_ipcp_p (val->value, newval))
977 if (edge_within_scc (cs))
979 struct ipcp_value_source *s;
980 for (s = val->sources; s ; s = s->next)
981 if (s->cs == cs)
982 break;
983 if (s)
984 return false;
987 add_value_source (val, cs, src_val, src_idx, offset);
988 return false;
991 if (lat->values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
993 /* We can only free sources, not the values themselves, because sources
994 of other values in this SCC might point to them. */
995 for (val = lat->values; val; val = val->next)
997 while (val->sources)
999 struct ipcp_value_source *src = val->sources;
1000 val->sources = src->next;
1001 pool_free (ipcp_sources_pool, src);
1005 lat->values = NULL;
1006 return set_lattice_to_bottom (lat);
1009 lat->values_count++;
1010 val = (struct ipcp_value *) pool_alloc (ipcp_values_pool);
1011 memset (val, 0, sizeof (*val));
1013 add_value_source (val, cs, src_val, src_idx, offset);
1014 val->value = newval;
1015 val->next = lat->values;
1016 lat->values = val;
1017 return true;
1020 /* Like above but passes a special value of offset to distinguish that the
1021 origin is the scalar value of the parameter rather than a part of an
1022 aggregate. */
1024 static inline bool
1025 add_scalar_value_to_lattice (struct ipcp_lattice *lat, tree newval,
1026 struct cgraph_edge *cs,
1027 struct ipcp_value *src_val, int src_idx)
1029 return add_value_to_lattice (lat, newval, cs, src_val, src_idx, -1);
1032 /* Propagate values through a pass-through jump function JFUNC associated with
1033 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1034 is the index of the source parameter. */
1036 static bool
1037 propagate_vals_accross_pass_through (struct cgraph_edge *cs,
1038 struct ipa_jump_func *jfunc,
1039 struct ipcp_lattice *src_lat,
1040 struct ipcp_lattice *dest_lat,
1041 int src_idx)
1043 struct ipcp_value *src_val;
1044 bool ret = false;
1046 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
1047 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1048 ret |= add_scalar_value_to_lattice (dest_lat, src_val->value, cs,
1049 src_val, src_idx);
1050 /* Do not create new values when propagating within an SCC because if there
1051 are arithmetic functions with circular dependencies, there is an infinite
1052 number of them and we would just make lattices bottom. */
1053 else if (edge_within_scc (cs))
1054 ret = set_lattice_contains_variable (dest_lat);
1055 else
1056 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1058 tree cstval = src_val->value;
1060 if (TREE_CODE (cstval) == TREE_BINFO)
1062 ret |= set_lattice_contains_variable (dest_lat);
1063 continue;
1065 cstval = ipa_get_jf_pass_through_result (jfunc, cstval);
1067 if (cstval)
1068 ret |= add_scalar_value_to_lattice (dest_lat, cstval, cs, src_val,
1069 src_idx);
1070 else
1071 ret |= set_lattice_contains_variable (dest_lat);
1074 return ret;
1077 /* Propagate values through an ancestor jump function JFUNC associated with
1078 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1079 is the index of the source parameter. */
1081 static bool
1082 propagate_vals_accross_ancestor (struct cgraph_edge *cs,
1083 struct ipa_jump_func *jfunc,
1084 struct ipcp_lattice *src_lat,
1085 struct ipcp_lattice *dest_lat,
1086 int src_idx)
1088 struct ipcp_value *src_val;
1089 bool ret = false;
1091 if (edge_within_scc (cs))
1092 return set_lattice_contains_variable (dest_lat);
1094 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1096 tree t = ipa_get_jf_ancestor_result (jfunc, src_val->value);
1098 if (t)
1099 ret |= add_scalar_value_to_lattice (dest_lat, t, cs, src_val, src_idx);
1100 else
1101 ret |= set_lattice_contains_variable (dest_lat);
1104 return ret;
1107 /* Propagate scalar values across jump function JFUNC that is associated with
1108 edge CS and put the values into DEST_LAT. */
1110 static bool
1111 propagate_scalar_accross_jump_function (struct cgraph_edge *cs,
1112 struct ipa_jump_func *jfunc,
1113 struct ipcp_lattice *dest_lat)
1115 if (dest_lat->bottom)
1116 return false;
1118 if (jfunc->type == IPA_JF_CONST
1119 || jfunc->type == IPA_JF_KNOWN_TYPE)
1121 tree val;
1123 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1125 val = ipa_value_from_known_type_jfunc (jfunc);
1126 if (!val)
1127 return set_lattice_contains_variable (dest_lat);
1129 else
1130 val = ipa_get_jf_constant (jfunc);
1131 return add_scalar_value_to_lattice (dest_lat, val, cs, NULL, 0);
1133 else if (jfunc->type == IPA_JF_PASS_THROUGH
1134 || jfunc->type == IPA_JF_ANCESTOR)
1136 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1137 struct ipcp_lattice *src_lat;
1138 int src_idx;
1139 bool ret;
1141 if (jfunc->type == IPA_JF_PASS_THROUGH)
1142 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1143 else
1144 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1146 src_lat = ipa_get_scalar_lat (caller_info, src_idx);
1147 if (src_lat->bottom)
1148 return set_lattice_contains_variable (dest_lat);
1150 /* If we would need to clone the caller and cannot, do not propagate. */
1151 if (!ipcp_versionable_function_p (cs->caller)
1152 && (src_lat->contains_variable
1153 || (src_lat->values_count > 1)))
1154 return set_lattice_contains_variable (dest_lat);
1156 if (jfunc->type == IPA_JF_PASS_THROUGH)
1157 ret = propagate_vals_accross_pass_through (cs, jfunc, src_lat,
1158 dest_lat, src_idx);
1159 else
1160 ret = propagate_vals_accross_ancestor (cs, jfunc, src_lat, dest_lat,
1161 src_idx);
1163 if (src_lat->contains_variable)
1164 ret |= set_lattice_contains_variable (dest_lat);
1166 return ret;
1169 /* TODO: We currently do not handle member method pointers in IPA-CP (we only
1170 use it for indirect inlining); we should propagate them too. */
1171 return set_lattice_contains_variable (dest_lat);
1174 /* If DEST_PLATS already has aggregate items, check that aggs_by_ref matches
1175 NEW_AGGS_BY_REF and if not, mark all aggs as bottoms and return true (in all
1176 other cases, return false). If there are no aggregate items, set
1177 aggs_by_ref to NEW_AGGS_BY_REF. */
1179 static bool
1180 set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
1181 bool new_aggs_by_ref)
1183 if (dest_plats->aggs)
1185 if (dest_plats->aggs_by_ref != new_aggs_by_ref)
1187 set_agg_lats_to_bottom (dest_plats);
1188 return true;
1191 else
1192 dest_plats->aggs_by_ref = new_aggs_by_ref;
1193 return false;
1196 /* Walk aggregate lattices in DEST_PLATS from ***AGLAT on, until ***aglat is an
1197 already existing lattice for the given OFFSET and SIZE, marking all skipped
1198 lattices as containing variable and checking for overlaps. If there is no
1199 already existing lattice for the OFFSET and VAL_SIZE, create one, initialize
1200 it with offset, size and contains_variable to PRE_EXISTING, and return true,
1201 unless there are too many already. If there are too many, return false. If
1202 there are overlaps, turn the whole DEST_PLATS to bottom and return false. If any
1203 skipped lattices were newly marked as containing variable, set *CHANGE to
1204 true. */
1206 static bool
1207 merge_agg_lats_step (struct ipcp_param_lattices *dest_plats,
1208 HOST_WIDE_INT offset, HOST_WIDE_INT val_size,
1209 struct ipcp_agg_lattice ***aglat,
1210 bool pre_existing, bool *change)
1212 gcc_checking_assert (offset >= 0);
1214 while (**aglat && (**aglat)->offset < offset)
1216 if ((**aglat)->offset + (**aglat)->size > offset)
1218 set_agg_lats_to_bottom (dest_plats);
1219 return false;
1221 *change |= set_lattice_contains_variable (**aglat);
1222 *aglat = &(**aglat)->next;
1225 if (**aglat && (**aglat)->offset == offset)
1227 if ((**aglat)->size != val_size
1228 || ((**aglat)->next
1229 && (**aglat)->next->offset < offset + val_size))
1231 set_agg_lats_to_bottom (dest_plats);
1232 return false;
1234 gcc_checking_assert (!(**aglat)->next
1235 || (**aglat)->next->offset >= offset + val_size);
1236 return true;
1238 else
1240 struct ipcp_agg_lattice *new_al;
1242 if (**aglat && (**aglat)->offset < offset + val_size)
1244 set_agg_lats_to_bottom (dest_plats);
1245 return false;
1247 if (dest_plats->aggs_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1248 return false;
1249 dest_plats->aggs_count++;
1250 new_al = (struct ipcp_agg_lattice *) pool_alloc (ipcp_agg_lattice_pool);
1251 memset (new_al, 0, sizeof (*new_al));
1253 new_al->offset = offset;
1254 new_al->size = val_size;
1255 new_al->contains_variable = pre_existing;
1257 new_al->next = **aglat;
1258 **aglat = new_al;
1259 return true;
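/* A worked example for merge_agg_lats_step above (hypothetical offsets, in
   bits): with an existing chain of lattices at offsets 0 and 64, each of size
   32, merging an item at offset 0 with size 32 simply returns the existing
   first lattice; an item at offset 16 overlaps it, so all aggregate lattices
   are dropped to bottom; an item at offset 96 walks past both entries
   (marking each skipped lattice as containing variable) and appends a new
   lattice, unless PARAM_IPA_MAX_AGG_ITEMS has already been reached.  */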
1263 /* Set all AGLAT and all other aggregate lattices reachable by next pointers as
1264 containing an unknown value. */
1266 static bool
1267 set_chain_of_aglats_contains_variable (struct ipcp_agg_lattice *aglat)
1269 bool ret = false;
1270 while (aglat)
1272 ret |= set_lattice_contains_variable (aglat);
1273 aglat = aglat->next;
1275 return ret;
1278 /* Merge existing aggregate lattices in SRC_PLATS to DEST_PLATS, subtracting
1279 OFFSET_DELTA. CS is the call graph edge and SRC_IDX the index of the source
1280 parameter used for lattice value sources. Return true if DEST_PLATS changed
1281 in any way. */
1283 static bool
1284 merge_aggregate_lattices (struct cgraph_edge *cs,
1285 struct ipcp_param_lattices *dest_plats,
1286 struct ipcp_param_lattices *src_plats,
1287 int src_idx, HOST_WIDE_INT offset_delta)
1289 bool pre_existing = dest_plats->aggs != NULL;
1290 struct ipcp_agg_lattice **dst_aglat;
1291 bool ret = false;
1293 if (set_check_aggs_by_ref (dest_plats, src_plats->aggs_by_ref))
1294 return true;
1295 if (src_plats->aggs_bottom)
1296 return set_agg_lats_contain_variable (dest_plats);
1297 if (src_plats->aggs_contain_variable)
1298 ret |= set_agg_lats_contain_variable (dest_plats);
1299 dst_aglat = &dest_plats->aggs;
1301 for (struct ipcp_agg_lattice *src_aglat = src_plats->aggs;
1302 src_aglat;
1303 src_aglat = src_aglat->next)
1305 HOST_WIDE_INT new_offset = src_aglat->offset - offset_delta;
1307 if (new_offset < 0)
1308 continue;
1309 if (merge_agg_lats_step (dest_plats, new_offset, src_aglat->size,
1310 &dst_aglat, pre_existing, &ret))
1312 struct ipcp_agg_lattice *new_al = *dst_aglat;
1314 dst_aglat = &(*dst_aglat)->next;
1315 if (src_aglat->bottom)
1317 ret |= set_lattice_contains_variable (new_al);
1318 continue;
1320 if (src_aglat->contains_variable)
1321 ret |= set_lattice_contains_variable (new_al);
1322 for (struct ipcp_value *val = src_aglat->values;
1323 val;
1324 val = val->next)
1325 ret |= add_value_to_lattice (new_al, val->value, cs, val, src_idx,
1326 src_aglat->offset);
1328 else if (dest_plats->aggs_bottom)
1329 return true;
1331 ret |= set_chain_of_aglats_contains_variable (*dst_aglat);
1332 return ret;
1335 /* Determine whether there is anything to propagate from SRC_PLATS through a
1336 pass-through JFUNC and if so, whether it conforms to the rules about
1337 propagating values passed by reference. */
1339 static bool
1340 agg_pass_through_permissible_p (struct ipcp_param_lattices *src_plats,
1341 struct ipa_jump_func *jfunc)
1343 return src_plats->aggs
1344 && (!src_plats->aggs_by_ref
1345 || ipa_get_jf_pass_through_agg_preserved (jfunc));
1348 /* Propagate aggregate values across jump function JFUNC that is associated
1349 with edge CS and put the values into DEST_PLATS. */
1351 static bool
1352 propagate_aggs_accross_jump_function (struct cgraph_edge *cs,
1353 struct ipa_jump_func *jfunc,
1354 struct ipcp_param_lattices *dest_plats)
1356 bool ret = false;
1358 if (dest_plats->aggs_bottom)
1359 return false;
1361 if (jfunc->type == IPA_JF_PASS_THROUGH
1362 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
1364 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1365 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1366 struct ipcp_param_lattices *src_plats;
1368 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1369 if (agg_pass_through_permissible_p (src_plats, jfunc))
1371 /* Currently we do not produce clobber aggregate jump
1372 functions, replace with merging when we do. */
1373 gcc_assert (!jfunc->agg.items);
1374 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats,
1375 src_idx, 0);
1377 else
1378 ret |= set_agg_lats_contain_variable (dest_plats);
1380 else if (jfunc->type == IPA_JF_ANCESTOR
1381 && ipa_get_jf_ancestor_agg_preserved (jfunc))
1383 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1384 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1385 struct ipcp_param_lattices *src_plats;
1387 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1388 if (src_plats->aggs && src_plats->aggs_by_ref)
1390 /* Currently we do not produce clobber aggregate jump
1391 functions, replace with merging when we do. */
1392 gcc_assert (!jfunc->agg.items);
1393 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats, src_idx,
1394 ipa_get_jf_ancestor_offset (jfunc));
1396 else if (!src_plats->aggs_by_ref)
1397 ret |= set_agg_lats_to_bottom (dest_plats);
1398 else
1399 ret |= set_agg_lats_contain_variable (dest_plats);
1401 else if (jfunc->agg.items)
1403 bool pre_existing = dest_plats->aggs != NULL;
1404 struct ipcp_agg_lattice **aglat = &dest_plats->aggs;
1405 struct ipa_agg_jf_item *item;
1406 int i;
1408 if (set_check_aggs_by_ref (dest_plats, jfunc->agg.by_ref))
1409 return true;
1411 FOR_EACH_VEC_ELT (*jfunc->agg.items, i, item)
1413 HOST_WIDE_INT val_size;
1415 if (item->offset < 0)
1416 continue;
1417 gcc_checking_assert (is_gimple_ip_invariant (item->value));
1418 val_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (item->value)), 1);
1420 if (merge_agg_lats_step (dest_plats, item->offset, val_size,
1421 &aglat, pre_existing, &ret))
1423 ret |= add_value_to_lattice (*aglat, item->value, cs, NULL, 0, 0);
1424 aglat = &(*aglat)->next;
1426 else if (dest_plats->aggs_bottom)
1427 return true;
1430 ret |= set_chain_of_aglats_contains_variable (*aglat);
1432 else
1433 ret |= set_agg_lats_contain_variable (dest_plats);
1435 return ret;
1438 /* Propagate constants from the caller to the callee of CS. */
1441 static bool
1442 propagate_constants_accross_call (struct cgraph_edge *cs)
1444 struct ipa_node_params *callee_info;
1445 enum availability availability;
1446 struct cgraph_node *callee, *alias_or_thunk;
1447 struct ipa_edge_args *args;
1448 bool ret = false;
1449 int i, args_count, parms_count;
1451 callee = cgraph_function_node (cs->callee, &availability);
1452 if (!callee->analyzed)
1453 return false;
1454 gcc_checking_assert (cgraph_function_with_gimple_body_p (callee));
1455 callee_info = IPA_NODE_REF (callee);
1457 args = IPA_EDGE_REF (cs);
1458 args_count = ipa_get_cs_argument_count (args);
1459 parms_count = ipa_get_param_count (callee_info);
1461 /* If this call goes through a thunk we must not propagate to the first (0th)
1462 parameter. However, we might need to uncover a thunk from below a series
1463 of aliases first. */
1464 alias_or_thunk = cs->callee;
1465 while (alias_or_thunk->alias)
1466 alias_or_thunk = cgraph_alias_aliased_node (alias_or_thunk);
1467 if (alias_or_thunk->thunk.thunk_p)
1469 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
1470 0));
1471 i = 1;
1473 else
1474 i = 0;
1476 for (; (i < args_count) && (i < parms_count); i++)
1478 struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
1479 struct ipcp_param_lattices *dest_plats;
1481 dest_plats = ipa_get_parm_lattices (callee_info, i);
1482 if (availability == AVAIL_OVERWRITABLE)
1483 ret |= set_all_contains_variable (dest_plats);
1484 else
1486 ret |= propagate_scalar_accross_jump_function (cs, jump_func,
1487 &dest_plats->itself);
1488 ret |= propagate_aggs_accross_jump_function (cs, jump_func,
1489 dest_plats);
1492 for (; i < parms_count; i++)
1493 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info, i));
1495 return ret;
1498 /* If an indirect edge IE can be turned into a direct one based on KNOWN_VALS
1499 (which can contain both constants and binfos) or KNOWN_BINFOS (which can be
1500 NULL) return the destination. */
1502 tree
1503 ipa_get_indirect_edge_target (struct cgraph_edge *ie,
1504 vec<tree> known_vals,
1505 vec<tree> known_binfos,
1506 vec<ipa_agg_jump_function_p> known_aggs)
1508 int param_index = ie->indirect_info->param_index;
1509 HOST_WIDE_INT token, anc_offset;
1510 tree otr_type;
1511 tree t;
1513 if (param_index == -1
1514 || known_vals.length () <= (unsigned int) param_index)
1515 return NULL_TREE;
1517 if (!ie->indirect_info->polymorphic)
1519 tree t;
1521 if (ie->indirect_info->agg_contents)
1523 if (known_aggs.length ()
1524 > (unsigned int) param_index)
1526 struct ipa_agg_jump_function *agg;
1527 agg = known_aggs[param_index];
1528 t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
1529 ie->indirect_info->by_ref);
1531 else
1532 t = NULL;
1534 else
1535 t = known_vals[param_index];
1537 if (t &&
1538 TREE_CODE (t) == ADDR_EXPR
1539 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
1540 return TREE_OPERAND (t, 0);
1541 else if (L_IPO_COMP_MODE && t && TREE_CODE (t) == FUNCTION_DECL)
1542 return t;
1543 else
1544 return NULL_TREE;
1547 gcc_assert (!ie->indirect_info->agg_contents);
1548 token = ie->indirect_info->otr_token;
1549 anc_offset = ie->indirect_info->offset;
1550 otr_type = ie->indirect_info->otr_type;
1552 t = known_vals[param_index];
1553 if (!t && known_binfos.length () > (unsigned int) param_index)
1554 t = known_binfos[param_index];
1555 if (!t)
1556 return NULL_TREE;
1558 if (TREE_CODE (t) != TREE_BINFO)
1560 tree binfo;
1561 binfo = gimple_extract_devirt_binfo_from_cst
1562 (t, ie->indirect_info->otr_type);
1563 if (!binfo)
1564 return NULL_TREE;
1565 binfo = get_binfo_at_offset (binfo, anc_offset, otr_type);
1566 if (!binfo)
1567 return NULL_TREE;
1568 return gimple_get_virt_method_for_binfo (token, binfo);
1570 else
1572 tree binfo;
1574 binfo = get_binfo_at_offset (t, anc_offset, otr_type);
1575 if (!binfo)
1576 return NULL_TREE;
1577 return gimple_get_virt_method_for_binfo (token, binfo);
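/* For instance (an illustrative sketch, with the callee written in C++):
   given

     struct S { virtual int foo (); };
     int call (S *p) { return p->foo (); }

   a known BINFO for S, or a constant from which such a binfo can be
   extracted, lets ipa_get_indirect_edge_target above combine the ancestor
   offset with the OTR token, look up S::foo in the vtable and return its
   FUNCTION_DECL, so that the indirect call can later be made direct.  */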
1581 /* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
1582 and KNOWN_BINFOS. */
1584 static int
1585 devirtualization_time_bonus (struct cgraph_node *node,
1586 vec<tree> known_csts,
1587 vec<tree> known_binfos)
1589 struct cgraph_edge *ie;
1590 int res = 0;
1592 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1594 struct cgraph_node *callee;
1595 struct inline_summary *isummary;
1596 tree target;
1598 target = ipa_get_indirect_edge_target (ie, known_csts, known_binfos,
1599 vNULL);
1600 if (!target)
1601 continue;
1603 /* Only bare minimum benefit for clearly un-inlineable targets. */
1604 res += 1;
1605 callee = cgraph_get_node (target);
1606 if (!callee || !callee->analyzed)
1607 continue;
1608 isummary = inline_summary (callee);
1609 if (!isummary->inlinable)
1610 continue;
1612 /* FIXME: The values below need re-considering and perhaps also
1613 integrating into the cost metrics, at least in some very basic way. */
1614 if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
1615 res += 31;
1616 else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
1617 res += 15;
1618 else if (isummary->size <= MAX_INLINE_INSNS_AUTO
1619 || DECL_DECLARED_INLINE_P (callee->symbol.decl))
1620 res += 7;
1623 return res;
1626 /* Return time bonus incurred because of HINTS. */
1628 static int
1629 hint_time_bonus (inline_hints hints)
1631 if (hints & (INLINE_HINT_loop_iterations | INLINE_HINT_loop_stride))
1632 return PARAM_VALUE (PARAM_IPA_CP_LOOP_HINT_BONUS);
1633 return 0;
1636 /* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
1637 and SIZE_COST, with the sum of frequencies of incoming edges to the
1638 potential new clone in FREQ_SUM and the sum of their counts in COUNT_SUM. */
1640 static bool
1641 good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
1642 int freq_sum, gcov_type count_sum, int size_cost)
1644 if (time_benefit == 0
1645 || !flag_ipa_cp_clone
1646 || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->symbol.decl)))
1647 return false;
1649 gcc_assert (size_cost > 0);
1651 if (max_count)
1653 int factor = (count_sum * 1000) / max_count;
1654 HOST_WIDEST_INT evaluation = (((HOST_WIDEST_INT) time_benefit * factor)
1655 / size_cost);
1657 if (dump_file && (dump_flags & TDF_DETAILS))
1658 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1659 "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
1660 ") -> evaluation: " HOST_WIDEST_INT_PRINT_DEC
1661 ", threshold: %i\n",
1662 time_benefit, size_cost, (HOST_WIDE_INT) count_sum,
1663 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
1665 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
1667 else
1669 HOST_WIDEST_INT evaluation = (((HOST_WIDEST_INT) time_benefit * freq_sum)
1670 / size_cost);
1672 if (dump_file && (dump_flags & TDF_DETAILS))
1673 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1674 "size: %i, freq_sum: %i) -> evaluation: "
1675 HOST_WIDEST_INT_PRINT_DEC ", threshold: %i\n",
1676 time_benefit, size_cost, freq_sum, evaluation,
1677 PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
1679 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
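/* A worked example of the evaluation in good_cloning_opportunity_p above
   (made-up numbers): with profile data, time_benefit = 10, count_sum equal to
   half of max_count and size_cost = 4 give factor = 500 and
   evaluation = 10 * 500 / 4 = 1250, which passes the default
   --param ipa-cp-eval-threshold of 500; the same benefit with size_cost = 40
   evaluates to only 125 and the clone is rejected.  */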
1683 /* Return all context independent values from aggregate lattices in PLATS in a
1684 vector. Return NULL if there are none. */
1686 static vec<ipa_agg_jf_item_t, va_gc> *
1687 context_independent_aggregate_values (struct ipcp_param_lattices *plats)
1689 vec<ipa_agg_jf_item_t, va_gc> *res = NULL;
1691 if (plats->aggs_bottom
1692 || plats->aggs_contain_variable
1693 || plats->aggs_count == 0)
1694 return NULL;
1696 for (struct ipcp_agg_lattice *aglat = plats->aggs;
1697 aglat;
1698 aglat = aglat->next)
1699 if (ipa_lat_is_single_const (aglat))
1701 struct ipa_agg_jf_item item;
1702 item.offset = aglat->offset;
1703 item.value = aglat->values->value;
1704 vec_safe_push (res, item);
1706 return res;
1709 /* Allocate KNOWN_CSTS, KNOWN_BINFOS and, if non-NULL, KNOWN_AGGS and populate
1710 them with values of parameters that are known independent of the context.
1711 INFO describes the function. If REMOVABLE_PARAMS_COST is non-NULL, the
1712 movement cost of all removable parameters will be stored in it. */
1714 static bool
1715 gather_context_independent_values (struct ipa_node_params *info,
1716 vec<tree> *known_csts,
1717 vec<tree> *known_binfos,
1718 vec<ipa_agg_jump_function_t> *known_aggs,
1719 int *removable_params_cost)
1721 int i, count = ipa_get_param_count (info);
1722 bool ret = false;
1724 known_csts->create (0);
1725 known_binfos->create (0);
1726 known_csts->safe_grow_cleared (count);
1727 known_binfos->safe_grow_cleared (count);
1728 if (known_aggs)
1730 known_aggs->create (0);
1731 known_aggs->safe_grow_cleared (count);
1734 if (removable_params_cost)
1735 *removable_params_cost = 0;
1737 for (i = 0; i < count ; i++)
1739 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1740 struct ipcp_lattice *lat = &plats->itself;
1742 if (ipa_lat_is_single_const (lat))
1744 struct ipcp_value *val = lat->values;
1745 if (TREE_CODE (val->value) != TREE_BINFO)
1747 (*known_csts)[i] = val->value;
1748 if (removable_params_cost)
1749 *removable_params_cost
1750 += estimate_move_cost (TREE_TYPE (val->value));
1751 ret = true;
1753 else if (plats->virt_call)
1755 (*known_binfos)[i] = val->value;
1756 ret = true;
1758 else if (removable_params_cost
1759 && !ipa_is_param_used (info, i))
1760 *removable_params_cost
1761 += estimate_move_cost (TREE_TYPE (ipa_get_param (info, i)));
1763 else if (removable_params_cost
1764 && !ipa_is_param_used (info, i))
1765 *removable_params_cost
1766 += estimate_move_cost (TREE_TYPE (ipa_get_param (info, i)));
1768 if (known_aggs)
1770 vec<ipa_agg_jf_item_t, va_gc> *agg_items;
1771 struct ipa_agg_jump_function *ajf;
1773 agg_items = context_independent_aggregate_values (plats);
1774 ajf = &(*known_aggs)[i];
1775 ajf->items = agg_items;
1776 ajf->by_ref = plats->aggs_by_ref;
1777 ret |= agg_items != NULL;
1781 return ret;
1784 /* The current interface in ipa-inline-analysis requires a pointer vector.
1785 Create it.
1787 FIXME: That interface should be re-worked, this is slightly silly. Still,
1788 I'd like to discuss how to change it first and this demonstrates the
1789 issue. */
1791 static vec<ipa_agg_jump_function_p>
1792 agg_jmp_p_vec_for_t_vec (vec<ipa_agg_jump_function_t> known_aggs)
1794 vec<ipa_agg_jump_function_p> ret;
1795 struct ipa_agg_jump_function *ajf;
1796 int i;
1798 ret.create (known_aggs.length ());
1799 FOR_EACH_VEC_ELT (known_aggs, i, ajf)
1800 ret.quick_push (ajf);
1801 return ret;
1804 /* Iterate over known values of parameters of NODE and estimate the local
1805 effects in terms of time and size they have. */
1807 static void
1808 estimate_local_effects (struct cgraph_node *node)
1810 struct ipa_node_params *info = IPA_NODE_REF (node);
1811 int i, count = ipa_get_param_count (info);
1812 vec<tree> known_csts, known_binfos;
1813 vec<ipa_agg_jump_function_t> known_aggs;
1814 vec<ipa_agg_jump_function_p> known_aggs_ptrs;
1815 bool always_const;
1816 int base_time = inline_summary (node)->time;
1817 int removable_params_cost;
1819 if (!count || !ipcp_versionable_function_p (node))
1820 return;
1822 if (dump_file && (dump_flags & TDF_DETAILS))
1823 fprintf (dump_file, "\nEstimating effects for %s/%i, base_time: %i.\n",
1824 cgraph_node_name (node), node->uid, base_time);
1826 always_const = gather_context_independent_values (info, &known_csts,
1827 &known_binfos, &known_aggs,
1828 &removable_params_cost);
1829 known_aggs_ptrs = agg_jmp_p_vec_for_t_vec (known_aggs);
1830 if (always_const)
1832 struct caller_statistics stats;
1833 inline_hints hints;
1834 int time, size;
1836 init_caller_stats (&stats);
1837 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
1838 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1839 known_aggs_ptrs, &size, &time, &hints);
1840 time -= devirtualization_time_bonus (node, known_csts, known_binfos);
1841 time -= hint_time_bonus (hints);
1842 time -= removable_params_cost;
1843 size -= stats.n_calls * removable_params_cost;
1845 if (dump_file)
1846 fprintf (dump_file, " - context independent values, size: %i, "
1847 "time_benefit: %i\n", size, base_time - time);
1849 if (size <= 0
1850 || cgraph_will_be_removed_from_program_if_no_direct_calls (node))
1852 info->do_clone_for_all_contexts = true;
1853 base_time = time;
1855 if (dump_file)
1856 fprintf (dump_file, " Decided to specialize for all "
1857 "known contexts, code not going to grow.\n");
1859 else if (good_cloning_opportunity_p (node, base_time - time,
1860 stats.freq_sum, stats.count_sum,
1861 size))
1863 if (size + overall_size <= max_new_size)
1865 info->do_clone_for_all_contexts = true;
1866 base_time = time;
1867 overall_size += size;
1869 if (dump_file)
1870 fprintf (dump_file, " Decided to specialize for all "
1871 "known contexts, growth deemed beneficial.\n");
1873 else if (dump_file && (dump_flags & TDF_DETAILS))
1874 fprintf (dump_file, " Not cloning for all contexts because "
1875 "max_new_size would be reached with %li.\n",
1876 size + overall_size);
1880 for (i = 0; i < count ; i++)
1882 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1883 struct ipcp_lattice *lat = &plats->itself;
1884 struct ipcp_value *val;
1885 int emc;
1887 if (lat->bottom
1888 || !lat->values
1889 || known_csts[i]
1890 || known_binfos[i])
1891 continue;
1893 for (val = lat->values; val; val = val->next)
1895 int time, size, time_benefit;
1896 inline_hints hints;
1898 if (TREE_CODE (val->value) != TREE_BINFO)
1900 known_csts[i] = val->value;
1901 known_binfos[i] = NULL_TREE;
1902 emc = estimate_move_cost (TREE_TYPE (val->value));
1904 else if (plats->virt_call)
1906 known_csts[i] = NULL_TREE;
1907 known_binfos[i] = val->value;
1908 emc = 0;
1910 else
1911 continue;
1913 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1914 known_aggs_ptrs, &size, &time,
1915 &hints);
1916 time_benefit = base_time - time
1917 + devirtualization_time_bonus (node, known_csts, known_binfos)
1918 + hint_time_bonus (hints)
1919 + removable_params_cost + emc;
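/* Worked example (illustrative numbers only): with base_time == 500, an
   estimated clone time of 450, no devirtualization or hint bonuses,
   removable_params_cost == 4 and emc == 3, time_benefit comes out as
   500 - 450 + 0 + 0 + 4 + 3 == 57.  */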
1921 gcc_checking_assert (size >= 0);
1922 /* The inliner-heuristics based estimates may think that in certain
1923 contexts some functions do not have any size at all but we want
1924 all specializations to have at least a tiny cost, if only to avoid
1925 division by zero. */
1926 if (size == 0)
1927 size = 1;
1929 if (dump_file && (dump_flags & TDF_DETAILS))
1931 fprintf (dump_file, " - estimates for value ");
1932 print_ipcp_constant_value (dump_file, val->value);
1933 fprintf (dump_file, " for parameter ");
1934 print_generic_expr (dump_file, ipa_get_param (info, i), 0);
1935 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
1936 time_benefit, size);
1939 val->local_time_benefit = time_benefit;
1940 val->local_size_cost = size;
1942 known_binfos[i] = NULL_TREE;
1943 known_csts[i] = NULL_TREE;
1946 for (i = 0; i < count ; i++)
1948 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1949 struct ipa_agg_jump_function *ajf;
1950 struct ipcp_agg_lattice *aglat;
1952 if (plats->aggs_bottom || !plats->aggs)
1953 continue;
1955 ajf = &known_aggs[i];
1956 for (aglat = plats->aggs; aglat; aglat = aglat->next)
1958 struct ipcp_value *val;
1959 if (aglat->bottom || !aglat->values
1960 /* If the following is true, the one value is in known_aggs. */
1961 || (!plats->aggs_contain_variable
1962 && ipa_lat_is_single_const (aglat)))
1963 continue;
1965 for (val = aglat->values; val; val = val->next)
1967 int time, size, time_benefit;
1968 struct ipa_agg_jf_item item;
1969 inline_hints hints;
1971 item.offset = aglat->offset;
1972 item.value = val->value;
1973 vec_safe_push (ajf->items, item);
1975 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1976 known_aggs_ptrs, &size, &time,
1977 &hints);
1978 time_benefit = base_time - time
1979 + devirtualization_time_bonus (node, known_csts, known_binfos)
1980 + hint_time_bonus (hints);
1981 gcc_checking_assert (size >= 0);
1982 if (size == 0)
1983 size = 1;
1985 if (dump_file && (dump_flags & TDF_DETAILS))
1987 fprintf (dump_file, " - estimates for value ");
1988 print_ipcp_constant_value (dump_file, val->value);
1989 fprintf (dump_file, " for parameter ");
1990 print_generic_expr (dump_file, ipa_get_param (info, i), 0);
1991 fprintf (dump_file, "[%soffset: " HOST_WIDE_INT_PRINT_DEC
1992 "]: time_benefit: %i, size: %i\n",
1993 plats->aggs_by_ref ? "ref " : "",
1994 aglat->offset, time_benefit, size);
1997 val->local_time_benefit = time_benefit;
1998 val->local_size_cost = size;
1999 ajf->items->pop ();
2004 for (i = 0; i < count ; i++)
2005 vec_free (known_aggs[i].items);
2007 known_csts.release ();
2008 known_binfos.release ();
2009 known_aggs.release ();
2010 known_aggs_ptrs.release ();
2014 /* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
2015 topological sort of values. */
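/* This is essentially Tarjan's strongly connected component algorithm run
   over the graph of value dependencies (the ipcp_value_source links): each
   SCC is chained through scc_next and its representative is prepended to
   the global values_topo list, so walking values_topo proceeds from
   dependent values towards the values they depend on.  */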
2017 static void
2018 add_val_to_toposort (struct ipcp_value *cur_val)
2020 static int dfs_counter = 0;
2021 static struct ipcp_value *stack;
2022 struct ipcp_value_source *src;
2024 if (cur_val->dfs)
2025 return;
2027 dfs_counter++;
2028 cur_val->dfs = dfs_counter;
2029 cur_val->low_link = dfs_counter;
2031 cur_val->topo_next = stack;
2032 stack = cur_val;
2033 cur_val->on_stack = true;
2035 for (src = cur_val->sources; src; src = src->next)
2036 if (src->val)
2038 if (src->val->dfs == 0)
2040 add_val_to_toposort (src->val);
2041 if (src->val->low_link < cur_val->low_link)
2042 cur_val->low_link = src->val->low_link;
2044 else if (src->val->on_stack
2045 && src->val->dfs < cur_val->low_link)
2046 cur_val->low_link = src->val->dfs;
2049 if (cur_val->dfs == cur_val->low_link)
2051 struct ipcp_value *v, *scc_list = NULL;
2055 v = stack;
2056 stack = v->topo_next;
2057 v->on_stack = false;
2059 v->scc_next = scc_list;
2060 scc_list = v;
2062 while (v != cur_val);
2064 cur_val->topo_next = values_topo;
2065 values_topo = cur_val;
2069 /* Add all values in lattices associated with NODE to the topological sort if
2070 they are not there yet. */
2072 static void
2073 add_all_node_vals_to_toposort (struct cgraph_node *node)
2075 struct ipa_node_params *info = IPA_NODE_REF (node);
2076 int i, count = ipa_get_param_count (info);
2078 for (i = 0; i < count ; i++)
2080 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2081 struct ipcp_lattice *lat = &plats->itself;
2082 struct ipcp_agg_lattice *aglat;
2083 struct ipcp_value *val;
2085 if (!lat->bottom)
2086 for (val = lat->values; val; val = val->next)
2087 add_val_to_toposort (val);
2089 if (!plats->aggs_bottom)
2090 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2091 if (!aglat->bottom)
2092 for (val = aglat->values; val; val = val->next)
2093 add_val_to_toposort (val);
2097 /* One pass of constant propagation along the call graph edges, from callers
2098 to callees (requires topological ordering in TOPO), iterating over strongly
2099 connected components. */
2101 static void
2102 propagate_constants_topo (struct topo_info *topo)
2104 int i;
2106 for (i = topo->nnodes - 1; i >= 0; i--)
2108 struct cgraph_node *v, *node = topo->order[i];
2109 struct ipa_dfs_info *node_dfs_info;
2111 if (!cgraph_function_with_gimple_body_p (node))
2112 continue;
2114 node_dfs_info = (struct ipa_dfs_info *) node->symbol.aux;
2115 /* First, iteratively propagate within the strongly connected component
2116 until all lattices stabilize. */
2117 v = node_dfs_info->next_cycle;
2118 while (v)
2120 push_node_to_stack (topo, v);
2121 v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle;
2124 v = node;
2125 while (v)
2127 struct cgraph_edge *cs;
2129 for (cs = v->callees; cs; cs = cs->next_callee)
2130 if (edge_within_scc (cs)
2131 && propagate_constants_accross_call (cs))
2132 push_node_to_stack (topo, cs->callee);
2133 v = pop_node_from_stack (topo);
2136 /* Afterwards, propagate along edges leading out of the SCC, calculate
2137 the local effects of the discovered constants and add all valid values
2138 to their topological sort. */
2139 v = node;
2140 while (v)
2142 struct cgraph_edge *cs;
2144 estimate_local_effects (v);
2145 add_all_node_vals_to_toposort (v);
2146 for (cs = v->callees; cs; cs = cs->next_callee)
2147 if (!edge_within_scc (cs))
2148 propagate_constants_accross_call (cs);
2150 v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle;
2156 /* Return the sum of A and B if neither of them is bigger than INT_MAX/2,
2157 return the bigger one otherwise. */
2159 static int
2160 safe_add (int a, int b)
2162 if (a > INT_MAX/2 || b > INT_MAX/2)
2163 return a > b ? a : b;
2164 else
2165 return a + b;
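/* For example, safe_add (INT_MAX - 10, 100) returns INT_MAX - 10 rather than
   overflowing, because the first operand already exceeds INT_MAX/2.  */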
2169 /* Propagate the estimated effects of individual values along the topological
2170 order from the dependent values to those they depend on. */
2172 static void
2173 propagate_effects (void)
2175 struct ipcp_value *base;
2177 for (base = values_topo; base; base = base->topo_next)
2179 struct ipcp_value_source *src;
2180 struct ipcp_value *val;
2181 int time = 0, size = 0;
2183 for (val = base; val; val = val->scc_next)
2185 time = safe_add (time,
2186 val->local_time_benefit + val->prop_time_benefit);
2187 size = safe_add (size, val->local_size_cost + val->prop_size_cost);
2190 for (val = base; val; val = val->scc_next)
2191 for (src = val->sources; src; src = src->next)
2192 if (src->val
2193 && cgraph_maybe_hot_edge_p (src->cs))
2195 src->val->prop_time_benefit = safe_add (time,
2196 src->val->prop_time_benefit);
2197 src->val->prop_size_cost = safe_add (size,
2198 src->val->prop_size_cost);
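/* Illustration: for an SCC consisting of two values with local_time_benefit
   5 and 7 and no propagated benefit yet, time is 12 and that whole sum is
   added to prop_time_benefit of every source value reachable over a
   maybe-hot edge; sizes accumulate in the same way.  */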
2204 /* Propagate constants, binfos and their effects from the summaries
2205 interprocedurally. */
2207 static void
2208 ipcp_propagate_stage (struct topo_info *topo)
2210 struct cgraph_node *node;
2212 if (dump_file)
2213 fprintf (dump_file, "\n Propagating constants:\n\n");
2215 if (in_lto_p)
2216 ipa_update_after_lto_read ();
2219 FOR_EACH_DEFINED_FUNCTION (node)
2221 struct ipa_node_params *info = IPA_NODE_REF (node);
2223 determine_versionability (node);
2224 if (cgraph_function_with_gimple_body_p (node))
2226 info->lattices = XCNEWVEC (struct ipcp_param_lattices,
2227 ipa_get_param_count (info));
2228 initialize_node_lattices (node);
2230 if (node->count > max_count)
2231 max_count = node->count;
2232 overall_size += inline_summary (node)->self_size;
2235 max_new_size = overall_size;
2236 if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
2237 max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
2238 max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;
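/* Worked example (hypothetical parameter values): if overall_size is 10000
   and already above large-unit-insns, then with --param ipcp-unit-growth=10
   the limit becomes 10000 + 10000 * 10 / 100 + 1 == 11001.  */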
2240 if (dump_file)
2241 fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
2242 overall_size, max_new_size);
2244 propagate_constants_topo (topo);
2245 #ifdef ENABLE_CHECKING
2246 ipcp_verify_propagated_values ();
2247 #endif
2248 propagate_effects ();
2250 if (dump_file)
2252 fprintf (dump_file, "\nIPA lattices after all propagation:\n");
2253 print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
2257 /* Discover indirect outgoing edges of NODE, which is a new clone with known
2258 KNOWN_VALS, whose targets can now be resolved, and make them direct. */
2260 static void
2261 ipcp_discover_new_direct_edges (struct cgraph_node *node,
2262 vec<tree> known_vals)
2264 struct cgraph_edge *ie, *next_ie;
2265 bool found = false;
2267 for (ie = node->indirect_calls; ie; ie = next_ie)
2269 tree target;
2271 next_ie = ie->next_callee;
2272 target = ipa_get_indirect_edge_target (ie, known_vals, vNULL, vNULL);
2273 if (target)
2275 ipa_make_edge_direct_to_target (ie, target);
2276 found = true;
2279 /* Turning indirect calls into direct calls will improve the overall summary. */
2280 if (found)
2281 inline_update_overall_summary (node);
2284 /* Vector of pointers that form linked lists of clones of an original cgraph
2285 edge. */
2287 static vec<cgraph_edge_p> next_edge_clone;
2289 static inline void
2290 grow_next_edge_clone_vector (void)
2292 if (next_edge_clone.length ()
2293 <= (unsigned) cgraph_edge_max_uid)
2294 next_edge_clone.safe_grow_cleared (cgraph_edge_max_uid + 1);
2297 /* Edge duplication hook to grow the appropriate linked list in
2298 next_edge_clone. */
2300 static void
2301 ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2302 __attribute__((unused)) void *data)
2304 grow_next_edge_clone_vector ();
2305 next_edge_clone[dst->uid] = next_edge_clone[src->uid];
2306 next_edge_clone[src->uid] = dst;
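/* In other words, next_edge_clone is a uid-indexed array of "next" pointers
   threading all clones of an edge onto a singly linked list that starts at
   the original edge.  A hypothetical walk over the clones of an edge E
   (sketch only, not code used by the pass) would be:

   for (struct cgraph_edge *e = next_edge_clone[E->uid]; e;
        e = next_edge_clone[e->uid])
     visit (e);  */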
2309 /* See if NODE is a clone with a known aggregate value at a given OFFSET of a
2310 parameter with the given INDEX. */
2312 static tree
2313 get_clone_agg_value (struct cgraph_node *node, HOST_WIDEST_INT offset,
2314 int index)
2316 struct ipa_agg_replacement_value *aggval;
2318 aggval = ipa_get_agg_replacements_for_node (node);
2319 while (aggval)
2321 if (aggval->offset == offset
2322 && aggval->index == index)
2323 return aggval->value;
2324 aggval = aggval->next;
2326 return NULL_TREE;
2329 /* Return true if edge CS does bring about the value described by SRC. */
2331 static bool
2332 cgraph_edge_brings_value_p (struct cgraph_edge *cs,
2333 struct ipcp_value_source *src)
2335 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2336 struct ipa_node_params *dst_info = IPA_NODE_REF (cs->callee);
2338 if ((dst_info->ipcp_orig_node && !dst_info->is_all_contexts_clone)
2339 || caller_info->node_dead)
2340 return false;
2341 if (!src->val)
2342 return true;
2344 if (caller_info->ipcp_orig_node)
2346 tree t;
2347 if (src->offset == -1)
2348 t = caller_info->known_vals[src->index];
2349 else
2350 t = get_clone_agg_value (cs->caller, src->offset, src->index);
2351 return (t != NULL_TREE
2352 && values_equal_for_ipcp_p (src->val->value, t));
2354 else
2356 struct ipcp_agg_lattice *aglat;
2357 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
2358 src->index);
2359 if (src->offset == -1)
2360 return (ipa_lat_is_single_const (&plats->itself)
2361 && values_equal_for_ipcp_p (src->val->value,
2362 plats->itself.values->value));
2363 else
2365 if (plats->aggs_bottom || plats->aggs_contain_variable)
2366 return false;
2367 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2368 if (aglat->offset == src->offset)
2369 return (ipa_lat_is_single_const (aglat)
2370 && values_equal_for_ipcp_p (src->val->value,
2371 aglat->values->value));
2373 return false;
2377 /* Get the next clone in the linked list of clones of an edge. */
2379 static inline struct cgraph_edge *
2380 get_next_cgraph_edge_clone (struct cgraph_edge *cs)
2382 return next_edge_clone[cs->uid];
2385 /* Given VAL, iterate over all its sources and if they still hold, add their
2386 edge frequencies, counts and number into *FREQ_SUM, *COUNT_SUM and
2387 *CALLER_COUNT respectively. Return true if any of these edges is hot. */
2389 static bool
2390 get_info_about_necessary_edges (struct ipcp_value *val, int *freq_sum,
2391 gcov_type *count_sum, int *caller_count)
2393 struct ipcp_value_source *src;
2394 int freq = 0, count = 0;
2395 gcov_type cnt = 0;
2396 bool hot = false;
2398 for (src = val->sources; src; src = src->next)
2400 struct cgraph_edge *cs = src->cs;
2401 while (cs)
2403 if (cgraph_edge_brings_value_p (cs, src))
2405 count++;
2406 freq += cs->frequency;
2407 cnt += cs->count;
2408 hot |= cgraph_maybe_hot_edge_p (cs);
2410 cs = get_next_cgraph_edge_clone (cs);
2414 *freq_sum = freq;
2415 *count_sum = cnt;
2416 *caller_count = count;
2417 return hot;
2420 /* Return a vector of incoming edges that do bring value VAL. It is assumed
2421 their number is known and equal to CALLER_COUNT. */
2423 static vec<cgraph_edge_p>
2424 gather_edges_for_value (struct ipcp_value *val, int caller_count)
2426 struct ipcp_value_source *src;
2427 vec<cgraph_edge_p> ret;
2429 ret.create (caller_count);
2430 for (src = val->sources; src; src = src->next)
2432 struct cgraph_edge *cs = src->cs;
2433 while (cs)
2435 if (cgraph_edge_brings_value_p (cs, src))
2436 ret.quick_push (cs);
2437 cs = get_next_cgraph_edge_clone (cs);
2441 return ret;
2444 /* Construct a replacement map for a known VALUE for a formal parameter PARM.
2445 Return it or NULL if for some reason it cannot be created. */
2447 static struct ipa_replace_map *
2448 get_replacement_map (tree value, tree parm)
2450 tree req_type = TREE_TYPE (parm);
2451 struct ipa_replace_map *replace_map;
2453 if (!useless_type_conversion_p (req_type, TREE_TYPE (value)))
2455 if (fold_convertible_p (req_type, value))
2456 value = fold_build1 (NOP_EXPR, req_type, value);
2457 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (value)))
2458 value = fold_build1 (VIEW_CONVERT_EXPR, req_type, value);
2459 else
2461 if (dump_file)
2463 fprintf (dump_file, " const ");
2464 print_generic_expr (dump_file, value, 0);
2465 fprintf (dump_file, " can't be converted to param ");
2466 print_generic_expr (dump_file, parm, 0);
2467 fprintf (dump_file, "\n");
2469 return NULL;
2473 replace_map = ggc_alloc_ipa_replace_map ();
2474 if (dump_file)
2476 fprintf (dump_file, " replacing param ");
2477 print_generic_expr (dump_file, parm, 0);
2478 fprintf (dump_file, " with const ");
2479 print_generic_expr (dump_file, value, 0);
2480 fprintf (dump_file, "\n");
2482 replace_map->old_tree = parm;
2483 replace_map->new_tree = value;
2484 replace_map->replace_p = true;
2485 replace_map->ref_p = false;
2487 return replace_map;
2490 /* Dump new profiling counts. */
2492 static void
2493 dump_profile_updates (struct cgraph_node *orig_node,
2494 struct cgraph_node *new_node)
2496 struct cgraph_edge *cs;
2498 fprintf (dump_file, " setting count of the specialized node to "
2499 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) new_node->count);
2500 for (cs = new_node->callees; cs ; cs = cs->next_callee)
2501 fprintf (dump_file, " edge to %s has count "
2502 HOST_WIDE_INT_PRINT_DEC "\n",
2503 cgraph_node_name (cs->callee), (HOST_WIDE_INT) cs->count);
2505 fprintf (dump_file, " setting count of the original node to "
2506 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) orig_node->count);
2507 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
2508 fprintf (dump_file, " edge to %s is left with "
2509 HOST_WIDE_INT_PRINT_DEC "\n",
2510 cgraph_node_name (cs->callee), (HOST_WIDE_INT) cs->count);
2513 /* After a specialized NEW_NODE version of ORIG_NODE has been created, update
2514 their profile information to reflect this. */
2516 static void
2517 update_profiling_info (struct cgraph_node *orig_node,
2518 struct cgraph_node *new_node)
2520 struct cgraph_edge *cs;
2521 struct caller_statistics stats;
2522 gcov_type new_sum, orig_sum;
2523 gcov_type remainder, orig_node_count = orig_node->count;
2525 if (orig_node_count == 0)
2526 return;
2528 init_caller_stats (&stats);
2529 cgraph_for_node_and_aliases (orig_node, gather_caller_stats, &stats, false);
2530 orig_sum = stats.count_sum;
2531 init_caller_stats (&stats);
2532 cgraph_for_node_and_aliases (new_node, gather_caller_stats, &stats, false);
2533 new_sum = stats.count_sum;
2535 if (orig_node_count < orig_sum + new_sum)
2537 if (dump_file)
2538 fprintf (dump_file, " Problem: node %s/%i has too low count "
2539 HOST_WIDE_INT_PRINT_DEC " while the sum of incoming "
2540 "counts is " HOST_WIDE_INT_PRINT_DEC "\n",
2541 cgraph_node_name (orig_node), orig_node->uid,
2542 (HOST_WIDE_INT) orig_node_count,
2543 (HOST_WIDE_INT) (orig_sum + new_sum));
2545 orig_node_count = (orig_sum + new_sum) * 12 / 10;
2546 if (dump_file)
2547 fprintf (dump_file, " proceeding by pretending it was "
2548 HOST_WIDE_INT_PRINT_DEC "\n",
2549 (HOST_WIDE_INT) orig_node_count);
2552 new_node->count = new_sum;
2553 remainder = orig_node_count - new_sum;
2554 orig_node->count = remainder;
2556 for (cs = new_node->callees; cs ; cs = cs->next_callee)
2557 if (cs->frequency)
2558 cs->count = cs->count * (new_sum * REG_BR_PROB_BASE
2559 / orig_node_count) / REG_BR_PROB_BASE;
2560 else
2561 cs->count = 0;
2563 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
2564 cs->count = cs->count * (remainder * REG_BR_PROB_BASE
2565 / orig_node_count) / REG_BR_PROB_BASE;
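/* Worked example (illustrative counts): with orig_node_count == 1000 and
   new_sum == 600, remainder is 400; callee edges of the new node are scaled
   by 600/1000 and those of the original node by 400/1000, both via
   REG_BR_PROB_BASE (10000) to stay in integer arithmetic, so an edge count
   of 500 becomes 500 * (600 * 10000 / 1000) / 10000 == 300.  */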
2567 if (dump_file)
2568 dump_profile_updates (orig_node, new_node);
2571 /* Update the respective profile of specialized NEW_NODE and the original
2572 ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
2573 have been redirected to the specialized version. */
2575 static void
2576 update_specialized_profile (struct cgraph_node *new_node,
2577 struct cgraph_node *orig_node,
2578 gcov_type redirected_sum)
2580 struct cgraph_edge *cs;
2581 gcov_type new_node_count, orig_node_count = orig_node->count;
2583 if (dump_file)
2584 fprintf (dump_file, " the sum of counts of redirected edges is "
2585 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) redirected_sum);
2586 if (orig_node_count == 0)
2587 return;
2589 gcc_assert (orig_node_count >= redirected_sum);
2591 new_node_count = new_node->count;
2592 new_node->count += redirected_sum;
2593 orig_node->count -= redirected_sum;
2595 for (cs = new_node->callees; cs ; cs = cs->next_callee)
2596 if (cs->frequency)
2597 cs->count += cs->count * redirected_sum / new_node_count;
2598 else
2599 cs->count = 0;
2601 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
2603 gcov_type dec = cs->count * (redirected_sum * REG_BR_PROB_BASE
2604 / orig_node_count) / REG_BR_PROB_BASE;
2605 if (dec < cs->count)
2606 cs->count -= dec;
2607 else
2608 cs->count = 0;
2611 if (dump_file)
2612 dump_profile_updates (orig_node, new_node);
2615 /* Create a specialized version of NODE with known constants and types of
2616 parameters in KNOWN_VALS and redirect all edges in CALLERS to it. */
2618 static struct cgraph_node *
2619 create_specialized_node (struct cgraph_node *node,
2620 vec<tree> known_vals,
2621 struct ipa_agg_replacement_value *aggvals,
2622 vec<cgraph_edge_p> callers)
2624 struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
2625 vec<ipa_replace_map_p, va_gc> *replace_trees = NULL;
2626 struct cgraph_node *new_node;
2627 int i, count = ipa_get_param_count (info);
2628 bitmap args_to_skip;
2630 gcc_assert (!info->ipcp_orig_node);
2632 if (node->local.can_change_signature)
2634 args_to_skip = BITMAP_GGC_ALLOC ();
2635 for (i = 0; i < count; i++)
2637 tree t = known_vals[i];
2639 if ((t && TREE_CODE (t) != TREE_BINFO)
2640 || !ipa_is_param_used (info, i))
2641 bitmap_set_bit (args_to_skip, i);
2644 else
2646 args_to_skip = NULL;
2647 if (dump_file && (dump_flags & TDF_DETAILS))
2648 fprintf (dump_file, " cannot change function signature\n");
2651 for (i = 0; i < count ; i++)
2653 tree t = known_vals[i];
2654 if (t && TREE_CODE (t) != TREE_BINFO)
2656 struct ipa_replace_map *replace_map;
2658 replace_map = get_replacement_map (t, ipa_get_param (info, i));
2659 if (replace_map)
2660 vec_safe_push (replace_trees, replace_map);
2664 new_node = cgraph_create_virtual_clone (node, callers, replace_trees,
2665 args_to_skip, "constprop");
2666 ipa_set_node_agg_value_chain (new_node, aggvals);
2667 if (dump_file && (dump_flags & TDF_DETAILS))
2669 fprintf (dump_file, " the new node is %s/%i.\n",
2670 cgraph_node_name (new_node), new_node->uid);
2671 if (aggvals)
2672 ipa_dump_agg_replacement_values (dump_file, aggvals);
2674 gcc_checking_assert (ipa_node_params_vector.exists ()
2675 && (ipa_node_params_vector.length ()
2676 > (unsigned) cgraph_max_uid));
2677 update_profiling_info (node, new_node);
2678 new_info = IPA_NODE_REF (new_node);
2679 new_info->ipcp_orig_node = node;
2680 new_info->known_vals = known_vals;
2682 ipcp_discover_new_direct_edges (new_node, known_vals);
2684 callers.release ();
2685 return new_node;
2688 /* Given a NODE, and a subset of its CALLERS, try to populate blank slots in
2689 KNOWN_VALS with constants and types that are also known for all of the
2690 CALLERS. */
2692 static void
2693 find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
2694 vec<tree> known_vals,
2695 vec<cgraph_edge_p> callers)
2697 struct ipa_node_params *info = IPA_NODE_REF (node);
2698 int i, count = ipa_get_param_count (info);
2700 for (i = 0; i < count ; i++)
2702 struct cgraph_edge *cs;
2703 tree newval = NULL_TREE;
2704 int j;
2706 if (ipa_get_scalar_lat (info, i)->bottom || known_vals[i])
2707 continue;
2709 FOR_EACH_VEC_ELT (callers, j, cs)
2711 struct ipa_jump_func *jump_func;
2712 tree t;
2714 if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
2716 newval = NULL_TREE;
2717 break;
2719 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
2720 t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func);
2721 if (!t
2722 || (newval
2723 && !values_equal_for_ipcp_p (t, newval)))
2725 newval = NULL_TREE;
2726 break;
2728 else
2729 newval = t;
2732 if (newval)
2734 if (dump_file && (dump_flags & TDF_DETAILS))
2736 fprintf (dump_file, " adding an extra known scalar value ");
2737 print_ipcp_constant_value (dump_file, newval);
2738 fprintf (dump_file, " for parameter ");
2739 print_generic_expr (dump_file, ipa_get_param (info, i), 0);
2740 fprintf (dump_file, "\n");
2743 known_vals[i] = newval;
2748 /* Go through PLATS and create a vector of the values and offsets (minus
2749 OFFSET) of those aggregate lattices that contain only a single value. */
2751 static vec<ipa_agg_jf_item_t>
2752 copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
2754 vec<ipa_agg_jf_item_t> res = vNULL;
2756 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
2757 return vNULL;
2759 for (struct ipcp_agg_lattice *aglat = plats->aggs; aglat; aglat = aglat->next)
2760 if (ipa_lat_is_single_const (aglat))
2762 struct ipa_agg_jf_item ti;
2763 ti.offset = aglat->offset - offset;
2764 ti.value = aglat->values->value;
2765 res.safe_push (ti);
2767 return res;
2770 /* Intersect all values in INTER with single value lattices in PLATS (while
2771 subtracting OFFSET). */
2773 static void
2774 intersect_with_plats (struct ipcp_param_lattices *plats,
2775 vec<ipa_agg_jf_item_t> *inter,
2776 HOST_WIDE_INT offset)
2778 struct ipcp_agg_lattice *aglat;
2779 struct ipa_agg_jf_item *item;
2780 int k;
2782 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
2784 inter->release ();
2785 return;
2788 aglat = plats->aggs;
2789 FOR_EACH_VEC_ELT (*inter, k, item)
2791 bool found = false;
2792 if (!item->value)
2793 continue;
2794 while (aglat)
2796 if (aglat->offset - offset > item->offset)
2797 break;
2798 if (aglat->offset - offset == item->offset)
2800 gcc_checking_assert (item->value);
2801 if (values_equal_for_ipcp_p (item->value, aglat->values->value))
2802 found = true;
2803 break;
2805 aglat = aglat->next;
2807 if (!found)
2808 item->value = NULL_TREE;
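/* Example: if *INTER holds items at offsets 0 and 32 while PLATS has
   single-constant aggregate lattices only at offsets 0 (with an equal value)
   and 64, the item at offset 0 survives and the item at offset 32 has its
   value cleared to NULL_TREE.  */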
2812 /* Copy aggregate replacement values of NODE (which is an IPA-CP clone) to the
2813 vector result while subtracting OFFSET from the individual value offsets. */
2815 static vec<ipa_agg_jf_item_t>
2816 agg_replacements_to_vector (struct cgraph_node *node, int index,
2817 HOST_WIDE_INT offset)
2819 struct ipa_agg_replacement_value *av;
2820 vec<ipa_agg_jf_item_t> res = vNULL;
2822 for (av = ipa_get_agg_replacements_for_node (node); av; av = av->next)
2823 if (av->index == index
2824 && (av->offset - offset) >= 0)
2826 struct ipa_agg_jf_item item;
2827 gcc_checking_assert (av->value);
2828 item.offset = av->offset - offset;
2829 item.value = av->value;
2830 res.safe_push (item);
2833 return res;
2836 /* Intersect all values in INTER with those that we have already scheduled to
2837 be replaced in parameter number INDEX of NODE, which is an IPA-CP clone
2838 (while subtracting OFFSET). */
2840 static void
2841 intersect_with_agg_replacements (struct cgraph_node *node, int index,
2842 vec<ipa_agg_jf_item_t> *inter,
2843 HOST_WIDE_INT offset)
2845 struct ipa_agg_replacement_value *srcvals;
2846 struct ipa_agg_jf_item *item;
2847 int i;
2849 srcvals = ipa_get_agg_replacements_for_node (node);
2850 if (!srcvals)
2852 inter->release ();
2853 return;
2856 FOR_EACH_VEC_ELT (*inter, i, item)
2858 struct ipa_agg_replacement_value *av;
2859 bool found = false;
2860 if (!item->value)
2861 continue;
2862 for (av = srcvals; av; av = av->next)
2864 gcc_checking_assert (av->value);
2865 if (av->index == index
2866 && av->offset - offset == item->offset)
2868 if (values_equal_for_ipcp_p (item->value, av->value))
2869 found = true;
2870 break;
2873 if (!found)
2874 item->value = NULL_TREE;
2878 /* Intersect values in INTER with aggregate values that come along edge CS to
2879 parameter number INDEX and return it. If INTER does not actually exist yet,
2880 copy all incoming values to it. If we determine we ended up with no values
2881 whatsoever, return a released vector. */
2883 static vec<ipa_agg_jf_item_t>
2884 intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
2885 vec<ipa_agg_jf_item_t> inter)
2887 struct ipa_jump_func *jfunc;
2888 jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), index);
2889 if (jfunc->type == IPA_JF_PASS_THROUGH
2890 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2892 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2893 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
2895 if (caller_info->ipcp_orig_node)
2897 struct cgraph_node *orig_node = caller_info->ipcp_orig_node;
2898 struct ipcp_param_lattices *orig_plats;
2899 orig_plats = ipa_get_parm_lattices (IPA_NODE_REF (orig_node),
2900 src_idx);
2901 if (agg_pass_through_permissible_p (orig_plats, jfunc))
2903 if (!inter.exists ())
2904 inter = agg_replacements_to_vector (cs->caller, src_idx, 0);
2905 else
2906 intersect_with_agg_replacements (cs->caller, src_idx,
2907 &inter, 0);
2910 else
2912 struct ipcp_param_lattices *src_plats;
2913 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
2914 if (agg_pass_through_permissible_p (src_plats, jfunc))
2916 /* Currently we do not produce clobber aggregate jump
2917 functions, adjust when we do. */
2918 gcc_checking_assert (!jfunc->agg.items);
2919 if (!inter.exists ())
2920 inter = copy_plats_to_inter (src_plats, 0);
2921 else
2922 intersect_with_plats (src_plats, &inter, 0);
2926 else if (jfunc->type == IPA_JF_ANCESTOR
2927 && ipa_get_jf_ancestor_agg_preserved (jfunc))
2929 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2930 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
2931 struct ipcp_param_lattices *src_plats;
2932 HOST_WIDE_INT delta = ipa_get_jf_ancestor_offset (jfunc);
2934 if (caller_info->ipcp_orig_node)
2936 if (!inter.exists ())
2937 inter = agg_replacements_to_vector (cs->caller, src_idx, delta);
2938 else
2939 intersect_with_agg_replacements (cs->caller, src_idx, &inter,
2940 delta);
2942 else
2944 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
2945 /* Currently we do not produce clobber aggregate jump
2946 functions, adjust when we do. */
2947 gcc_checking_assert (!src_plats->aggs || !jfunc->agg.items);
2948 if (!inter.exists ())
2949 inter = copy_plats_to_inter (src_plats, delta);
2950 else
2951 intersect_with_plats (src_plats, &inter, delta);
2954 else if (jfunc->agg.items)
2956 struct ipa_agg_jf_item *item;
2957 int k;
2959 if (!inter.exists ())
2960 for (unsigned i = 0; i < jfunc->agg.items->length (); i++)
2961 inter.safe_push ((*jfunc->agg.items)[i]);
2962 else
2963 FOR_EACH_VEC_ELT (inter, k, item)
2965 int l = 0;
2966 bool found = false;
2968 if (!item->value)
2969 continue;
2971 while ((unsigned) l < jfunc->agg.items->length ())
2973 struct ipa_agg_jf_item *ti;
2974 ti = &(*jfunc->agg.items)[l];
2975 if (ti->offset > item->offset)
2976 break;
2977 if (ti->offset == item->offset)
2979 gcc_checking_assert (ti->value);
2980 if (values_equal_for_ipcp_p (item->value,
2981 ti->value))
2982 found = true;
2983 break;
2985 l++;
2987 if (!found)
2988 item->value = NULL;
2991 else
2993 inter.release();
2994 return vec<ipa_agg_jf_item_t>();
2996 return inter;
2999 /* Look at edges in CALLERS and collect all known aggregate values that arrive
3000 from all of them. */
3002 static struct ipa_agg_replacement_value *
3003 find_aggregate_values_for_callers_subset (struct cgraph_node *node,
3004 vec<cgraph_edge_p> callers)
3006 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
3007 struct ipa_agg_replacement_value *res = NULL;
3008 struct cgraph_edge *cs;
3009 int i, j, count = ipa_get_param_count (dest_info);
3011 FOR_EACH_VEC_ELT (callers, j, cs)
3013 int c = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
3014 if (c < count)
3015 count = c;
3018 for (i = 0; i < count ; i++)
3020 struct cgraph_edge *cs;
3021 vec<ipa_agg_jf_item_t> inter = vNULL;
3022 struct ipa_agg_jf_item *item;
3023 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (dest_info, i);
3024 int j;
3026 /* Among other things, the following check should deal with all by_ref
3027 mismatches. */
3028 if (plats->aggs_bottom)
3029 continue;
3031 FOR_EACH_VEC_ELT (callers, j, cs)
3033 inter = intersect_aggregates_with_edge (cs, i, inter);
3035 if (!inter.exists ())
3036 goto next_param;
3039 FOR_EACH_VEC_ELT (inter, j, item)
3041 struct ipa_agg_replacement_value *v;
3043 if (!item->value)
3044 continue;
3046 v = ggc_alloc_ipa_agg_replacement_value ();
3047 v->index = i;
3048 v->offset = item->offset;
3049 v->value = item->value;
3050 v->by_ref = plats->aggs_by_ref;
3051 v->next = res;
3052 res = v;
3055 next_param:
3056 if (inter.exists ())
3057 inter.release ();
3059 return res;
3062 /* Turn KNOWN_AGGS into a list of aggregate replacement values. */
3064 static struct ipa_agg_replacement_value *
3065 known_aggs_to_agg_replacement_list (vec<ipa_agg_jump_function_t> known_aggs)
3067 struct ipa_agg_replacement_value *res = NULL;
3068 struct ipa_agg_jump_function *aggjf;
3069 struct ipa_agg_jf_item *item;
3070 int i, j;
3072 FOR_EACH_VEC_ELT (known_aggs, i, aggjf)
3073 FOR_EACH_VEC_SAFE_ELT (aggjf->items, j, item)
3075 struct ipa_agg_replacement_value *v;
3076 v = ggc_alloc_ipa_agg_replacement_value ();
3077 v->index = i;
3078 v->offset = item->offset;
3079 v->value = item->value;
3080 v->by_ref = aggjf->by_ref;
3081 v->next = res;
3082 res = v;
3084 return res;
3087 /* Determine whether CS also brings all scalar values that the NODE is
3088 specialized for. */
3090 static bool
3091 cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
3092 struct cgraph_node *node)
3094 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
3095 int count = ipa_get_param_count (dest_info);
3096 struct ipa_node_params *caller_info;
3097 struct ipa_edge_args *args;
3098 int i;
3100 caller_info = IPA_NODE_REF (cs->caller);
3101 args = IPA_EDGE_REF (cs);
3102 for (i = 0; i < count; i++)
3104 struct ipa_jump_func *jump_func;
3105 tree val, t;
3107 val = dest_info->known_vals[i];
3108 if (!val)
3109 continue;
3111 if (i >= ipa_get_cs_argument_count (args))
3112 return false;
3113 jump_func = ipa_get_ith_jump_func (args, i);
3114 t = ipa_value_from_jfunc (caller_info, jump_func);
3115 if (!t || !values_equal_for_ipcp_p (val, t))
3116 return false;
3118 return true;
3121 /* Determine whether CS also brings all aggregate values that NODE is
3122 specialized for. */
3123 static bool
3124 cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
3125 struct cgraph_node *node)
3127 struct ipa_node_params *orig_caller_info = IPA_NODE_REF (cs->caller);
3128 struct ipa_agg_replacement_value *aggval;
3129 int i, ec, count;
3131 aggval = ipa_get_agg_replacements_for_node (node);
3132 if (!aggval)
3133 return true;
3135 count = ipa_get_param_count (IPA_NODE_REF (node));
3136 ec = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
3137 if (ec < count)
3138 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3139 if (av->index >= ec)
3140 return false;
3142 if (orig_caller_info->ipcp_orig_node)
3143 orig_caller_info = IPA_NODE_REF (orig_caller_info->ipcp_orig_node);
3145 for (i = 0; i < count; i++)
3147 static vec<ipa_agg_jf_item_t> values = vec<ipa_agg_jf_item_t>();
3148 struct ipcp_param_lattices *plats;
3149 bool interesting = false;
3150 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3151 if (av->index == i)
3153 interesting = true;
3154 break;
3156 if (!interesting)
3157 continue;
3159 plats = ipa_get_parm_lattices (orig_caller_info, aggval->index);
3160 if (plats->aggs_bottom)
3161 return false;
3163 values = intersect_aggregates_with_edge (cs, i, values);
3164 if (!values.exists())
3165 return false;
3167 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3168 if (av->index == i)
3170 struct ipa_agg_jf_item *item;
3171 int j;
3172 bool found = false;
3173 FOR_EACH_VEC_ELT (values, j, item)
3174 if (item->value
3175 && item->offset == av->offset
3176 && values_equal_for_ipcp_p (item->value, av->value))
3177 found = true;
3178 if (!found)
3180 values.release();
3181 return false;
3185 return true;
3188 /* Given an original NODE and a VAL for which we have already created a
3189 specialized clone, check whether there are incoming edges that still lead
3190 into the old node but now also bring the requested value and also conform to
3191 all other criteria such that they can be redirected to the specialized node.
3192 This function can therefore redirect the final edge in an SCC. */
3194 static void
3195 perhaps_add_new_callers (struct cgraph_node *node, struct ipcp_value *val)
3197 struct ipcp_value_source *src;
3198 gcov_type redirected_sum = 0;
3200 for (src = val->sources; src; src = src->next)
3202 struct cgraph_edge *cs = src->cs;
3203 while (cs)
3205 enum availability availability;
3206 struct cgraph_node *dst = cgraph_function_node (cs->callee,
3207 &availability);
3208 if ((dst == node || IPA_NODE_REF (dst)->is_all_contexts_clone)
3209 && availability > AVAIL_OVERWRITABLE
3210 && cgraph_edge_brings_value_p (cs, src))
3212 if (cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
3213 && cgraph_edge_brings_all_agg_vals_for_node (cs,
3214 val->spec_node))
3216 if (dump_file)
3217 fprintf (dump_file, " - adding an extra caller %s/%i"
3218 " of %s/%i\n",
3219 xstrdup (cgraph_node_name (cs->caller)),
3220 cs->caller->uid,
3221 xstrdup (cgraph_node_name (val->spec_node)),
3222 val->spec_node->uid);
3224 cgraph_redirect_edge_callee (cs, val->spec_node);
3225 redirected_sum += cs->count;
3228 cs = get_next_cgraph_edge_clone (cs);
3232 if (redirected_sum)
3233 update_specialized_profile (val->spec_node, node, redirected_sum);
3237 /* Copy KNOWN_BINFOS to KNOWN_VALS. */
3239 static void
3240 move_binfos_to_values (vec<tree> known_vals,
3241 vec<tree> known_binfos)
3243 tree t;
3244 int i;
3246 for (i = 0; known_binfos.iterate (i, &t); i++)
3247 if (t)
3248 known_vals[i] = t;
3251 /* Return true if there is a replacement equivalent to VALUE, INDEX and OFFSET
3252 among those in the AGGVALS list. */
3254 DEBUG_FUNCTION bool
3255 ipcp_val_in_agg_replacements_p (struct ipa_agg_replacement_value *aggvals,
3256 int index, HOST_WIDE_INT offset, tree value)
3258 while (aggvals)
3260 if (aggvals->index == index
3261 && aggvals->offset == offset
3262 && values_equal_for_ipcp_p (aggvals->value, value))
3263 return true;
3264 aggvals = aggvals->next;
3266 return false;
3269 /* Decide whether to create a special version of NODE for value VAL of parameter
3270 at the given INDEX. If OFFSET is -1, the value is for the parameter itself,
3271 otherwise it is stored at the given OFFSET of the parameter. KNOWN_CSTS
3272 and KNOWN_BINFOS describe the other already known values. */
3274 static bool
3275 decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
3276 struct ipcp_value *val, vec<tree> known_csts,
3277 vec<tree> known_binfos)
3279 struct ipa_agg_replacement_value *aggvals;
3280 int freq_sum, caller_count;
3281 gcov_type count_sum;
3282 vec<cgraph_edge_p> callers;
3283 vec<tree> kv;
3285 if (val->spec_node)
3287 perhaps_add_new_callers (node, val);
3288 return false;
3290 else if (val->local_size_cost + overall_size > max_new_size)
3292 if (dump_file && (dump_flags & TDF_DETAILS))
3293 fprintf (dump_file, " Ignoring candidate value because "
3294 "max_new_size would be reached with %li.\n",
3295 val->local_size_cost + overall_size);
3296 return false;
3298 else if (!get_info_about_necessary_edges (val, &freq_sum, &count_sum,
3299 &caller_count))
3300 return false;
3302 if (dump_file && (dump_flags & TDF_DETAILS))
3304 fprintf (dump_file, " - considering value ");
3305 print_ipcp_constant_value (dump_file, val->value);
3306 fprintf (dump_file, " for parameter ");
3307 print_generic_expr (dump_file, ipa_get_param (IPA_NODE_REF (node),
3308 index), 0);
3309 if (offset != -1)
3310 fprintf (dump_file, ", offset: " HOST_WIDE_INT_PRINT_DEC, offset);
3311 fprintf (dump_file, " (caller_count: %i)\n", caller_count);
3314 if (!good_cloning_opportunity_p (node, val->local_time_benefit,
3315 freq_sum, count_sum,
3316 val->local_size_cost)
3317 && !good_cloning_opportunity_p (node,
3318 val->local_time_benefit
3319 + val->prop_time_benefit,
3320 freq_sum, count_sum,
3321 val->local_size_cost
3322 + val->prop_size_cost))
3323 return false;
3325 if (dump_file)
3326 fprintf (dump_file, " Creating a specialized node of %s/%i.\n",
3327 cgraph_node_name (node), node->uid);
3329 callers = gather_edges_for_value (val, caller_count);
3330 kv = known_csts.copy ();
3331 move_binfos_to_values (kv, known_binfos);
3332 if (offset == -1)
3333 kv[index] = val->value;
3334 find_more_scalar_values_for_callers_subset (node, kv, callers);
3335 aggvals = find_aggregate_values_for_callers_subset (node, callers);
3336 gcc_checking_assert (offset == -1
3337 || ipcp_val_in_agg_replacements_p (aggvals, index,
3338 offset, val->value));
3339 val->spec_node = create_specialized_node (node, kv, aggvals, callers);
3340 overall_size += val->local_size_cost;
3342 /* TODO: If for some lattice there is only one other known value
3343 left, make a special node for it too. */
3345 return true;
3348 /* Decide whether and what specialized clones of NODE should be created. */
3350 static bool
3351 decide_whether_version_node (struct cgraph_node *node)
3353 struct ipa_node_params *info = IPA_NODE_REF (node);
3354 int i, count = ipa_get_param_count (info);
3355 vec<tree> known_csts, known_binfos;
3356 vec<ipa_agg_jump_function_t> known_aggs = vNULL;
3357 bool ret = false;
3359 if (count == 0)
3360 return false;
3362 if (dump_file && (dump_flags & TDF_DETAILS))
3363 fprintf (dump_file, "\nEvaluating opportunities for %s/%i.\n",
3364 cgraph_node_name (node), node->uid);
3366 gather_context_independent_values (info, &known_csts, &known_binfos,
3367 info->do_clone_for_all_contexts ? &known_aggs
3368 : NULL, NULL);
3370 for (i = 0; i < count ;i++)
3372 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
3373 struct ipcp_lattice *lat = &plats->itself;
3374 struct ipcp_value *val;
3376 if (!lat->bottom
3377 && !known_csts[i]
3378 && !known_binfos[i])
3379 for (val = lat->values; val; val = val->next)
3380 ret |= decide_about_value (node, i, -1, val, known_csts,
3381 known_binfos);
3383 if (!plats->aggs_bottom)
3385 struct ipcp_agg_lattice *aglat;
3386 struct ipcp_value *val;
3387 for (aglat = plats->aggs; aglat; aglat = aglat->next)
3388 if (!aglat->bottom && aglat->values
3389 /* If the following is false, the one value is in
3390 known_aggs. */
3391 && (plats->aggs_contain_variable
3392 || !ipa_lat_is_single_const (aglat)))
3393 for (val = aglat->values; val; val = val->next)
3394 ret |= decide_about_value (node, i, aglat->offset, val,
3395 known_csts, known_binfos);
3397 info = IPA_NODE_REF (node);
3400 if (info->do_clone_for_all_contexts)
3402 struct cgraph_node *clone;
3403 vec<cgraph_edge_p> callers;
3405 if (dump_file)
3406 fprintf (dump_file, " - Creating a specialized node of %s/%i "
3407 "for all known contexts.\n", cgraph_node_name (node),
3408 node->uid);
3410 callers = collect_callers_of_node (node);
3411 move_binfos_to_values (known_csts, known_binfos);
3412 clone = create_specialized_node (node, known_csts,
3413 known_aggs_to_agg_replacement_list (known_aggs),
3414 callers);
3415 info = IPA_NODE_REF (node);
3416 info->do_clone_for_all_contexts = false;
3417 IPA_NODE_REF (clone)->is_all_contexts_clone = true;
3418 for (i = 0; i < count ; i++)
3419 vec_free (known_aggs[i].items);
3420 known_aggs.release ();
3421 ret = true;
3423 else
3424 known_csts.release ();
3426 known_binfos.release ();
3427 return ret;
3430 /* Transitively mark all callees of NODE within the same SCC as not dead. */
3432 static void
3433 spread_undeadness (struct cgraph_node *node)
3435 struct cgraph_edge *cs;
3437 for (cs = node->callees; cs; cs = cs->next_callee)
3438 if (edge_within_scc (cs))
3440 struct cgraph_node *callee;
3441 struct ipa_node_params *info;
3443 callee = cgraph_function_node (cs->callee, NULL);
3444 info = IPA_NODE_REF (callee);
3446 if (info->node_dead)
3448 info->node_dead = 0;
3449 spread_undeadness (callee);
3454 /* Return true if NODE has a caller from outside of its SCC that is not
3455 dead. Worker callback for cgraph_for_node_and_aliases. */
3457 static bool
3458 has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
3459 void *data ATTRIBUTE_UNUSED)
3461 struct cgraph_edge *cs;
3463 for (cs = node->callers; cs; cs = cs->next_caller)
3464 if (cs->caller->thunk.thunk_p
3465 && cgraph_for_node_and_aliases (cs->caller,
3466 has_undead_caller_from_outside_scc_p,
3467 NULL, true))
3468 return true;
3469 else if (!edge_within_scc (cs)
3470 && !IPA_NODE_REF (cs->caller)->node_dead)
3471 return true;
3472 return false;
3476 /* Identify nodes within the same SCC as NODE which are no longer needed
3477 because of new clones and will be removed as unreachable. */
3479 static void
3480 identify_dead_nodes (struct cgraph_node *node)
3482 struct cgraph_node *v;
3483 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3484 if (cgraph_will_be_removed_from_program_if_no_direct_calls (v)
3485 && !cgraph_for_node_and_aliases (v,
3486 has_undead_caller_from_outside_scc_p,
3487 NULL, true))
3488 IPA_NODE_REF (v)->node_dead = 1;
3490 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3491 if (!IPA_NODE_REF (v)->node_dead)
3492 spread_undeadness (v);
3494 if (dump_file && (dump_flags & TDF_DETAILS))
3496 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3497 if (IPA_NODE_REF (v)->node_dead)
3498 fprintf (dump_file, " Marking node as dead: %s/%i.\n",
3499 cgraph_node_name (v), v->uid);
3503 /* The decision stage. Iterate over the topological order of call graph nodes
3504 TOPO and make specialized clones if deemed beneficial. */
3506 static void
3507 ipcp_decision_stage (struct topo_info *topo)
3509 int i;
3511 if (dump_file)
3512 fprintf (dump_file, "\nIPA decision stage:\n\n");
3514 for (i = topo->nnodes - 1; i >= 0; i--)
3516 struct cgraph_node *node = topo->order[i];
3517 bool change = false, iterate = true;
3519 while (iterate)
3521 struct cgraph_node *v;
3522 iterate = false;
3523 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3524 if (cgraph_function_with_gimple_body_p (v)
3525 && ipcp_versionable_function_p (v))
3526 iterate |= decide_whether_version_node (v);
3528 change |= iterate;
3530 if (change)
3531 identify_dead_nodes (node);
3535 /* The IPCP driver. */
3537 static unsigned int
3538 ipcp_driver (void)
3540 struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
3541 struct topo_info topo;
3543 ipa_check_create_node_params ();
3544 ipa_check_create_edge_args ();
3545 grow_next_edge_clone_vector ();
3546 edge_duplication_hook_holder =
3547 cgraph_add_edge_duplication_hook (&ipcp_edge_duplication_hook, NULL);
3548 ipcp_values_pool = create_alloc_pool ("IPA-CP values",
3549 sizeof (struct ipcp_value), 32);
3550 ipcp_sources_pool = create_alloc_pool ("IPA-CP value sources",
3551 sizeof (struct ipcp_value_source), 64);
3552 ipcp_agg_lattice_pool = create_alloc_pool ("IPA_CP aggregate lattices",
3553 sizeof (struct ipcp_agg_lattice),
3554 32);
3555 if (dump_file)
3557 fprintf (dump_file, "\nIPA structures before propagation:\n");
3558 if (dump_flags & TDF_DETAILS)
3559 ipa_print_all_params (dump_file);
3560 ipa_print_all_jump_functions (dump_file);
3563 /* Topological sort. */
3564 build_toporder_info (&topo);
3565 /* Do the interprocedural propagation. */
3566 ipcp_propagate_stage (&topo);
3567 /* Decide what constant propagation and cloning should be performed. */
3568 ipcp_decision_stage (&topo);
3570 /* Free all IPCP structures. */
3571 free_toporder_info (&topo);
3572 next_edge_clone.release ();
3573 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3574 ipa_free_all_structures_after_ipa_cp ();
3575 if (dump_file)
3576 fprintf (dump_file, "\nIPA constant propagation end\n");
3577 return 0;
3580 /* Initialization and computation of IPCP data structures. This is the initial
3581 intraprocedural analysis of functions, which gathers information to be
3582 propagated later on. */
3584 static void
3585 ipcp_generate_summary (void)
3587 struct cgraph_node *node;
3589 if (dump_file)
3590 fprintf (dump_file, "\nIPA constant propagation start:\n");
3591 ipa_register_cgraph_hooks ();
3593 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
3595 node->local.versionable
3596 = tree_versionable_function_p (node->symbol.decl);
3597 ipa_analyze_node (node);
3601 /* Write the ipcp summary (jump functions). */
3603 static void
3604 ipcp_write_summary (void)
3606 ipa_prop_write_jump_functions ();
3609 /* Read ipcp summary. */
3611 static void
3612 ipcp_read_summary (void)
3614 ipa_prop_read_jump_functions ();
3617 /* Gate for IPCP optimization. */
3619 static bool
3620 cgraph_gate_cp (void)
3622 /* FIXME: We should remove the optimize check after we ensure we never run
3623 IPA passes when not optimizing. */
3624 return flag_ipa_cp && optimize;
3627 struct ipa_opt_pass_d pass_ipa_cp =
3630 IPA_PASS,
3631 "cp", /* name */
3632 OPTGROUP_NONE, /* optinfo_flags */
3633 cgraph_gate_cp, /* gate */
3634 ipcp_driver, /* execute */
3635 NULL, /* sub */
3636 NULL, /* next */
3637 0, /* static_pass_number */
3638 TV_IPA_CONSTANT_PROP, /* tv_id */
3639 0, /* properties_required */
3640 0, /* properties_provided */
3641 0, /* properties_destroyed */
3642 0, /* todo_flags_start */
3643 TODO_dump_symtab |
3644 TODO_remove_functions | TODO_ggc_collect /* todo_flags_finish */
3646 ipcp_generate_summary, /* generate_summary */
3647 ipcp_write_summary, /* write_summary */
3648 ipcp_read_summary, /* read_summary */
3649 ipa_prop_write_all_agg_replacement, /* write_optimization_summary */
3650 ipa_prop_read_all_agg_replacement, /* read_optimization_summary */
3651 NULL, /* stmt_fixup */
3652 0, /* TODOs */
3653 ipcp_transform_function, /* function_transform */
3654 NULL, /* variable_transform */