1 /* Interprocedural constant propagation
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
4 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
5 <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Interprocedural constant propagation (IPA-CP).
25 The goal of this transformation is to
27 1) discover functions which are always invoked with some arguments with the
28 same known constant values and modify the functions so that the
29 subsequent optimizations can take advantage of the knowledge, and
31 2) partial specialization - create specialized versions of functions
32 transformed in this way if some parameters are known constants only in
33 certain contexts but the estimated tradeoff between speedup and the cost
34 in code size is deemed good (illustrated by the example below).
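   As a simplified illustration (the function below is made up for this
   comment and does not come from any real testcase), consider

     static int
     scale (int x, int factor)
     {
       return x * factor;
     }

   If every caller passes 2 as FACTOR, point 1) lets the body be optimized
   as if it read "return x * 2;".  If only some call sites pass 2, point 2)
   may create a specialized clone of scale with FACTOR replaced by 2 and
   redirect just those call sites to it.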
36 The algorithm also propagates types and attempts to perform type based
37 devirtualization. Types are propagated much like constants.
39 The algorithm basically consists of three stages. In the first, functions
40 are analyzed one at a time and jump functions are constructed for all known
41 call-sites. In the second phase, the pass propagates information from the
42 jump functions across the call to reveal what values are available at what
43 call sites, performs estimations of effects of known values on functions and
44 their callees, and finally decides what specialized extra versions should be
45 created. In the third, the special versions materialize and appropriate
46 calls are redirected.
48 The algorithm used is to a certain extent based on "Interprocedural Constant
49 Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
50 Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
51 Cooper, Mary W. Hall, and Ken Kennedy.
54 First stage - intraprocedural analysis
55 =======================================
57 This phase computes jump_function and modification flags.
59 A jump function for a call-site represents the values passed as actual
60 arguments of a given call-site. In principle, there are three types of
61 values (illustrated by the example below):
63 Pass through - the caller's formal parameter is passed as an actual
64 argument, plus an operation on it can be performed.
65 Constant - a constant is passed as an actual argument.
66 Unknown - neither of the above.
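   As an illustration (the caller below is invented for this comment), in

     void caller (int a)
     {
       callee (a + 1, 7, get_cookie ());
     }

   the first argument is described by a pass through jump function (formal
   parameter A combined with the operation "+ 1"), the second one by a
   constant jump function holding 7, and the third one is unknown.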
68 All jump function types are described in detail in ipa-prop.h, together with
69 the data structures that represent them and methods of accessing them.
71 ipcp_generate_summary() is the main function of the first stage.
73 Second stage - interprocedural analysis
74 ========================================
76 This stage is itself divided into two phases. In the first, we propagate
77 known values over the call graph, in the second, we make cloning decisions.
78 It uses a different algorithm than the one in Callahan's original paper.
80 First, we traverse the functions topologically from callers to callees and,
81 for each strongly connected component (SCC), we propagate constants
82 according to previously computed jump functions. We also record what known
83 values depend on other known values and estimate local effects. Finally, we
84 propagate cumulative information about these effects from dependent values
85 to those on which they depend.
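   As a small illustration (function names invented for this comment), if
   main calls foo with the constant 4 and foo merely passes its parameter on
   to bar, then processing the (here trivial) SCCs in this order lets the
   value 4 reach bar's lattice in a single sweep, and the estimated effects
   of knowing it in bar are afterwards propagated back to the value in foo
   on which it depends.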
87 Second, we again traverse the call graph in the same topological order and
88 make clones for functions which we know are called with the same values in
89 all contexts and decide about extra specialized clones of functions just for
90 some contexts - these decisions are based on both local estimates and
91 cumulative estimates propagated from callees.
93 ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
94 second stage.
96 Third phase - materialization of clones, call statement updates.
97 ============================================
99 This stage is currently performed by call graph code (mainly in cgraphunit.c
100 and tree-inline.c) according to instructions inserted to the call graph by
101 the second stage. */
103 #include "config.h"
104 #include "system.h"
105 #include "coretypes.h"
106 #include "tree.h"
107 #include "gimple-fold.h"
108 #include "gimple-expr.h"
109 #include "target.h"
110 #include "predict.h"
111 #include "basic-block.h"
112 #include "vec.h"
113 #include "hash-map.h"
114 #include "is-a.h"
115 #include "plugin-api.h"
116 #include "hashtab.h"
117 #include "hash-set.h"
118 #include "machmode.h"
119 #include "tm.h"
120 #include "hard-reg-set.h"
121 #include "input.h"
122 #include "function.h"
123 #include "ipa-ref.h"
124 #include "cgraph.h"
125 #include "alloc-pool.h"
126 #include "ipa-prop.h"
127 #include "bitmap.h"
128 #include "tree-pass.h"
129 #include "flags.h"
130 #include "diagnostic.h"
131 #include "tree-pretty-print.h"
132 #include "tree-inline.h"
133 #include "params.h"
134 #include "ipa-inline.h"
135 #include "ipa-utils.h"
137 template <typename valtype> class ipcp_value;
139 /* Describes a particular source for an IPA-CP value. */
141 template <typename valtype>
142 class ipcp_value_source
144 public:
145 /* Aggregate offset of the source, negative if the source is the scalar
146 value of the argument itself. */
147 HOST_WIDE_INT offset;
148 /* The incoming edge that brought the value. */
149 cgraph_edge *cs;
150 /* If the jump function that resulted in this value was a pass-through or an
151 ancestor, this is the ipcp_value of the caller from which the described
152 value has been derived. Otherwise it is NULL. */
153 ipcp_value<valtype> *val;
154 /* Next pointer in a linked list of sources of a value. */
155 ipcp_value_source *next;
156 /* If the jump function that resulted in this value was a pass-through or an
157 ancestor, this is the index of the parameter of the caller the jump
158 function references. */
159 int index;
162 /* Common ancestor for all ipcp_value instantiations. */
164 class ipcp_value_base
166 public:
167 /* Time benefit and size cost that specializing the function for this value
168 would bring about in this function alone. */
169 int local_time_benefit, local_size_cost;
170 /* Time benefit and size cost that specializing the function for this value
171 can bring about in its callees (transitively). */
172 int prop_time_benefit, prop_size_cost;
175 /* Describes one particular value stored in struct ipcp_lattice. */
177 template <typename valtype>
178 class ipcp_value : public ipcp_value_base
180 public:
181 /* The actual value for the given parameter. */
182 valtype value;
183 /* The list of sources from which this value originates. */
184 ipcp_value_source <valtype> *sources;
185 /* Next pointers in a linked list of all values in a lattice. */
186 ipcp_value *next;
187 /* Next pointers in a linked list of values in a strongly connected component
188 of values. */
189 ipcp_value *scc_next;
190 /* Next pointers in a linked list of SCCs of values sorted topologically
191 according their sources. */
192 ipcp_value *topo_next;
193 /* A specialized node created for this value, NULL if none has been (so far)
194 created. */
195 cgraph_node *spec_node;
196 /* Depth first search number and low link for topological sorting of
197 values. */
198 int dfs, low_link;
199 /* True if this value is currently on the topo-sort stack. */
200 bool on_stack;
202 void add_source (cgraph_edge *cs, ipcp_value *src_val, int src_idx,
203 HOST_WIDE_INT offset);
206 /* Lattice describing potential values of a formal parameter of a function, or
207 a part of an aggregate. TOP is represented by a lattice with zero values
208 and with contains_variable and bottom flags cleared. BOTTOM is represented
209 by a lattice with the bottom flag set. In that case, values and
210 contains_variable flag should be disregarded. */
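/* Note that during propagation a lattice only ever moves downwards: from TOP
   through a growing set of known values, possibly acquiring the
   contains_variable flag, towards BOTTOM.  The setters below (set_to_bottom,
   set_contains_variable, add_value) never undo any of these steps. */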
212 template <typename valtype>
213 class ipcp_lattice
215 public:
216 /* The list of known values and types in this lattice. Note that values are
217 not deallocated if a lattice is set to bottom because there may be value
218 sources referencing them. */
219 ipcp_value<valtype> *values;
220 /* Number of known values and types in this lattice. */
221 int values_count;
222 /* The lattice contains a variable component (in addition to values). */
223 bool contains_variable;
224 /* The value of the lattice is bottom (i.e. variable and unusable for any
225 propagation). */
226 bool bottom;
228 inline bool is_single_const ();
229 inline bool set_to_bottom ();
230 inline bool set_contains_variable ();
231 bool add_value (valtype newval, cgraph_edge *cs,
232 ipcp_value<valtype> *src_val = NULL,
233 int src_idx = 0, HOST_WIDE_INT offset = -1);
234 void print (FILE * f, bool dump_sources, bool dump_benefits);
237 /* Lattice of tree values with an offset to describe a part of an
238 aggregate. */
240 class ipcp_agg_lattice : public ipcp_lattice<tree>
242 public:
243 /* Offset that is being described by this lattice. */
244 HOST_WIDE_INT offset;
245 /* Size so that we don't have to re-compute it every time we traverse the
246 list. Must correspond to TYPE_SIZE of all lat values. */
247 HOST_WIDE_INT size;
248 /* Next element of the linked list. */
249 struct ipcp_agg_lattice *next;
252 /* Structure containing lattices for a parameter itself and for pieces of
253 aggregates that are passed in the parameter or by a reference in a parameter
254 plus some other useful flags. */
256 class ipcp_param_lattices
258 public:
259 /* Lattice describing the value of the parameter itself. */
260 ipcp_lattice<tree> itself;
261 /* Lattice describing the polymorphic contexts of a parameter. */
262 ipcp_lattice<ipa_polymorphic_call_context> ctxlat;
263 /* Lattices describing aggregate parts. */
264 ipcp_agg_lattice *aggs;
265 /* Number of aggregate lattices */
266 int aggs_count;
267 /* True if aggregate data were passed by reference (as opposed to by
268 value). */
269 bool aggs_by_ref;
270 /* All aggregate lattices contain a variable component (in addition to
271 values). */
272 bool aggs_contain_variable;
273 /* The value of all aggregate lattices is bottom (i.e. variable and unusable
274 for any propagation). */
275 bool aggs_bottom;
277 /* There is a virtual call based on this parameter. */
278 bool virt_call;
281 /* Allocation pools for values and their sources in ipa-cp. */
283 alloc_pool ipcp_cst_values_pool;
284 alloc_pool ipcp_poly_ctx_values_pool;
285 alloc_pool ipcp_sources_pool;
286 alloc_pool ipcp_agg_lattice_pool;
288 /* Maximal count found in program. */
290 static gcov_type max_count;
292 /* Original overall size of the program. */
294 static long overall_size, max_new_size;
296 /* Return the param lattices structure corresponding to the Ith formal
297 parameter of the function described by INFO. */
298 static inline struct ipcp_param_lattices *
299 ipa_get_parm_lattices (struct ipa_node_params *info, int i)
301 gcc_assert (i >= 0 && i < ipa_get_param_count (info));
302 gcc_checking_assert (!info->ipcp_orig_node);
303 gcc_checking_assert (info->lattices);
304 return &(info->lattices[i]);
307 /* Return the lattice corresponding to the scalar value of the Ith formal
308 parameter of the function described by INFO. */
309 static inline ipcp_lattice<tree> *
310 ipa_get_scalar_lat (struct ipa_node_params *info, int i)
312 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
313 return &plats->itself;
316 /* Return the lattice corresponding to the polymorphic context of the Ith
317 formal parameter of the function described by INFO. */
318 static inline ipcp_lattice<ipa_polymorphic_call_context> *
319 ipa_get_poly_ctx_lat (struct ipa_node_params *info, int i)
321 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
322 return &plats->ctxlat;
325 /* Return whether LAT is a lattice with a single constant and without an
326 undefined value. */
328 template <typename valtype>
329 inline bool
330 ipcp_lattice<valtype>::is_single_const ()
332 if (bottom || contains_variable || values_count != 1)
333 return false;
334 else
335 return true;
338 /* Print V which is extracted from a value in a lattice to F. */
340 static void
341 print_ipcp_constant_value (FILE * f, tree v)
343 if (TREE_CODE (v) == ADDR_EXPR
344 && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
346 fprintf (f, "& ");
347 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)), 0);
349 else
350 print_generic_expr (f, v, 0);
353 /* Print V which is extracted from a value in a lattice to F. */
355 static void
356 print_ipcp_constant_value (FILE * f, ipa_polymorphic_call_context v)
358 v.dump(f, false);
361 /* Print a lattice LAT to F. */
363 template <typename valtype>
364 void
365 ipcp_lattice<valtype>::print (FILE * f, bool dump_sources, bool dump_benefits)
367 ipcp_value<valtype> *val;
368 bool prev = false;
370 if (bottom)
372 fprintf (f, "BOTTOM\n");
373 return;
376 if (!values_count && !contains_variable)
378 fprintf (f, "TOP\n");
379 return;
382 if (contains_variable)
384 fprintf (f, "VARIABLE");
385 prev = true;
386 if (dump_benefits)
387 fprintf (f, "\n");
390 for (val = values; val; val = val->next)
392 if (dump_benefits && prev)
393 fprintf (f, " ");
394 else if (!dump_benefits && prev)
395 fprintf (f, ", ");
396 else
397 prev = true;
399 print_ipcp_constant_value (f, val->value);
401 if (dump_sources)
403 ipcp_value_source<valtype> *s;
405 fprintf (f, " [from:");
406 for (s = val->sources; s; s = s->next)
407 fprintf (f, " %i(%i)", s->cs->caller->order,
408 s->cs->frequency);
409 fprintf (f, "]");
412 if (dump_benefits)
413 fprintf (f, " [loc_time: %i, loc_size: %i, "
414 "prop_time: %i, prop_size: %i]\n",
415 val->local_time_benefit, val->local_size_cost,
416 val->prop_time_benefit, val->prop_size_cost);
418 if (!dump_benefits)
419 fprintf (f, "\n");
422 /* Print all ipcp_lattices of all functions to F. */
424 static void
425 print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
427 struct cgraph_node *node;
428 int i, count;
430 fprintf (f, "\nLattices:\n");
431 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
433 struct ipa_node_params *info;
435 info = IPA_NODE_REF (node);
436 fprintf (f, " Node: %s/%i:\n", node->name (),
437 node->order);
438 count = ipa_get_param_count (info);
439 for (i = 0; i < count; i++)
441 struct ipcp_agg_lattice *aglat;
442 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
443 fprintf (f, " param [%d]: ", i);
444 plats->itself.print (f, dump_sources, dump_benefits);
445 fprintf (f, " ctxs: ");
446 plats->ctxlat.print (f, dump_sources, dump_benefits);
447 if (plats->virt_call)
448 fprintf (f, " virt_call flag set\n");
450 if (plats->aggs_bottom)
452 fprintf (f, " AGGS BOTTOM\n");
453 continue;
455 if (plats->aggs_contain_variable)
456 fprintf (f, " AGGS VARIABLE\n");
457 for (aglat = plats->aggs; aglat; aglat = aglat->next)
459 fprintf (f, " %soffset " HOST_WIDE_INT_PRINT_DEC ": ",
460 plats->aggs_by_ref ? "ref " : "", aglat->offset);
461 aglat->print (f, dump_sources, dump_benefits);
467 /* Determine whether it is at all technically possible to create clones of NODE
468 and store this information in the ipa_node_params structure associated
469 with NODE. */
471 static void
472 determine_versionability (struct cgraph_node *node)
474 const char *reason = NULL;
476 /* There are a number of generic reasons functions cannot be versioned. We
477 also cannot remove parameters if there are type attributes such as fnspec
478 present. */
479 if (node->alias || node->thunk.thunk_p)
480 reason = "alias or thunk";
481 else if (!node->local.versionable)
482 reason = "not a tree_versionable_function";
483 else if (node->get_availability () <= AVAIL_INTERPOSABLE)
484 reason = "insufficient body availability";
485 else if (!opt_for_fn (node->decl, optimize)
486 || !opt_for_fn (node->decl, flag_ipa_cp))
487 reason = "non-optimized function";
488 else if (lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (node->decl)))
490 /* Ideally we should clone the SIMD clones themselves and create
491 vector copies of them, so IPA-cp and SIMD clones can happily
492 coexist, but that may not be worth the effort. */
493 reason = "function has SIMD clones";
495 /* Don't clone decls local to a comdat group; it breaks, and for C++
496 decloned constructors inlining is always better anyway. */
497 else if (node->comdat_local_p ())
498 reason = "comdat-local function";
500 if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
501 fprintf (dump_file, "Function %s/%i is not versionable, reason: %s.\n",
502 node->name (), node->order, reason);
504 node->local.versionable = (reason == NULL);
507 /* Return true if it is at all technically possible to create clones of a
508 NODE. */
510 static bool
511 ipcp_versionable_function_p (struct cgraph_node *node)
513 return node->local.versionable;
516 /* Structure holding accumulated information about callers of a node. */
518 struct caller_statistics
520 gcov_type count_sum;
521 int n_calls, n_hot_calls, freq_sum;
524 /* Initialize fields of STAT to zeroes. */
526 static inline void
527 init_caller_stats (struct caller_statistics *stats)
529 stats->count_sum = 0;
530 stats->n_calls = 0;
531 stats->n_hot_calls = 0;
532 stats->freq_sum = 0;
535 /* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
536 non-thunk incoming edges to NODE. */
538 static bool
539 gather_caller_stats (struct cgraph_node *node, void *data)
541 struct caller_statistics *stats = (struct caller_statistics *) data;
542 struct cgraph_edge *cs;
544 for (cs = node->callers; cs; cs = cs->next_caller)
545 if (cs->caller->thunk.thunk_p)
546 cs->caller->call_for_symbol_thunks_and_aliases (gather_caller_stats,
547 stats, false);
548 else
550 stats->count_sum += cs->count;
551 stats->freq_sum += cs->frequency;
552 stats->n_calls++;
553 if (cs->maybe_hot_p ())
554 stats->n_hot_calls ++;
556 return false;
560 /* Return true if this NODE is a viable candidate for cloning. */
562 static bool
563 ipcp_cloning_candidate_p (struct cgraph_node *node)
565 struct caller_statistics stats;
567 gcc_checking_assert (node->has_gimple_body_p ());
569 if (!flag_ipa_cp_clone)
571 if (dump_file)
572 fprintf (dump_file, "Not considering %s for cloning; "
573 "-fipa-cp-clone disabled.\n",
574 node->name ());
575 return false;
578 if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
580 if (dump_file)
581 fprintf (dump_file, "Not considering %s for cloning; "
582 "optimizing it for size.\n",
583 node->name ());
584 return false;
587 init_caller_stats (&stats);
588 node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats, false);
590 if (inline_summary (node)->self_size < stats.n_calls)
592 if (dump_file)
593 fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
594 node->name ());
595 return true;
598 /* When a profile is available and the function is hot, propagate into it
599 even if its calls seem cold; constant propagation can improve the
600 function's speed significantly. */
601 if (max_count)
603 if (stats.count_sum > node->count * 90 / 100)
605 if (dump_file)
606 fprintf (dump_file, "Considering %s for cloning; "
607 "usually called directly.\n",
608 node->name ());
609 return true;
612 if (!stats.n_hot_calls)
614 if (dump_file)
615 fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
616 node->name ());
617 return false;
619 if (dump_file)
620 fprintf (dump_file, "Considering %s for cloning.\n",
621 node->name ());
622 return true;
625 template <typename valtype>
626 class value_topo_info
628 public:
629 /* Head of the linked list of topologically sorted values. */
630 ipcp_value<valtype> *values_topo;
631 /* Stack for creating SCCs, represented by a linked list too. */
632 ipcp_value<valtype> *stack;
633 /* Counter driving the algorithm in add_val_to_toposort. */
634 int dfs_counter;
636 value_topo_info () : values_topo (NULL), stack (NULL), dfs_counter (0)
638 void add_val (ipcp_value<valtype> *cur_val);
639 void propagate_effects ();
642 /* Arrays representing a topological ordering of call graph nodes and a stack
643 of nodes used during constant propagation and also data required to perform
644 topological sort of values and propagation of benefits in the determined
645 order. */
647 class ipa_topo_info
649 public:
650 /* Array with obtained topological order of cgraph nodes. */
651 struct cgraph_node **order;
652 /* Stack of cgraph nodes used during propagation within SCC until all values
653 in the SCC stabilize. */
654 struct cgraph_node **stack;
655 int nnodes, stack_top;
657 value_topo_info<tree> constants;
658 value_topo_info<ipa_polymorphic_call_context> contexts;
660 ipa_topo_info () : order(NULL), stack(NULL), nnodes(0), stack_top(0),
661 constants ()
665 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
667 static void
668 build_toporder_info (struct ipa_topo_info *topo)
670 topo->order = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
671 topo->stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
673 gcc_checking_assert (topo->stack_top == 0);
674 topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
677 /* Free information about strongly connected components and the arrays in
678 TOPO. */
680 static void
681 free_toporder_info (struct ipa_topo_info *topo)
683 ipa_free_postorder_info ();
684 free (topo->order);
685 free (topo->stack);
688 /* Add NODE to the stack in TOPO, unless it is already there. */
690 static inline void
691 push_node_to_stack (struct ipa_topo_info *topo, struct cgraph_node *node)
693 struct ipa_node_params *info = IPA_NODE_REF (node);
694 if (info->node_enqueued)
695 return;
696 info->node_enqueued = 1;
697 topo->stack[topo->stack_top++] = node;
700 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
701 is empty. */
703 static struct cgraph_node *
704 pop_node_from_stack (struct ipa_topo_info *topo)
706 if (topo->stack_top)
708 struct cgraph_node *node;
709 topo->stack_top--;
710 node = topo->stack[topo->stack_top];
711 IPA_NODE_REF (node)->node_enqueued = 0;
712 return node;
714 else
715 return NULL;
718 /* Set lattice LAT to bottom and return true if it previously was not set as
719 such. */
721 template <typename valtype>
722 inline bool
723 ipcp_lattice<valtype>::set_to_bottom ()
725 bool ret = !bottom;
726 bottom = true;
727 return ret;
730 /* Mark lattice as containing an unknown value and return true if it previously
731 was not marked as such. */
733 template <typename valtype>
734 inline bool
735 ipcp_lattice<valtype>::set_contains_variable ()
737 bool ret = !contains_variable;
738 contains_variable = true;
739 return ret;
742 /* Set all aggregate lattices in PLATS to bottom and return true if they were
743 not previously set as such. */
745 static inline bool
746 set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
748 bool ret = !plats->aggs_bottom;
749 plats->aggs_bottom = true;
750 return ret;
753 /* Mark all aggregate lattices in PLATS as containing an unknown value and
754 return true if they were not previously marked as such. */
756 static inline bool
757 set_agg_lats_contain_variable (struct ipcp_param_lattices *plats)
759 bool ret = !plats->aggs_contain_variable;
760 plats->aggs_contain_variable = true;
761 return ret;
764 /* Mark both aggregate and scalar lattices as containing an unknown variable,
765 return true if any of them has not been marked as such so far. */
767 static inline bool
768 set_all_contains_variable (struct ipcp_param_lattices *plats)
770 bool ret;
771 ret = plats->itself.set_contains_variable ();
772 ret |= plats->ctxlat.set_contains_variable ();
773 ret |= set_agg_lats_contain_variable (plats);
774 return ret;
777 /* Initialize ipcp_lattices. */
779 static void
780 initialize_node_lattices (struct cgraph_node *node)
782 struct ipa_node_params *info = IPA_NODE_REF (node);
783 struct cgraph_edge *ie;
784 bool disable = false, variable = false;
785 int i;
787 gcc_checking_assert (node->has_gimple_body_p ());
788 if (!cgraph_local_p (node))
790 /* When cloning is allowed, we can assume that externally visible
791 functions are not called. We will compensate for this by cloning
792 later. */
793 if (ipcp_versionable_function_p (node)
794 && ipcp_cloning_candidate_p (node))
795 variable = true;
796 else
797 disable = true;
800 if (disable || variable)
802 for (i = 0; i < ipa_get_param_count (info) ; i++)
804 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
805 if (disable)
807 plats->itself.set_to_bottom ();
808 plats->ctxlat.set_to_bottom ();
809 set_agg_lats_to_bottom (plats);
811 else
812 set_all_contains_variable (plats);
814 if (dump_file && (dump_flags & TDF_DETAILS)
815 && !node->alias && !node->thunk.thunk_p)
816 fprintf (dump_file, "Marking all lattices of %s/%i as %s\n",
817 node->name (), node->order,
818 disable ? "BOTTOM" : "VARIABLE");
821 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
822 if (ie->indirect_info->polymorphic
823 && ie->indirect_info->param_index >= 0)
825 gcc_checking_assert (ie->indirect_info->param_index >= 0);
826 ipa_get_parm_lattices (info,
827 ie->indirect_info->param_index)->virt_call = 1;
831 /* Return the result of a (possibly arithmetic) pass through jump function
832 JFUNC on the constant value INPUT. Return NULL_TREE if that cannot be
833 determined or be considered an interprocedural invariant. */
835 static tree
836 ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
838 tree restype, res;
840 gcc_checking_assert (is_gimple_ip_invariant (input));
841 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
842 return input;
844 if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc))
845 == tcc_comparison)
846 restype = boolean_type_node;
847 else
848 restype = TREE_TYPE (input);
849 res = fold_binary (ipa_get_jf_pass_through_operation (jfunc), restype,
850 input, ipa_get_jf_pass_through_operand (jfunc));
852 if (res && !is_gimple_ip_invariant (res))
853 return NULL_TREE;
855 return res;
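/* For example (values are illustrative only), a pass-through jump function
   describing the operation "+ 1" applied to the constant 7 yields 8, a
   comparison such as "!= 0" yields a boolean constant, and a plain NOP_EXPR
   pass-through simply returns the input constant itself. */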
858 /* Return the result of an ancestor jump function JFUNC on the constant value
859 INPUT. Return NULL_TREE if that cannot be determined. */
861 static tree
862 ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
864 gcc_checking_assert (TREE_CODE (input) != TREE_BINFO);
865 if (TREE_CODE (input) == ADDR_EXPR)
867 tree t = TREE_OPERAND (input, 0);
868 t = build_ref_for_offset (EXPR_LOCATION (t), t,
869 ipa_get_jf_ancestor_offset (jfunc),
870 ptr_type_node, NULL, false);
871 return build_fold_addr_expr (t);
873 else
874 return NULL_TREE;
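/* For example (purely illustrative), if the caller passes &v and the
   ancestor jump function carries the offset of a base sub-object inside the
   type of v, the result above is the folded address of that sub-object;
   for any input that is not an ADDR_EXPR the result is unknown. */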
877 /* Determine whether JFUNC evaluates to a single known constant value and if
878 so, return it. Otherwise return NULL. INFO describes the caller node or
879 the one it is inlined to, so that pass-through jump functions can be
880 evaluated. */
882 tree
883 ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
885 if (jfunc->type == IPA_JF_CONST)
886 return ipa_get_jf_constant (jfunc);
887 else if (jfunc->type == IPA_JF_PASS_THROUGH
888 || jfunc->type == IPA_JF_ANCESTOR)
890 tree input;
891 int idx;
893 if (jfunc->type == IPA_JF_PASS_THROUGH)
894 idx = ipa_get_jf_pass_through_formal_id (jfunc);
895 else
896 idx = ipa_get_jf_ancestor_formal_id (jfunc);
898 if (info->ipcp_orig_node)
899 input = info->known_csts[idx];
900 else
902 ipcp_lattice<tree> *lat;
904 if (!info->lattices)
906 gcc_checking_assert (!flag_ipa_cp);
907 return NULL_TREE;
909 lat = ipa_get_scalar_lat (info, idx);
910 if (!lat->is_single_const ())
911 return NULL_TREE;
912 input = lat->values->value;
915 if (!input)
916 return NULL_TREE;
918 if (jfunc->type == IPA_JF_PASS_THROUGH)
919 return ipa_get_jf_pass_through_result (jfunc, input);
920 else
921 return ipa_get_jf_ancestor_result (jfunc, input);
923 else
924 return NULL_TREE;
927 /* Determine whether JFUNC evaluates to a single known polymorphic context,
928 given that INFO describes the caller node or the one it is inlined to, CS is
929 the call graph edge corresponding to JFUNC and CSIDX is the index of the
930 described parameter. */
932 ipa_polymorphic_call_context
933 ipa_context_from_jfunc (ipa_node_params *info, cgraph_edge *cs, int csidx,
934 ipa_jump_func *jfunc)
936 ipa_edge_args *args = IPA_EDGE_REF (cs);
937 ipa_polymorphic_call_context ctx;
938 ipa_polymorphic_call_context *edge_ctx
939 = cs ? ipa_get_ith_polymorhic_call_context (args, csidx) : NULL;
941 if (edge_ctx && !edge_ctx->useless_p ())
942 ctx = *edge_ctx;
944 if (jfunc->type == IPA_JF_PASS_THROUGH
945 || jfunc->type == IPA_JF_ANCESTOR)
947 ipa_polymorphic_call_context srcctx;
948 int srcidx;
949 bool type_preserved = true;
950 if (jfunc->type == IPA_JF_PASS_THROUGH)
952 if (ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
953 return ctx;
954 type_preserved = ipa_get_jf_pass_through_type_preserved (jfunc);
955 srcidx = ipa_get_jf_pass_through_formal_id (jfunc);
957 else
959 type_preserved = ipa_get_jf_ancestor_type_preserved (jfunc);
960 srcidx = ipa_get_jf_ancestor_formal_id (jfunc);
962 if (info->ipcp_orig_node)
964 if (info->known_contexts.exists ())
965 srcctx = info->known_contexts[srcidx];
967 else
969 if (!info->lattices)
971 gcc_checking_assert (!flag_ipa_cp);
972 return ctx;
974 ipcp_lattice<ipa_polymorphic_call_context> *lat;
975 lat = ipa_get_poly_ctx_lat (info, srcidx);
976 if (!lat->is_single_const ())
977 return ctx;
978 srcctx = lat->values->value;
980 if (srcctx.useless_p ())
981 return ctx;
982 if (jfunc->type == IPA_JF_ANCESTOR)
983 srcctx.offset_by (ipa_get_jf_ancestor_offset (jfunc));
984 if (!type_preserved)
985 srcctx.possible_dynamic_type_change (cs->in_polymorphic_cdtor);
986 srcctx.combine_with (ctx);
987 return srcctx;
990 return ctx;
993 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
994 bottom, not containing a variable component and without any known value at
995 the same time. */
997 DEBUG_FUNCTION void
998 ipcp_verify_propagated_values (void)
1000 struct cgraph_node *node;
1002 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
1004 struct ipa_node_params *info = IPA_NODE_REF (node);
1005 int i, count = ipa_get_param_count (info);
1007 for (i = 0; i < count; i++)
1009 ipcp_lattice<tree> *lat = ipa_get_scalar_lat (info, i);
1011 if (!lat->bottom
1012 && !lat->contains_variable
1013 && lat->values_count == 0)
1015 if (dump_file)
1017 symtab_node::dump_table (dump_file);
1018 fprintf (dump_file, "\nIPA lattices after constant "
1019 "propagation, before gcc_unreachable:\n");
1020 print_all_lattices (dump_file, true, false);
1023 gcc_unreachable ();
1029 /* Return true iff X and Y should be considered equal values by IPA-CP. */
1031 static bool
1032 values_equal_for_ipcp_p (tree x, tree y)
1034 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
1036 if (x == y)
1037 return true;
1039 if (TREE_CODE (x) == ADDR_EXPR
1040 && TREE_CODE (y) == ADDR_EXPR
1041 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
1042 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
1043 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
1044 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
1045 else
1046 return operand_equal_p (x, y, 0);
1049 /* Return true iff X and Y should be considered equal contexts by IPA-CP. */
1051 static bool
1052 values_equal_for_ipcp_p (ipa_polymorphic_call_context x,
1053 ipa_polymorphic_call_context y)
1055 return x.equal_to (y);
1059 /* Add a new value source to the value represented by THIS, marking that a
1060 value comes from edge CS and (if the underlying jump function is a
1061 pass-through or an ancestor one) from a caller value SRC_VAL of a caller
1062 parameter described by SRC_IDX. OFFSET is negative if the source was the
1063 scalar value of the parameter itself, otherwise it is the offset within an aggregate. */
1065 template <typename valtype>
1066 void
1067 ipcp_value<valtype>::add_source (cgraph_edge *cs, ipcp_value *src_val,
1068 int src_idx, HOST_WIDE_INT offset)
1070 ipcp_value_source<valtype> *src;
1072 src = new (pool_alloc (ipcp_sources_pool)) ipcp_value_source<valtype>;
1073 src->offset = offset;
1074 src->cs = cs;
1075 src->val = src_val;
1076 src->index = src_idx;
1078 src->next = sources;
1079 sources = src;
1082 /* Allocate a new ipcp_value holding a tree constant, initialize its value to
1083 SOURCE and clear all other fields. */
1085 static ipcp_value<tree> *
1086 allocate_and_init_ipcp_value (tree source)
1088 ipcp_value<tree> *val;
1090 val = new (pool_alloc (ipcp_cst_values_pool)) ipcp_value<tree>;
1091 memset (val, 0, sizeof (*val));
1092 val->value = source;
1093 return val;
1096 /* Allocate a new ipcp_value holding a polymorphic context, initialize its
1097 value to SOURCE and clear all other fields. */
1099 static ipcp_value<ipa_polymorphic_call_context> *
1100 allocate_and_init_ipcp_value (ipa_polymorphic_call_context source)
1102 ipcp_value<ipa_polymorphic_call_context> *val;
1104 val = new (pool_alloc (ipcp_poly_ctx_values_pool))
1105 ipcp_value<ipa_polymorphic_call_context>;
1106 memset (val, 0, sizeof (*val));
1107 val->value = source;
1108 return val;
1111 /* Try to add NEWVAL to LAT, potentially creating a new ipcp_value for it. CS,
1112 SRC_VAL, SRC_IDX and OFFSET are meant for add_source and have the same
1113 meaning. OFFSET -1 means the source is scalar and not a part of an
1114 aggregate. */
1116 template <typename valtype>
1117 bool
1118 ipcp_lattice<valtype>::add_value (valtype newval, cgraph_edge *cs,
1119 ipcp_value<valtype> *src_val,
1120 int src_idx, HOST_WIDE_INT offset)
1122 ipcp_value<valtype> *val;
1124 if (bottom)
1125 return false;
1127 for (val = values; val; val = val->next)
1128 if (values_equal_for_ipcp_p (val->value, newval))
1130 if (ipa_edge_within_scc (cs))
1132 ipcp_value_source<valtype> *s;
1133 for (s = val->sources; s ; s = s->next)
1134 if (s->cs == cs)
1135 break;
1136 if (s)
1137 return false;
1140 val->add_source (cs, src_val, src_idx, offset);
1141 return false;
1144 if (values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
1146 /* We can only free sources, not the values themselves, because sources
1147 of other values in this SCC might point to them. */
1148 for (val = values; val; val = val->next)
1150 while (val->sources)
1152 ipcp_value_source<valtype> *src = val->sources;
1153 val->sources = src->next;
1154 pool_free (ipcp_sources_pool, src);
1158 values = NULL;
1159 return set_to_bottom ();
1162 values_count++;
1163 val = allocate_and_init_ipcp_value (newval);
1164 val->add_source (cs, src_val, src_idx, offset);
1165 val->next = values;
1166 values = val;
1167 return true;
1170 /* Propagate values through a pass-through jump function JFUNC associated with
1171 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1172 is the index of the source parameter. */
1174 static bool
1175 propagate_vals_accross_pass_through (cgraph_edge *cs,
1176 ipa_jump_func *jfunc,
1177 ipcp_lattice<tree> *src_lat,
1178 ipcp_lattice<tree> *dest_lat,
1179 int src_idx)
1181 ipcp_value<tree> *src_val;
1182 bool ret = false;
1184 /* Do not create new values when propagating within an SCC because if there
1185 are arithmetic functions with circular dependencies, there is an infinite
1186 number of them and we would just make lattices bottom. */
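  /* For instance (an invented example), a function recursively calling
     itself with "i + 1" would, starting from any single constant, keep
     producing new constants 1, 2, 3, ... during propagation, so within an
     SCC we settle for a variable destination instead. */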
1187 if ((ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1188 && ipa_edge_within_scc (cs))
1189 ret = dest_lat->set_contains_variable ();
1190 else
1191 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1193 tree cstval = ipa_get_jf_pass_through_result (jfunc, src_val->value);
1195 if (cstval)
1196 ret |= dest_lat->add_value (cstval, cs, src_val, src_idx);
1197 else
1198 ret |= dest_lat->set_contains_variable ();
1201 return ret;
1204 /* Propagate values through an ancestor jump function JFUNC associated with
1205 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1206 is the index of the source parameter. */
1208 static bool
1209 propagate_vals_accross_ancestor (struct cgraph_edge *cs,
1210 struct ipa_jump_func *jfunc,
1211 ipcp_lattice<tree> *src_lat,
1212 ipcp_lattice<tree> *dest_lat,
1213 int src_idx)
1215 ipcp_value<tree> *src_val;
1216 bool ret = false;
1218 if (ipa_edge_within_scc (cs))
1219 return dest_lat->set_contains_variable ();
1221 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1223 tree t = ipa_get_jf_ancestor_result (jfunc, src_val->value);
1225 if (t)
1226 ret |= dest_lat->add_value (t, cs, src_val, src_idx);
1227 else
1228 ret |= dest_lat->set_contains_variable ();
1231 return ret;
1234 /* Propagate scalar values across jump function JFUNC that is associated with
1235 edge CS and put the values into DEST_LAT. */
1237 static bool
1238 propagate_scalar_accross_jump_function (struct cgraph_edge *cs,
1239 struct ipa_jump_func *jfunc,
1240 ipcp_lattice<tree> *dest_lat)
1242 if (dest_lat->bottom)
1243 return false;
1245 if (jfunc->type == IPA_JF_CONST)
1247 tree val = ipa_get_jf_constant (jfunc);
1248 return dest_lat->add_value (val, cs, NULL, 0);
1250 else if (jfunc->type == IPA_JF_PASS_THROUGH
1251 || jfunc->type == IPA_JF_ANCESTOR)
1253 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1254 ipcp_lattice<tree> *src_lat;
1255 int src_idx;
1256 bool ret;
1258 if (jfunc->type == IPA_JF_PASS_THROUGH)
1259 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1260 else
1261 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1263 src_lat = ipa_get_scalar_lat (caller_info, src_idx);
1264 if (src_lat->bottom)
1265 return dest_lat->set_contains_variable ();
1267 /* If we would need to clone the caller and cannot, do not propagate. */
1268 if (!ipcp_versionable_function_p (cs->caller)
1269 && (src_lat->contains_variable
1270 || (src_lat->values_count > 1)))
1271 return dest_lat->set_contains_variable ();
1273 if (jfunc->type == IPA_JF_PASS_THROUGH)
1274 ret = propagate_vals_accross_pass_through (cs, jfunc, src_lat,
1275 dest_lat, src_idx);
1276 else
1277 ret = propagate_vals_accross_ancestor (cs, jfunc, src_lat, dest_lat,
1278 src_idx);
1280 if (src_lat->contains_variable)
1281 ret |= dest_lat->set_contains_variable ();
1283 return ret;
1286 /* TODO: We currently do not handle member method pointers in IPA-CP (we only
1287 use it for indirect inlining), we should propagate them too. */
1288 return dest_lat->set_contains_variable ();
1291 /* Propagate polymorphic contexts across jump function JFUNC that is associated
1292 with edge CS and describes argument IDX and put the values into DEST_LAT. */
1294 static bool
1295 propagate_context_accross_jump_function (cgraph_edge *cs,
1296 ipa_jump_func *jfunc, int idx,
1297 ipcp_lattice<ipa_polymorphic_call_context> *dest_lat)
1299 ipa_edge_args *args = IPA_EDGE_REF (cs);
1300 if (dest_lat->bottom)
1301 return false;
1302 bool ret = false;
1303 bool added_sth = false;
1304 bool type_preserved = true;
1306 ipa_polymorphic_call_context edge_ctx, *edge_ctx_ptr
1307 = ipa_get_ith_polymorhic_call_context (args, idx);
1309 if (edge_ctx_ptr)
1310 edge_ctx = *edge_ctx_ptr;
1312 if (jfunc->type == IPA_JF_PASS_THROUGH
1313 || jfunc->type == IPA_JF_ANCESTOR)
1315 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1316 int src_idx;
1317 ipcp_lattice<ipa_polymorphic_call_context> *src_lat;
1319 /* TODO: Once we figure out how to propagate speculations, it will
1320 probably be a good idea to switch to speculation if type_preserved is
1321 not set instead of punting. */
1322 if (jfunc->type == IPA_JF_PASS_THROUGH)
1324 if (ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1325 goto prop_fail;
1326 type_preserved = ipa_get_jf_pass_through_type_preserved (jfunc);
1327 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1329 else
1331 type_preserved = ipa_get_jf_ancestor_type_preserved (jfunc);
1332 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1335 src_lat = ipa_get_poly_ctx_lat (caller_info, src_idx);
1336 /* If we would need to clone the caller and cannot, do not propagate. */
1337 if (!ipcp_versionable_function_p (cs->caller)
1338 && (src_lat->contains_variable
1339 || (src_lat->values_count > 1)))
1340 goto prop_fail;
1342 ipcp_value<ipa_polymorphic_call_context> *src_val;
1343 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1345 ipa_polymorphic_call_context cur = src_val->value;
1347 if (!type_preserved)
1348 cur.possible_dynamic_type_change (cs->in_polymorphic_cdtor);
1349 if (jfunc->type == IPA_JF_ANCESTOR)
1350 cur.offset_by (ipa_get_jf_ancestor_offset (jfunc));
1351 /* TODO: In cases we know how the context is going to be used,
1352 we can improve the result by passing proper OTR_TYPE. */
1353 cur.combine_with (edge_ctx);
1354 if (!cur.useless_p ())
1356 if (src_lat->contains_variable
1357 && !edge_ctx.equal_to (cur))
1358 ret |= dest_lat->set_contains_variable ();
1359 ret |= dest_lat->add_value (cur, cs, src_val, src_idx);
1360 added_sth = true;
1366 prop_fail:
1367 if (!added_sth)
1369 if (!edge_ctx.useless_p ())
1370 ret |= dest_lat->add_value (edge_ctx, cs);
1371 else
1372 ret |= dest_lat->set_contains_variable ();
1375 return ret;
1378 /* If DEST_PLATS already has aggregate items, check that aggs_by_ref matches
1379 NEW_AGGS_BY_REF and if not, mark all aggs as bottoms and return true (in all
1380 other cases, return false). If there are no aggregate items, set
1381 aggs_by_ref to NEW_AGGS_BY_REF. */
1383 static bool
1384 set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
1385 bool new_aggs_by_ref)
1387 if (dest_plats->aggs)
1389 if (dest_plats->aggs_by_ref != new_aggs_by_ref)
1391 set_agg_lats_to_bottom (dest_plats);
1392 return true;
1395 else
1396 dest_plats->aggs_by_ref = new_aggs_by_ref;
1397 return false;
1400 /* Walk aggregate lattices in DEST_PLATS from ***AGLAT on, until ***aglat is an
1401 already existing lattice for the given OFFSET and SIZE, marking all skipped
1402 lattices as containing variable and checking for overlaps. If there is no
1403 already existing lattice for the OFFSET and VAL_SIZE, create one, initialize
1404 it with offset, size and contains_variable to PRE_EXISTING, and return true,
1405 unless there are too many already. If there are too many, return false. If
1406 there are overlaps turn the whole DEST_PLATS to bottom and return false. If any
1407 skipped lattices were newly marked as containing variable, set *CHANGE to
1408 true. */
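/* For instance (offsets and sizes invented for this comment), with existing
   lattices at offsets 0 and 64, each 32 bits wide, a step asking for offset
   64 and size 32 marks the offset-0 lattice as containing a variable and
   succeeds on the existing offset-64 one, whereas a step asking for offset
   32 and size 64 overlaps the offset-64 lattice and turns the whole
   DEST_PLATS to bottom. */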
1410 static bool
1411 merge_agg_lats_step (struct ipcp_param_lattices *dest_plats,
1412 HOST_WIDE_INT offset, HOST_WIDE_INT val_size,
1413 struct ipcp_agg_lattice ***aglat,
1414 bool pre_existing, bool *change)
1416 gcc_checking_assert (offset >= 0);
1418 while (**aglat && (**aglat)->offset < offset)
1420 if ((**aglat)->offset + (**aglat)->size > offset)
1422 set_agg_lats_to_bottom (dest_plats);
1423 return false;
1425 *change |= (**aglat)->set_contains_variable ();
1426 *aglat = &(**aglat)->next;
1429 if (**aglat && (**aglat)->offset == offset)
1431 if ((**aglat)->size != val_size
1432 || ((**aglat)->next
1433 && (**aglat)->next->offset < offset + val_size))
1435 set_agg_lats_to_bottom (dest_plats);
1436 return false;
1438 gcc_checking_assert (!(**aglat)->next
1439 || (**aglat)->next->offset >= offset + val_size);
1440 return true;
1442 else
1444 struct ipcp_agg_lattice *new_al;
1446 if (**aglat && (**aglat)->offset < offset + val_size)
1448 set_agg_lats_to_bottom (dest_plats);
1449 return false;
1451 if (dest_plats->aggs_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1452 return false;
1453 dest_plats->aggs_count++;
1454 new_al = (struct ipcp_agg_lattice *) pool_alloc (ipcp_agg_lattice_pool);
1455 memset (new_al, 0, sizeof (*new_al));
1457 new_al->offset = offset;
1458 new_al->size = val_size;
1459 new_al->contains_variable = pre_existing;
1461 new_al->next = **aglat;
1462 **aglat = new_al;
1463 return true;
1467 /* Set all AGLAT and all other aggregate lattices reachable by next pointers as
1468 containing an unknown value. */
1470 static bool
1471 set_chain_of_aglats_contains_variable (struct ipcp_agg_lattice *aglat)
1473 bool ret = false;
1474 while (aglat)
1476 ret |= aglat->set_contains_variable ();
1477 aglat = aglat->next;
1479 return ret;
1482 /* Merge existing aggregate lattices in SRC_PLATS to DEST_PLATS, subtracting
1483 DELTA_OFFSET. CS is the call graph edge and SRC_IDX the index of the source
1484 parameter used for lattice value sources. Return true if DEST_PLATS changed
1485 in any way. */
1487 static bool
1488 merge_aggregate_lattices (struct cgraph_edge *cs,
1489 struct ipcp_param_lattices *dest_plats,
1490 struct ipcp_param_lattices *src_plats,
1491 int src_idx, HOST_WIDE_INT offset_delta)
1493 bool pre_existing = dest_plats->aggs != NULL;
1494 struct ipcp_agg_lattice **dst_aglat;
1495 bool ret = false;
1497 if (set_check_aggs_by_ref (dest_plats, src_plats->aggs_by_ref))
1498 return true;
1499 if (src_plats->aggs_bottom)
1500 return set_agg_lats_contain_variable (dest_plats);
1501 if (src_plats->aggs_contain_variable)
1502 ret |= set_agg_lats_contain_variable (dest_plats);
1503 dst_aglat = &dest_plats->aggs;
1505 for (struct ipcp_agg_lattice *src_aglat = src_plats->aggs;
1506 src_aglat;
1507 src_aglat = src_aglat->next)
1509 HOST_WIDE_INT new_offset = src_aglat->offset - offset_delta;
1511 if (new_offset < 0)
1512 continue;
1513 if (merge_agg_lats_step (dest_plats, new_offset, src_aglat->size,
1514 &dst_aglat, pre_existing, &ret))
1516 struct ipcp_agg_lattice *new_al = *dst_aglat;
1518 dst_aglat = &(*dst_aglat)->next;
1519 if (src_aglat->bottom)
1521 ret |= new_al->set_contains_variable ();
1522 continue;
1524 if (src_aglat->contains_variable)
1525 ret |= new_al->set_contains_variable ();
1526 for (ipcp_value<tree> *val = src_aglat->values;
1527 val;
1528 val = val->next)
1529 ret |= new_al->add_value (val->value, cs, val, src_idx,
1530 src_aglat->offset);
1532 else if (dest_plats->aggs_bottom)
1533 return true;
1535 ret |= set_chain_of_aglats_contains_variable (*dst_aglat);
1536 return ret;
1539 /* Determine whether there is anything to propagate from SRC_PLATS through a
1540 pass-through JFUNC and if so, whether it conforms to the
1541 rules about propagating values passed by reference. */
1543 static bool
1544 agg_pass_through_permissible_p (struct ipcp_param_lattices *src_plats,
1545 struct ipa_jump_func *jfunc)
1547 return src_plats->aggs
1548 && (!src_plats->aggs_by_ref
1549 || ipa_get_jf_pass_through_agg_preserved (jfunc));
1552 /* Propagate aggregate values across jump function JFUNC that is associated with
1553 edge CS and put the values into DEST_PLATS. */
1555 static bool
1556 propagate_aggs_accross_jump_function (struct cgraph_edge *cs,
1557 struct ipa_jump_func *jfunc,
1558 struct ipcp_param_lattices *dest_plats)
1560 bool ret = false;
1562 if (dest_plats->aggs_bottom)
1563 return false;
1565 if (jfunc->type == IPA_JF_PASS_THROUGH
1566 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
1568 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1569 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1570 struct ipcp_param_lattices *src_plats;
1572 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1573 if (agg_pass_through_permissible_p (src_plats, jfunc))
1575 /* Currently we do not produce clobber aggregate jump
1576 functions, replace with merging when we do. */
1577 gcc_assert (!jfunc->agg.items);
1578 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats,
1579 src_idx, 0);
1581 else
1582 ret |= set_agg_lats_contain_variable (dest_plats);
1584 else if (jfunc->type == IPA_JF_ANCESTOR
1585 && ipa_get_jf_ancestor_agg_preserved (jfunc))
1587 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1588 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1589 struct ipcp_param_lattices *src_plats;
1591 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1592 if (src_plats->aggs && src_plats->aggs_by_ref)
1594 /* Currently we do not produce clobber aggregate jump
1595 functions, replace with merging when we do. */
1596 gcc_assert (!jfunc->agg.items);
1597 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats, src_idx,
1598 ipa_get_jf_ancestor_offset (jfunc));
1600 else if (!src_plats->aggs_by_ref)
1601 ret |= set_agg_lats_to_bottom (dest_plats);
1602 else
1603 ret |= set_agg_lats_contain_variable (dest_plats);
1605 else if (jfunc->agg.items)
1607 bool pre_existing = dest_plats->aggs != NULL;
1608 struct ipcp_agg_lattice **aglat = &dest_plats->aggs;
1609 struct ipa_agg_jf_item *item;
1610 int i;
1612 if (set_check_aggs_by_ref (dest_plats, jfunc->agg.by_ref))
1613 return true;
1615 FOR_EACH_VEC_ELT (*jfunc->agg.items, i, item)
1617 HOST_WIDE_INT val_size;
1619 if (item->offset < 0)
1620 continue;
1621 gcc_checking_assert (is_gimple_ip_invariant (item->value));
1622 val_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (item->value)));
1624 if (merge_agg_lats_step (dest_plats, item->offset, val_size,
1625 &aglat, pre_existing, &ret))
1627 ret |= (*aglat)->add_value (item->value, cs, NULL, 0, 0);
1628 aglat = &(*aglat)->next;
1630 else if (dest_plats->aggs_bottom)
1631 return true;
1634 ret |= set_chain_of_aglats_contains_variable (*aglat);
1636 else
1637 ret |= set_agg_lats_contain_variable (dest_plats);
1639 return ret;
1642 /* Propagate constants from the caller to the callee of CS. */
1645 static bool
1646 propagate_constants_accross_call (struct cgraph_edge *cs)
1648 struct ipa_node_params *callee_info;
1649 enum availability availability;
1650 struct cgraph_node *callee, *alias_or_thunk;
1651 struct ipa_edge_args *args;
1652 bool ret = false;
1653 int i, args_count, parms_count;
1655 callee = cs->callee->function_symbol (&availability);
1656 if (!callee->definition)
1657 return false;
1658 gcc_checking_assert (callee->has_gimple_body_p ());
1659 callee_info = IPA_NODE_REF (callee);
1661 args = IPA_EDGE_REF (cs);
1662 args_count = ipa_get_cs_argument_count (args);
1663 parms_count = ipa_get_param_count (callee_info);
1664 if (parms_count == 0)
1665 return false;
1667 /* No propagation through instrumentation thunks is available yet.
1668 It should be possible with proper mapping of call args and
1669 instrumented callee params in the propagation loop below. But
1670 this case mostly occurs when legacy code calls instrumented code
1671 and it is not a primary target for optimizations.
1672 We detect instrumentation thunks in aliases and thunks chain by
1673 checking instrumentation_clone flag for chain source and target.
1674 Going through instrumentation thunks we always have it changed
1675 from 0 to 1 and all other nodes do not change it. */
1676 if (!cs->callee->instrumentation_clone
1677 && callee->instrumentation_clone)
1679 for (i = 0; i < parms_count; i++)
1680 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
1681 i));
1682 return ret;
1685 /* If this call goes through a thunk we must not propagate to the first (0th)
1686 parameter. However, we might need to uncover a thunk from below a series
1687 of aliases first. */
1688 alias_or_thunk = cs->callee;
1689 while (alias_or_thunk->alias)
1690 alias_or_thunk = alias_or_thunk->get_alias_target ();
1691 if (alias_or_thunk->thunk.thunk_p)
1693 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
1694 0));
1695 i = 1;
1697 else
1698 i = 0;
1700 for (; (i < args_count) && (i < parms_count); i++)
1702 struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
1703 struct ipcp_param_lattices *dest_plats;
1705 dest_plats = ipa_get_parm_lattices (callee_info, i);
1706 if (availability == AVAIL_INTERPOSABLE)
1707 ret |= set_all_contains_variable (dest_plats);
1708 else
1710 ret |= propagate_scalar_accross_jump_function (cs, jump_func,
1711 &dest_plats->itself);
1712 ret |= propagate_context_accross_jump_function (cs, jump_func, i,
1713 &dest_plats->ctxlat);
1714 ret |= propagate_aggs_accross_jump_function (cs, jump_func,
1715 dest_plats);
1718 for (; i < parms_count; i++)
1719 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info, i));
1721 return ret;
1724 /* If an indirect edge IE can be turned into a direct one based on KNOWN_CSTS,
1725 KNOWN_CONTEXTS, KNOWN_AGGS or AGG_REPS return the destination. The latter
1726 three can be NULL. If AGG_REPS is not NULL, KNOWN_AGGS is ignored. */
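/* For instance, for a simple (non-polymorphic) indirect call through a
   parameter whose known constant is of the form &f with f being a
   FUNCTION_DECL, the destination is f itself; for polymorphic calls the
   known context and, when available, the virtual table contents are
   consulted instead. */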
1728 static tree
1729 ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
1730 vec<tree> known_csts,
1731 vec<ipa_polymorphic_call_context> known_contexts,
1732 vec<ipa_agg_jump_function_p> known_aggs,
1733 struct ipa_agg_replacement_value *agg_reps)
1735 int param_index = ie->indirect_info->param_index;
1736 HOST_WIDE_INT anc_offset;
1737 tree t;
1738 tree target = NULL;
1740 if (param_index == -1
1741 || known_csts.length () <= (unsigned int) param_index)
1742 return NULL_TREE;
1744 if (!ie->indirect_info->polymorphic)
1746 tree t;
1748 if (ie->indirect_info->agg_contents)
1750 if (agg_reps)
1752 t = NULL;
1753 while (agg_reps)
1755 if (agg_reps->index == param_index
1756 && agg_reps->offset == ie->indirect_info->offset
1757 && agg_reps->by_ref == ie->indirect_info->by_ref)
1759 t = agg_reps->value;
1760 break;
1762 agg_reps = agg_reps->next;
1765 else if (known_aggs.length () > (unsigned int) param_index)
1767 struct ipa_agg_jump_function *agg;
1768 agg = known_aggs[param_index];
1769 t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
1770 ie->indirect_info->by_ref);
1772 else
1773 t = NULL;
1775 else
1776 t = known_csts[param_index];
1778 if (t
1779 && TREE_CODE (t) == ADDR_EXPR
1780 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
1781 return TREE_OPERAND (t, 0);
1782 else
1783 return NULL_TREE;
1786 if (!flag_devirtualize)
1787 return NULL_TREE;
1789 gcc_assert (!ie->indirect_info->agg_contents);
1790 anc_offset = ie->indirect_info->offset;
1792 t = NULL;
1794 /* Try to work out the value of the virtual table pointer in replacements. */
1795 if (!t && agg_reps && !ie->indirect_info->by_ref
1796 && !ie->indirect_info->vptr_changed)
1798 while (agg_reps)
1800 if (agg_reps->index == param_index
1801 && agg_reps->offset == ie->indirect_info->offset
1802 && agg_reps->by_ref)
1804 t = agg_reps->value;
1805 break;
1807 agg_reps = agg_reps->next;
1811 /* Try to work out the value of the virtual table pointer from known
1812 aggregate values. */
1813 if (!t && known_aggs.length () > (unsigned int) param_index
1814 && !ie->indirect_info->by_ref
1815 && !ie->indirect_info->vptr_changed)
1817 struct ipa_agg_jump_function *agg;
1818 agg = known_aggs[param_index];
1819 t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
1820 true);
1823 /* If we found the virtual table pointer, lookup the target. */
1824 if (t)
1826 tree vtable;
1827 unsigned HOST_WIDE_INT offset;
1828 if (vtable_pointer_value_to_vtable (t, &vtable, &offset))
1830 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
1831 vtable, offset);
1832 if (target)
1834 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
1835 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
1836 || !possible_polymorphic_call_target_p
1837 (ie, cgraph_node::get (target)))
1838 target = ipa_impossible_devirt_target (ie, target);
1839 return target;
1844 /* Do we know the constant value of pointer? */
1845 if (!t)
1846 t = known_csts[param_index];
1848 gcc_checking_assert (!t || TREE_CODE (t) != TREE_BINFO);
1850 ipa_polymorphic_call_context context;
1851 if (known_contexts.length () > (unsigned int) param_index)
1853 context = known_contexts[param_index];
1854 context.offset_by (anc_offset);
1855 if (ie->indirect_info->vptr_changed)
1856 context.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
1857 ie->indirect_info->otr_type);
1858 if (t)
1860 ipa_polymorphic_call_context ctx2 = ipa_polymorphic_call_context
1861 (t, ie->indirect_info->otr_type, anc_offset);
1862 if (!ctx2.useless_p ())
1863 context.combine_with (ctx2, ie->indirect_info->otr_type);
1866 else if (t)
1867 context = ipa_polymorphic_call_context (t, ie->indirect_info->otr_type,
1868 anc_offset);
1869 else
1870 return NULL_TREE;
1872 vec <cgraph_node *>targets;
1873 bool final;
1875 targets = possible_polymorphic_call_targets
1876 (ie->indirect_info->otr_type,
1877 ie->indirect_info->otr_token,
1878 context, &final);
1879 if (!final || targets.length () > 1)
1880 return NULL_TREE;
1881 if (targets.length () == 1)
1882 target = targets[0]->decl;
1883 else
1884 target = ipa_impossible_devirt_target (ie, NULL_TREE);
1886 if (target && !possible_polymorphic_call_target_p (ie,
1887 cgraph_node::get (target)))
1888 target = ipa_impossible_devirt_target (ie, target);
1890 return target;
1894 /* If an indirect edge IE can be turned into a direct one based on KNOWN_CSTS,
1895 KNOWN_CONTEXTS (which can be vNULL) or KNOWN_AGGS (which also can be vNULL)
1896 return the destination. */
1898 tree
1899 ipa_get_indirect_edge_target (struct cgraph_edge *ie,
1900 vec<tree> known_csts,
1901 vec<ipa_polymorphic_call_context> known_contexts,
1902 vec<ipa_agg_jump_function_p> known_aggs)
1904 return ipa_get_indirect_edge_target_1 (ie, known_csts, known_contexts,
1905 known_aggs, NULL);
1908 /* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
1909 and KNOWN_CONTEXTS. */
1911 static int
1912 devirtualization_time_bonus (struct cgraph_node *node,
1913 vec<tree> known_csts,
1914 vec<ipa_polymorphic_call_context> known_contexts,
1915 vec<ipa_agg_jump_function_p> known_aggs)
1917 struct cgraph_edge *ie;
1918 int res = 0;
1920 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1922 struct cgraph_node *callee;
1923 struct inline_summary *isummary;
1924 enum availability avail;
1925 tree target;
1927 target = ipa_get_indirect_edge_target (ie, known_csts, known_contexts,
1928 known_aggs);
1929 if (!target)
1930 continue;
1932 /* Only bare minimum benefit for clearly un-inlineable targets. */
1933 res += 1;
1934 callee = cgraph_node::get (target);
1935 if (!callee || !callee->definition)
1936 continue;
1937 callee = callee->function_symbol (&avail);
1938 if (avail < AVAIL_AVAILABLE)
1939 continue;
1940 isummary = inline_summary (callee);
1941 if (!isummary->inlinable)
1942 continue;
1944 /* FIXME: The values below need reconsidering and perhaps also
1945 integrating into the cost metrics, at least in some very basic way. */
1946 if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
1947 res += 31;
1948 else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
1949 res += 15;
1950 else if (isummary->size <= MAX_INLINE_INSNS_AUTO
1951 || DECL_DECLARED_INLINE_P (callee->decl))
1952 res += 7;
1955 return res;
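/* Illustrative example (not part of the pass): each indirect call that the
   known values resolve to a concrete target contributes at least 1 above,
   plus a size-dependent bonus when the target is an available, inlinable
   definition.  A hypothetical node with two such calls, both resolving to
   callees whose estimated size is at most MAX_INLINE_INSNS_AUTO / 4, would
   therefore get res = 2 * (1 + 31) = 64.  */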
1958 /* Return time bonus incurred because of HINTS. */
1960 static int
1961 hint_time_bonus (inline_hints hints)
1963 int result = 0;
1964 if (hints & (INLINE_HINT_loop_iterations | INLINE_HINT_loop_stride))
1965 result += PARAM_VALUE (PARAM_IPA_CP_LOOP_HINT_BONUS);
1966 if (hints & INLINE_HINT_array_index)
1967 result += PARAM_VALUE (PARAM_IPA_CP_ARRAY_INDEX_HINT_BONUS);
1968 return result;
1971 /* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
1972 and SIZE_COST, the sum of frequencies of incoming edges to the potential
1973 new clone in FREQ_SUM and their profile counts in COUNT_SUM. */
1975 static bool
1976 good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
1977 int freq_sum, gcov_type count_sum, int size_cost)
1979 if (time_benefit == 0
1980 || !flag_ipa_cp_clone
1981 || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
1982 return false;
1984 gcc_assert (size_cost > 0);
1986 if (max_count)
1988 int factor = (count_sum * 1000) / max_count;
1989 int64_t evaluation = (((int64_t) time_benefit * factor)
1990 / size_cost);
1992 if (dump_file && (dump_flags & TDF_DETAILS))
1993 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1994 "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
1995 ") -> evaluation: " "%"PRId64
1996 ", threshold: %i\n",
1997 time_benefit, size_cost, (HOST_WIDE_INT) count_sum,
1998 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
2000 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
2002 else
2004 int64_t evaluation = (((int64_t) time_benefit * freq_sum)
2005 / size_cost);
2007 if (dump_file && (dump_flags & TDF_DETAILS))
2008 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
2009 "size: %i, freq_sum: %i) -> evaluation: "
2010 "%"PRId64 ", threshold: %i\n",
2011 time_benefit, size_cost, freq_sum, evaluation,
2012 PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
2014 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
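/* Illustrative sketch of the decision above (example numbers are assumed,
   this is not new functionality): without profile feedback it reduces to

     int64_t evaluation = (int64_t) time_benefit * freq_sum / size_cost;
     clone if evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);

   so a value with time_benefit 40, freq_sum 1000 and size_cost 8 evaluates
   to 5000.  With profile feedback, freq_sum is replaced by count_sum scaled
   by 1000 relative to max_count.  */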
2018 /* Return all context independent values from aggregate lattices in PLATS in a
2019 vector. Return NULL if there are none. */
2021 static vec<ipa_agg_jf_item, va_gc> *
2022 context_independent_aggregate_values (struct ipcp_param_lattices *plats)
2024 vec<ipa_agg_jf_item, va_gc> *res = NULL;
2026 if (plats->aggs_bottom
2027 || plats->aggs_contain_variable
2028 || plats->aggs_count == 0)
2029 return NULL;
2031 for (struct ipcp_agg_lattice *aglat = plats->aggs;
2032 aglat;
2033 aglat = aglat->next)
2034 if (aglat->is_single_const ())
2036 struct ipa_agg_jf_item item;
2037 item.offset = aglat->offset;
2038 item.value = aglat->values->value;
2039 vec_safe_push (res, item);
2041 return res;
2044 /* Allocate KNOWN_CSTS, KNOWN_CONTEXTS and, if non-NULL, KNOWN_AGGS and
2045 populate them with values of parameters that are known independently of the
2046 context. INFO describes the function. If REMOVABLE_PARAMS_COST is
2047 non-NULL, the movement cost of all removable parameters will be stored in
2048 it. Return true if at least one such constant, context or aggregate value was found. */
2050 static bool
2051 gather_context_independent_values (struct ipa_node_params *info,
2052 vec<tree> *known_csts,
2053 vec<ipa_polymorphic_call_context>
2054 *known_contexts,
2055 vec<ipa_agg_jump_function> *known_aggs,
2056 int *removable_params_cost)
2058 int i, count = ipa_get_param_count (info);
2059 bool ret = false;
2061 known_csts->create (0);
2062 known_contexts->create (0);
2063 known_csts->safe_grow_cleared (count);
2064 known_contexts->safe_grow_cleared (count);
2065 if (known_aggs)
2067 known_aggs->create (0);
2068 known_aggs->safe_grow_cleared (count);
2071 if (removable_params_cost)
2072 *removable_params_cost = 0;
2074 for (i = 0; i < count ; i++)
2076 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2077 ipcp_lattice<tree> *lat = &plats->itself;
2079 if (lat->is_single_const ())
2081 ipcp_value<tree> *val = lat->values;
2082 gcc_checking_assert (TREE_CODE (val->value) != TREE_BINFO);
2083 (*known_csts)[i] = val->value;
2084 if (removable_params_cost)
2085 *removable_params_cost
2086 += estimate_move_cost (TREE_TYPE (val->value), false);
2087 ret = true;
2089 else if (removable_params_cost
2090 && !ipa_is_param_used (info, i))
2091 *removable_params_cost
2092 += ipa_get_param_move_cost (info, i);
2094 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
2095 if (ctxlat->is_single_const ())
2097 (*known_contexts)[i] = ctxlat->values->value;
2098 ret = true;
2101 if (known_aggs)
2103 vec<ipa_agg_jf_item, va_gc> *agg_items;
2104 struct ipa_agg_jump_function *ajf;
2106 agg_items = context_independent_aggregate_values (plats);
2107 ajf = &(*known_aggs)[i];
2108 ajf->items = agg_items;
2109 ajf->by_ref = plats->aggs_by_ref;
2110 ret |= agg_items != NULL;
2114 return ret;
2117 /* The current interface in ipa-inline-analysis requires a pointer vector.
2118 Create it.
2120 FIXME: That interface should be reworked; this is slightly silly. Still,
2121 I'd like to discuss how to change it first and this demonstrates the
2122 issue. */
2124 static vec<ipa_agg_jump_function_p>
2125 agg_jmp_p_vec_for_t_vec (vec<ipa_agg_jump_function> known_aggs)
2127 vec<ipa_agg_jump_function_p> ret;
2128 struct ipa_agg_jump_function *ajf;
2129 int i;
2131 ret.create (known_aggs.length ());
2132 FOR_EACH_VEC_ELT (known_aggs, i, ajf)
2133 ret.quick_push (ajf);
2134 return ret;
2137 /* Perform time and size measurement of NODE with the context given in
2138 KNOWN_CSTS, KNOWN_CONTEXTS and KNOWN_AGGS, calculate the benefit and cost
2139 given BASE_TIME of the node without specialization, REMOVABLE_PARAMS_COST of
2140 all context-independent removable parameters and EST_MOVE_COST of the
2141 estimated movement of the considered parameter, and store the results into VAL. */
2143 static void
2144 perform_estimation_of_a_value (cgraph_node *node, vec<tree> known_csts,
2145 vec<ipa_polymorphic_call_context> known_contexts,
2146 vec<ipa_agg_jump_function_p> known_aggs_ptrs,
2147 int base_time, int removable_params_cost,
2148 int est_move_cost, ipcp_value_base *val)
2150 int time, size, time_benefit;
2151 inline_hints hints;
2153 estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
2154 known_aggs_ptrs, &size, &time,
2155 &hints);
2156 time_benefit = base_time - time
2157 + devirtualization_time_bonus (node, known_csts, known_contexts,
2158 known_aggs_ptrs)
2159 + hint_time_bonus (hints)
2160 + removable_params_cost + est_move_cost;
2162 gcc_checking_assert (size >= 0);
2163 /* The estimates based on the inliner heuristics may claim that in certain
2164 contexts some functions have no size at all, but we want all
2165 specializations to have at least a tiny cost, if only to avoid
2166 dividing by zero. */
2167 if (size == 0)
2168 size = 1;
2170 val->local_time_benefit = time_benefit;
2171 val->local_size_cost = size;
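/* In other words (a restatement with assumed example numbers, not new
   functionality), the recorded benefit is

     time_benefit = (base_time - estimated_time) + devirtualization bonus
		    + hint bonus + removable_params_cost + est_move_cost

   so with, say, base_time 500, an estimated specialized time of 420, a
   devirtualization bonus of 31, no inline hints and costs 2 + 3, the value
   is credited with a time_benefit of 116.  */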
2174 /* Iterate over known values of parameters of NODE and estimate the local
2175 effects they have in terms of time and size. */
2177 static void
2178 estimate_local_effects (struct cgraph_node *node)
2180 struct ipa_node_params *info = IPA_NODE_REF (node);
2181 int i, count = ipa_get_param_count (info);
2182 vec<tree> known_csts;
2183 vec<ipa_polymorphic_call_context> known_contexts;
2184 vec<ipa_agg_jump_function> known_aggs;
2185 vec<ipa_agg_jump_function_p> known_aggs_ptrs;
2186 bool always_const;
2187 int base_time = inline_summary (node)->time;
2188 int removable_params_cost;
2190 if (!count || !ipcp_versionable_function_p (node))
2191 return;
2193 if (dump_file && (dump_flags & TDF_DETAILS))
2194 fprintf (dump_file, "\nEstimating effects for %s/%i, base_time: %i.\n",
2195 node->name (), node->order, base_time);
2197 always_const = gather_context_independent_values (info, &known_csts,
2198 &known_contexts, &known_aggs,
2199 &removable_params_cost);
2200 known_aggs_ptrs = agg_jmp_p_vec_for_t_vec (known_aggs);
2201 if (always_const)
2203 struct caller_statistics stats;
2204 inline_hints hints;
2205 int time, size;
2207 init_caller_stats (&stats);
2208 node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
2209 false);
2210 estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
2211 known_aggs_ptrs, &size, &time, &hints);
2212 time -= devirtualization_time_bonus (node, known_csts, known_contexts,
2213 known_aggs_ptrs);
2214 time -= hint_time_bonus (hints);
2215 time -= removable_params_cost;
2216 size -= stats.n_calls * removable_params_cost;
2218 if (dump_file)
2219 fprintf (dump_file, " - context independent values, size: %i, "
2220 "time_benefit: %i\n", size, base_time - time);
2222 if (size <= 0
2223 || node->will_be_removed_from_program_if_no_direct_calls_p ())
2225 info->do_clone_for_all_contexts = true;
2226 base_time = time;
2228 if (dump_file)
2229 fprintf (dump_file, " Decided to specialize for all "
2230 "known contexts, code not going to grow.\n");
2232 else if (good_cloning_opportunity_p (node, base_time - time,
2233 stats.freq_sum, stats.count_sum,
2234 size))
2236 if (size + overall_size <= max_new_size)
2238 info->do_clone_for_all_contexts = true;
2239 base_time = time;
2240 overall_size += size;
2242 if (dump_file)
2243 fprintf (dump_file, " Decided to specialize for all "
2244 "known contexts, growth deemed beneficial.\n");
2246 else if (dump_file && (dump_flags & TDF_DETAILS))
2247 fprintf (dump_file, " Not cloning for all contexts because "
2248 "max_new_size would be reached with %li.\n",
2249 size + overall_size);
2253 for (i = 0; i < count ; i++)
2255 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2256 ipcp_lattice<tree> *lat = &plats->itself;
2257 ipcp_value<tree> *val;
2259 if (lat->bottom
2260 || !lat->values
2261 || known_csts[i])
2262 continue;
2264 for (val = lat->values; val; val = val->next)
2266 gcc_checking_assert (TREE_CODE (val->value) != TREE_BINFO);
2267 known_csts[i] = val->value;
2269 int emc = estimate_move_cost (TREE_TYPE (val->value), true);
2270 perform_estimation_of_a_value (node, known_csts, known_contexts,
2271 known_aggs_ptrs, base_time,
2272 removable_params_cost, emc, val);
2274 if (dump_file && (dump_flags & TDF_DETAILS))
2276 fprintf (dump_file, " - estimates for value ");
2277 print_ipcp_constant_value (dump_file, val->value);
2278 fprintf (dump_file, " for ");
2279 ipa_dump_param (dump_file, info, i);
2280 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
2281 val->local_time_benefit, val->local_size_cost);
2284 known_csts[i] = NULL_TREE;
2287 for (i = 0; i < count; i++)
2289 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2291 if (!plats->virt_call)
2292 continue;
2294 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
2295 ipcp_value<ipa_polymorphic_call_context> *val;
2297 if (ctxlat->bottom
2298 || !ctxlat->values
2299 || !known_contexts[i].useless_p ())
2300 continue;
2302 for (val = ctxlat->values; val; val = val->next)
2304 known_contexts[i] = val->value;
2305 perform_estimation_of_a_value (node, known_csts, known_contexts,
2306 known_aggs_ptrs, base_time,
2307 removable_params_cost, 0, val);
2309 if (dump_file && (dump_flags & TDF_DETAILS))
2311 fprintf (dump_file, " - estimates for polymorphic context ");
2312 print_ipcp_constant_value (dump_file, val->value);
2313 fprintf (dump_file, " for ");
2314 ipa_dump_param (dump_file, info, i);
2315 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
2316 val->local_time_benefit, val->local_size_cost);
2319 known_contexts[i] = ipa_polymorphic_call_context ();
2322 for (i = 0; i < count ; i++)
2324 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2325 struct ipa_agg_jump_function *ajf;
2326 struct ipcp_agg_lattice *aglat;
2328 if (plats->aggs_bottom || !plats->aggs)
2329 continue;
2331 ajf = &known_aggs[i];
2332 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2334 ipcp_value<tree> *val;
2335 if (aglat->bottom || !aglat->values
2336 /* If the following is true, the one value is in known_aggs. */
2337 || (!plats->aggs_contain_variable
2338 && aglat->is_single_const ()))
2339 continue;
2341 for (val = aglat->values; val; val = val->next)
2343 struct ipa_agg_jf_item item;
2345 item.offset = aglat->offset;
2346 item.value = val->value;
2347 vec_safe_push (ajf->items, item);
2349 perform_estimation_of_a_value (node, known_csts, known_contexts,
2350 known_aggs_ptrs, base_time,
2351 removable_params_cost, 0, val);
2353 if (dump_file && (dump_flags & TDF_DETAILS))
2355 fprintf (dump_file, " - estimates for value ");
2356 print_ipcp_constant_value (dump_file, val->value);
2357 fprintf (dump_file, " for ");
2358 ipa_dump_param (dump_file, info, i);
2359 fprintf (dump_file, "[%soffset: " HOST_WIDE_INT_PRINT_DEC
2360 "]: time_benefit: %i, size: %i\n",
2361 plats->aggs_by_ref ? "ref " : "",
2362 aglat->offset,
2363 val->local_time_benefit, val->local_size_cost);
2366 ajf->items->pop ();
2371 for (i = 0; i < count ; i++)
2372 vec_free (known_aggs[i].items);
2374 known_csts.release ();
2375 known_contexts.release ();
2376 known_aggs.release ();
2377 known_aggs_ptrs.release ();
2381 /* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
2382 topological sort of values. */
2384 template <typename valtype>
2385 void
2386 value_topo_info<valtype>::add_val (ipcp_value<valtype> *cur_val)
2388 ipcp_value_source<valtype> *src;
2390 if (cur_val->dfs)
2391 return;
2393 dfs_counter++;
2394 cur_val->dfs = dfs_counter;
2395 cur_val->low_link = dfs_counter;
2397 cur_val->topo_next = stack;
2398 stack = cur_val;
2399 cur_val->on_stack = true;
2401 for (src = cur_val->sources; src; src = src->next)
2402 if (src->val)
2404 if (src->val->dfs == 0)
2406 add_val (src->val);
2407 if (src->val->low_link < cur_val->low_link)
2408 cur_val->low_link = src->val->low_link;
2410 else if (src->val->on_stack
2411 && src->val->dfs < cur_val->low_link)
2412 cur_val->low_link = src->val->dfs;
2415 if (cur_val->dfs == cur_val->low_link)
2417 ipcp_value<valtype> *v, *scc_list = NULL;
2421 v = stack;
2422 stack = v->topo_next;
2423 v->on_stack = false;
2425 v->scc_next = scc_list;
2426 scc_list = v;
2428 while (v != cur_val);
2430 cur_val->topo_next = values_topo;
2431 values_topo = cur_val;
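/* Illustrative trace (not part of the pass): suppose value C depends on A
   while A and B depend on each other.  add_val (C) recurses into A and then
   B; B finds A still on the stack and lowers its low_link to A's dfs number,
   so when the recursion unwinds A sees dfs == low_link, pops B and A off the
   stack as a single SCC and prepends it to values_topo; C then closes as a
   singleton SCC of its own.  */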
2435 /* Add all values in lattices associated with NODE to the topological sort if
2436 they are not there yet. */
2438 static void
2439 add_all_node_vals_to_toposort (cgraph_node *node, ipa_topo_info *topo)
2441 struct ipa_node_params *info = IPA_NODE_REF (node);
2442 int i, count = ipa_get_param_count (info);
2444 for (i = 0; i < count ; i++)
2446 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2447 ipcp_lattice<tree> *lat = &plats->itself;
2448 struct ipcp_agg_lattice *aglat;
2450 if (!lat->bottom)
2452 ipcp_value<tree> *val;
2453 for (val = lat->values; val; val = val->next)
2454 topo->constants.add_val (val);
2457 if (!plats->aggs_bottom)
2458 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2459 if (!aglat->bottom)
2461 ipcp_value<tree> *val;
2462 for (val = aglat->values; val; val = val->next)
2463 topo->constants.add_val (val);
2466 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
2467 if (!ctxlat->bottom)
2469 ipcp_value<ipa_polymorphic_call_context> *ctxval;
2470 for (ctxval = ctxlat->values; ctxval; ctxval = ctxval->next)
2471 topo->contexts.add_val (ctxval);
2476 /* One pass of constant propagation along the call graph edges, from callers
2477 to callees (requires topological ordering in TOPO); iterate over strongly
2478 connected components. */
2480 static void
2481 propagate_constants_topo (struct ipa_topo_info *topo)
2483 int i;
2485 for (i = topo->nnodes - 1; i >= 0; i--)
2487 unsigned j;
2488 struct cgraph_node *v, *node = topo->order[i];
2489 vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
2491 /* First, iteratively propagate within the strongly connected component
2492 until all lattices stabilize. */
2493 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
2494 if (v->has_gimple_body_p ())
2495 push_node_to_stack (topo, v);
2497 v = pop_node_from_stack (topo);
2498 while (v)
2500 struct cgraph_edge *cs;
2502 for (cs = v->callees; cs; cs = cs->next_callee)
2503 if (ipa_edge_within_scc (cs)
2504 && propagate_constants_accross_call (cs))
2505 push_node_to_stack (topo, cs->callee);
2506 v = pop_node_from_stack (topo);
2509 /* Afterwards, propagate along edges leading out of the SCC, calculate
2510 the local effects of the discovered constants and add all valid values
2511 to their topological sort. */
2512 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
2513 if (v->has_gimple_body_p ())
2515 struct cgraph_edge *cs;
2517 estimate_local_effects (v);
2518 add_all_node_vals_to_toposort (v, topo);
2519 for (cs = v->callees; cs; cs = cs->next_callee)
2520 if (!ipa_edge_within_scc (cs))
2521 propagate_constants_accross_call (cs);
2523 cycle_nodes.release ();
2528 /* Return the sum of A and B if none of them is bigger than INT_MAX/2,
2529 otherwise return the bigger one. */
2531 static int
2532 safe_add (int a, int b)
2534 if (a > INT_MAX/2 || b > INT_MAX/2)
2535 return a > b ? a : b;
2536 else
2537 return a + b;
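/* A few example results of the saturating addition above (illustration
   only):

     safe_add (3, 4)             == 7
     safe_add (INT_MAX, 10)      == INT_MAX
     safe_add (INT_MAX/2 + 1, 5) == INT_MAX/2 + 1

   i.e. once either operand exceeds INT_MAX/2, the larger operand is
   returned instead of a possibly overflowing sum.  */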
2541 /* Propagate the estimated effects of individual values along the topological
2542 order, from the dependent values to those they depend on. */
2544 template <typename valtype>
2545 void
2546 value_topo_info<valtype>::propagate_effects ()
2548 ipcp_value<valtype> *base;
2550 for (base = values_topo; base; base = base->topo_next)
2552 ipcp_value_source<valtype> *src;
2553 ipcp_value<valtype> *val;
2554 int time = 0, size = 0;
2556 for (val = base; val; val = val->scc_next)
2558 time = safe_add (time,
2559 val->local_time_benefit + val->prop_time_benefit);
2560 size = safe_add (size, val->local_size_cost + val->prop_size_cost);
2563 for (val = base; val; val = val->scc_next)
2564 for (src = val->sources; src; src = src->next)
2565 if (src->val
2566 && src->cs->maybe_hot_p ())
2568 src->val->prop_time_benefit = safe_add (time,
2569 src->val->prop_time_benefit);
2570 src->val->prop_size_cost = safe_add (size,
2571 src->val->prop_size_cost);
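/* Illustrative example (not part of the pass): if an SCC consists of values
   A and B with local time benefits 10 and 20 and local sizes 3 and 5 (and no
   propagated amounts yet), the first loop above computes time = 30 and
   size = 8 for the whole SCC, and the second loop adds those sums to the
   prop_time_benefit and prop_size_cost of every value the SCC depends on,
   provided the edge bringing it is maybe_hot_p.  */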
2577 /* Propagate constants, polymorphic contexts and their effects from the
2578 summaries interprocedurally. */
2580 static void
2581 ipcp_propagate_stage (struct ipa_topo_info *topo)
2583 struct cgraph_node *node;
2585 if (dump_file)
2586 fprintf (dump_file, "\n Propagating constants:\n\n");
2588 if (in_lto_p)
2589 ipa_update_after_lto_read ();
2592 FOR_EACH_DEFINED_FUNCTION (node)
2594 struct ipa_node_params *info = IPA_NODE_REF (node);
2596 determine_versionability (node);
2597 if (node->has_gimple_body_p ())
2599 info->lattices = XCNEWVEC (struct ipcp_param_lattices,
2600 ipa_get_param_count (info));
2601 initialize_node_lattices (node);
2603 if (node->definition && !node->alias)
2604 overall_size += inline_summary (node)->self_size;
2605 if (node->count > max_count)
2606 max_count = node->count;
2609 max_new_size = overall_size;
2610 if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
2611 max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
2612 max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;
2614 if (dump_file)
2615 fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
2616 overall_size, max_new_size);
2618 propagate_constants_topo (topo);
2619 #ifdef ENABLE_CHECKING
2620 ipcp_verify_propagated_values ();
2621 #endif
2622 topo->constants.propagate_effects ();
2623 topo->contexts.propagate_effects ();
2625 if (dump_file)
2627 fprintf (dump_file, "\nIPA lattices after all propagation:\n");
2628 print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
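/* Worked example of the unit growth limit computed above (the parameter
   values are only hypothetical examples, the real defaults may differ):
   with an overall_size of 20000, --param large-unit-insns of 10000 and
   --param ipcp-unit-growth of 10, max_new_size starts at 20000 (already
   above the large unit limit) and becomes 20000 + 20000 * 10 / 100 + 1,
   i.e. 22001.  */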
2632 /* Discover outgoing indirect edges of NODE, which is a new clone with known
2633 KNOWN_CSTS, KNOWN_CONTEXTS and AGGVALS, whose targets are now known, and make them direct. */
2635 static void
2636 ipcp_discover_new_direct_edges (struct cgraph_node *node,
2637 vec<tree> known_csts,
2638 vec<ipa_polymorphic_call_context>
2639 known_contexts,
2640 struct ipa_agg_replacement_value *aggvals)
2642 struct cgraph_edge *ie, *next_ie;
2643 bool found = false;
2645 for (ie = node->indirect_calls; ie; ie = next_ie)
2647 tree target;
2649 next_ie = ie->next_callee;
2650 target = ipa_get_indirect_edge_target_1 (ie, known_csts, known_contexts,
2651 vNULL, aggvals);
2652 if (target)
2654 bool agg_contents = ie->indirect_info->agg_contents;
2655 bool polymorphic = ie->indirect_info->polymorphic;
2656 int param_index = ie->indirect_info->param_index;
2657 struct cgraph_edge *cs = ipa_make_edge_direct_to_target (ie, target);
2658 found = true;
2660 if (cs && !agg_contents && !polymorphic)
2662 struct ipa_node_params *info = IPA_NODE_REF (node);
2663 int c = ipa_get_controlled_uses (info, param_index);
2664 if (c != IPA_UNDESCRIBED_USE)
2666 struct ipa_ref *to_del;
2668 c--;
2669 ipa_set_controlled_uses (info, param_index, c);
2670 if (dump_file && (dump_flags & TDF_DETAILS))
2671 fprintf (dump_file, " controlled uses count of param "
2672 "%i bumped down to %i\n", param_index, c);
2673 if (c == 0
2674 && (to_del = node->find_reference (cs->callee, NULL, 0)))
2676 if (dump_file && (dump_flags & TDF_DETAILS))
2677 fprintf (dump_file, " and even removing its "
2678 "cloning-created reference\n");
2679 to_del->remove_reference ();
2685 /* Turning calls to direct calls will improve overall summary. */
2686 if (found)
2687 inline_update_overall_summary (node);
2690 /* Vectors of pointers which form linked lists of clones of an original
2691 cgraph edge. */
2693 static vec<cgraph_edge *> next_edge_clone;
2694 static vec<cgraph_edge *> prev_edge_clone;
2696 static inline void
2697 grow_edge_clone_vectors (void)
2699 if (next_edge_clone.length ()
2700 <= (unsigned) symtab->edges_max_uid)
2701 next_edge_clone.safe_grow_cleared (symtab->edges_max_uid + 1);
2702 if (prev_edge_clone.length ()
2703 <= (unsigned) symtab->edges_max_uid)
2704 prev_edge_clone.safe_grow_cleared (symtab->edges_max_uid + 1);
2707 /* Edge duplication hook to grow the appropriate linked lists in
2708 next_edge_clone and prev_edge_clone. */
2710 static void
2711 ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2712 void *)
2714 grow_edge_clone_vectors ();
2716 struct cgraph_edge *old_next = next_edge_clone[src->uid];
2717 if (old_next)
2718 prev_edge_clone[old_next->uid] = dst;
2719 prev_edge_clone[dst->uid] = src;
2721 next_edge_clone[dst->uid] = old_next;
2722 next_edge_clone[src->uid] = dst;
2725 /* Hook that is called by cgraph.c when an edge is removed. */
2727 static void
2728 ipcp_edge_removal_hook (struct cgraph_edge *cs, void *)
2730 grow_edge_clone_vectors ();
2732 struct cgraph_edge *prev = prev_edge_clone[cs->uid];
2733 struct cgraph_edge *next = next_edge_clone[cs->uid];
2734 if (prev)
2735 next_edge_clone[prev->uid] = next;
2736 if (next)
2737 prev_edge_clone[next->uid] = prev;
2740 /* See if NODE is a clone with a known aggregate value at a given OFFSET of a
2741 parameter with the given INDEX. */
2743 static tree
2744 get_clone_agg_value (struct cgraph_node *node, HOST_WIDE_INT offset,
2745 int index)
2747 struct ipa_agg_replacement_value *aggval;
2749 aggval = ipa_get_agg_replacements_for_node (node);
2750 while (aggval)
2752 if (aggval->offset == offset
2753 && aggval->index == index)
2754 return aggval->value;
2755 aggval = aggval->next;
2757 return NULL_TREE;
2760 /* Return true if edge CS does bring about the value described by SRC. */
2762 static bool
2763 cgraph_edge_brings_value_p (struct cgraph_edge *cs,
2764 ipcp_value_source<tree> *src)
2766 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2767 cgraph_node *real_dest = cs->callee->function_symbol ();
2768 struct ipa_node_params *dst_info = IPA_NODE_REF (real_dest);
2770 if ((dst_info->ipcp_orig_node && !dst_info->is_all_contexts_clone)
2771 || caller_info->node_dead)
2772 return false;
2773 if (!src->val)
2774 return true;
2776 if (caller_info->ipcp_orig_node)
2778 tree t;
2779 if (src->offset == -1)
2780 t = caller_info->known_csts[src->index];
2781 else
2782 t = get_clone_agg_value (cs->caller, src->offset, src->index);
2783 return (t != NULL_TREE
2784 && values_equal_for_ipcp_p (src->val->value, t));
2786 else
2788 struct ipcp_agg_lattice *aglat;
2789 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
2790 src->index);
2791 if (src->offset == -1)
2792 return (plats->itself.is_single_const ()
2793 && values_equal_for_ipcp_p (src->val->value,
2794 plats->itself.values->value));
2795 else
2797 if (plats->aggs_bottom || plats->aggs_contain_variable)
2798 return false;
2799 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2800 if (aglat->offset == src->offset)
2801 return (aglat->is_single_const ()
2802 && values_equal_for_ipcp_p (src->val->value,
2803 aglat->values->value));
2805 return false;
2809 /* Return true if edge CS does bring about the value described by SRC. */
2811 static bool
2812 cgraph_edge_brings_value_p (struct cgraph_edge *cs,
2813 ipcp_value_source<ipa_polymorphic_call_context>
2814 *src)
2816 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2817 cgraph_node *real_dest = cs->callee->function_symbol ();
2818 struct ipa_node_params *dst_info = IPA_NODE_REF (real_dest);
2820 if ((dst_info->ipcp_orig_node && !dst_info->is_all_contexts_clone)
2821 || caller_info->node_dead)
2822 return false;
2823 if (!src->val)
2824 return true;
2826 if (caller_info->ipcp_orig_node)
2827 return (caller_info->known_contexts.length () > (unsigned) src->index)
2828 && values_equal_for_ipcp_p (src->val->value,
2829 caller_info->known_contexts[src->index]);
2831 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
2832 src->index);
2833 return plats->ctxlat.is_single_const ()
2834 && values_equal_for_ipcp_p (src->val->value,
2835 plats->ctxlat.values->value);
2838 /* Get the next clone in the linked list of clones of an edge. */
2840 static inline struct cgraph_edge *
2841 get_next_cgraph_edge_clone (struct cgraph_edge *cs)
2843 return next_edge_clone[cs->uid];
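/* Typical usage sketch (the functions below follow this pattern): starting
   from the edge recorded in a value source, walk the original edge and all
   of its clones:

     for (struct cgraph_edge *cs = src->cs; cs;
	  cs = get_next_cgraph_edge_clone (cs))
       if (cgraph_edge_brings_value_p (cs, src))
	 ...;
*/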
2846 /* Given VAL, iterate over all its sources and if they still hold, add their
2847 edge frequency, their profile count and their number into *FREQ_SUM,
2848 *COUNT_SUM and *CALLER_COUNT respectively; return whether any of them is hot. */
2850 template <typename valtype>
2851 static bool
2852 get_info_about_necessary_edges (ipcp_value<valtype> *val, int *freq_sum,
2853 gcov_type *count_sum, int *caller_count)
2855 ipcp_value_source<valtype> *src;
2856 int freq = 0, count = 0;
2857 gcov_type cnt = 0;
2858 bool hot = false;
2860 for (src = val->sources; src; src = src->next)
2862 struct cgraph_edge *cs = src->cs;
2863 while (cs)
2865 if (cgraph_edge_brings_value_p (cs, src))
2867 count++;
2868 freq += cs->frequency;
2869 cnt += cs->count;
2870 hot |= cs->maybe_hot_p ();
2872 cs = get_next_cgraph_edge_clone (cs);
2876 *freq_sum = freq;
2877 *count_sum = cnt;
2878 *caller_count = count;
2879 return hot;
2882 /* Return a vector of incoming edges that do bring value VAL. It is assumed
2883 their number is known and equal to CALLER_COUNT. */
2885 template <typename valtype>
2886 static vec<cgraph_edge *>
2887 gather_edges_for_value (ipcp_value<valtype> *val, int caller_count)
2889 ipcp_value_source<valtype> *src;
2890 vec<cgraph_edge *> ret;
2892 ret.create (caller_count);
2893 for (src = val->sources; src; src = src->next)
2895 struct cgraph_edge *cs = src->cs;
2896 while (cs)
2898 if (cgraph_edge_brings_value_p (cs, src))
2899 ret.quick_push (cs);
2900 cs = get_next_cgraph_edge_clone (cs);
2904 return ret;
2907 /* Construct a replacement map for a known VALUE of formal parameter number
2908 PARM_NUM of the function described by INFO. Return it or NULL if for some reason it cannot be created. */
2910 static struct ipa_replace_map *
2911 get_replacement_map (struct ipa_node_params *info, tree value, int parm_num)
2913 struct ipa_replace_map *replace_map;
2916 replace_map = ggc_alloc<ipa_replace_map> ();
2917 if (dump_file)
2919 fprintf (dump_file, " replacing ");
2920 ipa_dump_param (dump_file, info, parm_num);
2922 fprintf (dump_file, " with const ");
2923 print_generic_expr (dump_file, value, 0);
2924 fprintf (dump_file, "\n");
2926 replace_map->old_tree = NULL;
2927 replace_map->parm_num = parm_num;
2928 replace_map->new_tree = value;
2929 replace_map->replace_p = true;
2930 replace_map->ref_p = false;
2932 return replace_map;
2935 /* Dump new profiling counts. */
2937 static void
2938 dump_profile_updates (struct cgraph_node *orig_node,
2939 struct cgraph_node *new_node)
2941 struct cgraph_edge *cs;
2943 fprintf (dump_file, " setting count of the specialized node to "
2944 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) new_node->count);
2945 for (cs = new_node->callees; cs ; cs = cs->next_callee)
2946 fprintf (dump_file, " edge to %s has count "
2947 HOST_WIDE_INT_PRINT_DEC "\n",
2948 cs->callee->name (), (HOST_WIDE_INT) cs->count);
2950 fprintf (dump_file, " setting count of the original node to "
2951 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) orig_node->count);
2952 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
2953 fprintf (dump_file, " edge to %s is left with "
2954 HOST_WIDE_INT_PRINT_DEC "\n",
2955 cs->callee->name (), (HOST_WIDE_INT) cs->count);
2958 /* After a specialized NEW_NODE version of ORIG_NODE has been created, update
2959 their profile information to reflect this. */
2961 static void
2962 update_profiling_info (struct cgraph_node *orig_node,
2963 struct cgraph_node *new_node)
2965 struct cgraph_edge *cs;
2966 struct caller_statistics stats;
2967 gcov_type new_sum, orig_sum;
2968 gcov_type remainder, orig_node_count = orig_node->count;
2970 if (orig_node_count == 0)
2971 return;
2973 init_caller_stats (&stats);
2974 orig_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
2975 false);
2976 orig_sum = stats.count_sum;
2977 init_caller_stats (&stats);
2978 new_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
2979 false);
2980 new_sum = stats.count_sum;
2982 if (orig_node_count < orig_sum + new_sum)
2984 if (dump_file)
2985 fprintf (dump_file, " Problem: node %s/%i has too low count "
2986 HOST_WIDE_INT_PRINT_DEC " while the sum of incoming "
2987 "counts is " HOST_WIDE_INT_PRINT_DEC "\n",
2988 orig_node->name (), orig_node->order,
2989 (HOST_WIDE_INT) orig_node_count,
2990 (HOST_WIDE_INT) (orig_sum + new_sum));
2992 orig_node_count = (orig_sum + new_sum) * 12 / 10;
2993 if (dump_file)
2994 fprintf (dump_file, " proceeding by pretending it was "
2995 HOST_WIDE_INT_PRINT_DEC "\n",
2996 (HOST_WIDE_INT) orig_node_count);
2999 new_node->count = new_sum;
3000 remainder = orig_node_count - new_sum;
3001 orig_node->count = remainder;
3003 for (cs = new_node->callees; cs ; cs = cs->next_callee)
3004 if (cs->frequency)
3005 cs->count = apply_probability (cs->count,
3006 GCOV_COMPUTE_SCALE (new_sum,
3007 orig_node_count));
3008 else
3009 cs->count = 0;
3011 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
3012 cs->count = apply_probability (cs->count,
3013 GCOV_COMPUTE_SCALE (remainder,
3014 orig_node_count));
3016 if (dump_file)
3017 dump_profile_updates (orig_node, new_node);
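/* Worked example with hypothetical counts: if orig_node has a count of 1000
   and the callers redirected to the new clone account for a new_sum of 600,
   the code above sets new_node->count to 600, orig_node->count to the
   remaining 400, and scales the counts of their outgoing edges by 600/1000
   and 400/1000 respectively.  If the incoming sums exceeded the original
   count, the original count would first be bumped up to 1.2 times their
   sum.  */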
3020 /* Update the respective profile of specialized NEW_NODE and the original
3021 ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
3022 have been redirected to the specialized version. */
3024 static void
3025 update_specialized_profile (struct cgraph_node *new_node,
3026 struct cgraph_node *orig_node,
3027 gcov_type redirected_sum)
3029 struct cgraph_edge *cs;
3030 gcov_type new_node_count, orig_node_count = orig_node->count;
3032 if (dump_file)
3033 fprintf (dump_file, " the sum of counts of redirected edges is "
3034 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) redirected_sum);
3035 if (orig_node_count == 0)
3036 return;
3038 gcc_assert (orig_node_count >= redirected_sum);
3040 new_node_count = new_node->count;
3041 new_node->count += redirected_sum;
3042 orig_node->count -= redirected_sum;
3044 for (cs = new_node->callees; cs ; cs = cs->next_callee)
3045 if (cs->frequency)
3046 cs->count += apply_probability (cs->count,
3047 GCOV_COMPUTE_SCALE (redirected_sum,
3048 new_node_count));
3049 else
3050 cs->count = 0;
3052 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
3054 gcov_type dec = apply_probability (cs->count,
3055 GCOV_COMPUTE_SCALE (redirected_sum,
3056 orig_node_count));
3057 if (dec < cs->count)
3058 cs->count -= dec;
3059 else
3060 cs->count = 0;
3063 if (dump_file)
3064 dump_profile_updates (orig_node, new_node);
3067 /* Create a specialized version of NODE with known constants in KNOWN_CSTS,
3068 known contexts in KNOWN_CONTEXTS and known aggregate values in AGGVALS and
3069 redirect all edges in CALLERS to it. */
3071 static struct cgraph_node *
3072 create_specialized_node (struct cgraph_node *node,
3073 vec<tree> known_csts,
3074 vec<ipa_polymorphic_call_context> known_contexts,
3075 struct ipa_agg_replacement_value *aggvals,
3076 vec<cgraph_edge *> callers)
3078 struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
3079 vec<ipa_replace_map *, va_gc> *replace_trees = NULL;
3080 struct ipa_agg_replacement_value *av;
3081 struct cgraph_node *new_node;
3082 int i, count = ipa_get_param_count (info);
3083 bitmap args_to_skip;
3085 gcc_assert (!info->ipcp_orig_node);
3087 if (node->local.can_change_signature)
3089 args_to_skip = BITMAP_GGC_ALLOC ();
3090 for (i = 0; i < count; i++)
3092 tree t = known_csts[i];
3094 if (t || !ipa_is_param_used (info, i))
3095 bitmap_set_bit (args_to_skip, i);
3098 else
3100 args_to_skip = NULL;
3101 if (dump_file && (dump_flags & TDF_DETAILS))
3102 fprintf (dump_file, " cannot change function signature\n");
3105 for (i = 0; i < count ; i++)
3107 tree t = known_csts[i];
3108 if (t)
3110 struct ipa_replace_map *replace_map;
3112 gcc_checking_assert (TREE_CODE (t) != TREE_BINFO);
3113 replace_map = get_replacement_map (info, t, i);
3114 if (replace_map)
3115 vec_safe_push (replace_trees, replace_map);
3119 new_node = node->create_virtual_clone (callers, replace_trees,
3120 args_to_skip, "constprop");
3121 ipa_set_node_agg_value_chain (new_node, aggvals);
3122 for (av = aggvals; av; av = av->next)
3123 new_node->maybe_create_reference (av->value, IPA_REF_ADDR, NULL);
3125 if (dump_file && (dump_flags & TDF_DETAILS))
3127 fprintf (dump_file, " the new node is %s/%i.\n",
3128 new_node->name (), new_node->order);
3129 if (known_contexts.exists ())
3131 for (i = 0; i < count ; i++)
3132 if (!known_contexts[i].useless_p ())
3134 fprintf (dump_file, " known ctx %i is ", i);
3135 known_contexts[i].dump (dump_file);
3138 if (aggvals)
3139 ipa_dump_agg_replacement_values (dump_file, aggvals);
3141 ipa_check_create_node_params ();
3142 update_profiling_info (node, new_node);
3143 new_info = IPA_NODE_REF (new_node);
3144 new_info->ipcp_orig_node = node;
3145 new_info->known_csts = known_csts;
3146 new_info->known_contexts = known_contexts;
3148 ipcp_discover_new_direct_edges (new_node, known_csts, known_contexts, aggvals);
3150 callers.release ();
3151 return new_node;
3154 /* Given a NODE, and a subset of its CALLERS, try to populate blank slots in
3155 KNOWN_CSTS with constants that are also known for all of the CALLERS. */
3157 static void
3158 find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
3159 vec<tree> known_csts,
3160 vec<cgraph_edge *> callers)
3162 struct ipa_node_params *info = IPA_NODE_REF (node);
3163 int i, count = ipa_get_param_count (info);
3165 for (i = 0; i < count ; i++)
3167 struct cgraph_edge *cs;
3168 tree newval = NULL_TREE;
3169 int j;
3170 bool first = true;
3172 if (ipa_get_scalar_lat (info, i)->bottom || known_csts[i])
3173 continue;
3175 FOR_EACH_VEC_ELT (callers, j, cs)
3177 struct ipa_jump_func *jump_func;
3178 tree t;
3180 if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
3182 newval = NULL_TREE;
3183 break;
3185 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
3186 t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func);
3187 if (!t
3188 || (newval
3189 && !values_equal_for_ipcp_p (t, newval))
3190 || (!first && !newval))
3192 newval = NULL_TREE;
3193 break;
3195 else
3196 newval = t;
3197 first = false;
3200 if (newval)
3202 if (dump_file && (dump_flags & TDF_DETAILS))
3204 fprintf (dump_file, " adding an extra known scalar value ");
3205 print_ipcp_constant_value (dump_file, newval);
3206 fprintf (dump_file, " for ");
3207 ipa_dump_param (dump_file, info, i);
3208 fprintf (dump_file, "\n");
3211 known_csts[i] = newval;
3216 /* Given a NODE and a subset of its CALLERS, try to populate blank slots in
3217 KNOWN_CONTEXTS with polymorphic contexts that are also known for all of the
3218 CALLERS. */
3220 static void
3221 find_more_contexts_for_caller_subset (cgraph_node *node,
3222 vec<ipa_polymorphic_call_context>
3223 *known_contexts,
3224 vec<cgraph_edge *> callers)
3226 ipa_node_params *info = IPA_NODE_REF (node);
3227 int i, count = ipa_get_param_count (info);
3229 for (i = 0; i < count ; i++)
3231 cgraph_edge *cs;
3233 if (ipa_get_poly_ctx_lat (info, i)->bottom
3234 || (known_contexts->exists ()
3235 && !(*known_contexts)[i].useless_p ()))
3236 continue;
3238 ipa_polymorphic_call_context newval;
3239 bool first = true;
3240 int j;
3242 FOR_EACH_VEC_ELT (callers, j, cs)
3244 if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
3245 return;
3246 ipa_jump_func *jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs),
3247 i);
3248 ipa_polymorphic_call_context ctx;
3249 ctx = ipa_context_from_jfunc (IPA_NODE_REF (cs->caller), cs, i,
3250 jfunc);
3251 if (first)
3253 newval = ctx;
3254 first = false;
3256 else
3257 newval.meet_with (ctx);
3258 if (newval.useless_p ())
3259 break;
3262 if (!newval.useless_p ())
3264 if (dump_file && (dump_flags & TDF_DETAILS))
3266 fprintf (dump_file, " adding an extra known polymorphic "
3267 "context ");
3268 print_ipcp_constant_value (dump_file, newval);
3269 fprintf (dump_file, " for ");
3270 ipa_dump_param (dump_file, info, i);
3271 fprintf (dump_file, "\n");
3274 if (!known_contexts->exists ())
3275 known_contexts->safe_grow_cleared (ipa_get_param_count (info));
3276 (*known_contexts)[i] = newval;
3282 /* Go through PLATS and create a vector of items consisting of values and
3283 offsets (minus OFFSET) of aggregate lattices that contain only a single value. */
3285 static vec<ipa_agg_jf_item>
3286 copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
3288 vec<ipa_agg_jf_item> res = vNULL;
3290 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
3291 return vNULL;
3293 for (struct ipcp_agg_lattice *aglat = plats->aggs; aglat; aglat = aglat->next)
3294 if (aglat->is_single_const ())
3296 struct ipa_agg_jf_item ti;
3297 ti.offset = aglat->offset - offset;
3298 ti.value = aglat->values->value;
3299 res.safe_push (ti);
3301 return res;
3304 /* Intersect all values in INTER with single value lattices in PLATS (while
3305 subtracting OFFSET). */
3307 static void
3308 intersect_with_plats (struct ipcp_param_lattices *plats,
3309 vec<ipa_agg_jf_item> *inter,
3310 HOST_WIDE_INT offset)
3312 struct ipcp_agg_lattice *aglat;
3313 struct ipa_agg_jf_item *item;
3314 int k;
3316 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
3318 inter->release ();
3319 return;
3322 aglat = plats->aggs;
3323 FOR_EACH_VEC_ELT (*inter, k, item)
3325 bool found = false;
3326 if (!item->value)
3327 continue;
3328 while (aglat)
3330 if (aglat->offset - offset > item->offset)
3331 break;
3332 if (aglat->offset - offset == item->offset)
3334 gcc_checking_assert (item->value);
3335 if (values_equal_for_ipcp_p (item->value, aglat->values->value))
3336 found = true;
3337 break;
3339 aglat = aglat->next;
3341 if (!found)
3342 item->value = NULL_TREE;
3346 /* Copy aggregate replacement values of NODE (which is an IPA-CP clone) into
3347 a vector, subtracting OFFSET from the individual value offsets, and return it. */
3349 static vec<ipa_agg_jf_item>
3350 agg_replacements_to_vector (struct cgraph_node *node, int index,
3351 HOST_WIDE_INT offset)
3353 struct ipa_agg_replacement_value *av;
3354 vec<ipa_agg_jf_item> res = vNULL;
3356 for (av = ipa_get_agg_replacements_for_node (node); av; av = av->next)
3357 if (av->index == index
3358 && (av->offset - offset) >= 0)
3360 struct ipa_agg_jf_item item;
3361 gcc_checking_assert (av->value);
3362 item.offset = av->offset - offset;
3363 item.value = av->value;
3364 res.safe_push (item);
3367 return res;
3370 /* Intersect all values in INTER with those that we have already scheduled to
3371 be replaced in parameter number INDEX of NODE, which is an IPA-CP clone
3372 (while subtracting OFFSET). */
3374 static void
3375 intersect_with_agg_replacements (struct cgraph_node *node, int index,
3376 vec<ipa_agg_jf_item> *inter,
3377 HOST_WIDE_INT offset)
3379 struct ipa_agg_replacement_value *srcvals;
3380 struct ipa_agg_jf_item *item;
3381 int i;
3383 srcvals = ipa_get_agg_replacements_for_node (node);
3384 if (!srcvals)
3386 inter->release ();
3387 return;
3390 FOR_EACH_VEC_ELT (*inter, i, item)
3392 struct ipa_agg_replacement_value *av;
3393 bool found = false;
3394 if (!item->value)
3395 continue;
3396 for (av = srcvals; av; av = av->next)
3398 gcc_checking_assert (av->value);
3399 if (av->index == index
3400 && av->offset - offset == item->offset)
3402 if (values_equal_for_ipcp_p (item->value, av->value))
3403 found = true;
3404 break;
3407 if (!found)
3408 item->value = NULL_TREE;
3412 /* Intersect values in INTER with aggregate values that come along edge CS to
3413 parameter number INDEX and return it. If INTER does not actually exist yet,
3414 copy all incoming values to it. If we determine we ended up with no values
3415 whatsoever, return a released vector. */
3417 static vec<ipa_agg_jf_item>
3418 intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
3419 vec<ipa_agg_jf_item> inter)
3421 struct ipa_jump_func *jfunc;
3422 jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), index);
3423 if (jfunc->type == IPA_JF_PASS_THROUGH
3424 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3426 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
3427 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
3429 if (caller_info->ipcp_orig_node)
3431 struct cgraph_node *orig_node = caller_info->ipcp_orig_node;
3432 struct ipcp_param_lattices *orig_plats;
3433 orig_plats = ipa_get_parm_lattices (IPA_NODE_REF (orig_node),
3434 src_idx);
3435 if (agg_pass_through_permissible_p (orig_plats, jfunc))
3437 if (!inter.exists ())
3438 inter = agg_replacements_to_vector (cs->caller, src_idx, 0);
3439 else
3440 intersect_with_agg_replacements (cs->caller, src_idx,
3441 &inter, 0);
3443 else
3445 inter.release ();
3446 return vNULL;
3449 else
3451 struct ipcp_param_lattices *src_plats;
3452 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
3453 if (agg_pass_through_permissible_p (src_plats, jfunc))
3455 /* Currently we do not produce clobber aggregate jump
3456 functions, adjust when we do. */
3457 gcc_checking_assert (!jfunc->agg.items);
3458 if (!inter.exists ())
3459 inter = copy_plats_to_inter (src_plats, 0);
3460 else
3461 intersect_with_plats (src_plats, &inter, 0);
3463 else
3465 inter.release ();
3466 return vNULL;
3470 else if (jfunc->type == IPA_JF_ANCESTOR
3471 && ipa_get_jf_ancestor_agg_preserved (jfunc))
3473 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
3474 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
3475 struct ipcp_param_lattices *src_plats;
3476 HOST_WIDE_INT delta = ipa_get_jf_ancestor_offset (jfunc);
3478 if (caller_info->ipcp_orig_node)
3480 if (!inter.exists ())
3481 inter = agg_replacements_to_vector (cs->caller, src_idx, delta);
3482 else
3483 intersect_with_agg_replacements (cs->caller, src_idx, &inter,
3484 delta);
3486 else
3488 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
3489 /* Currently we do not produce clobber aggregate jump
3490 functions, adjust when we do. */
3491 gcc_checking_assert (!src_plats->aggs || !jfunc->agg.items);
3492 if (!inter.exists ())
3493 inter = copy_plats_to_inter (src_plats, delta);
3494 else
3495 intersect_with_plats (src_plats, &inter, delta);
3498 else if (jfunc->agg.items)
3500 struct ipa_agg_jf_item *item;
3501 int k;
3503 if (!inter.exists ())
3504 for (unsigned i = 0; i < jfunc->agg.items->length (); i++)
3505 inter.safe_push ((*jfunc->agg.items)[i]);
3506 else
3507 FOR_EACH_VEC_ELT (inter, k, item)
3509 int l = 0;
3510 bool found = false;
3512 if (!item->value)
3513 continue;
3515 while ((unsigned) l < jfunc->agg.items->length ())
3517 struct ipa_agg_jf_item *ti;
3518 ti = &(*jfunc->agg.items)[l];
3519 if (ti->offset > item->offset)
3520 break;
3521 if (ti->offset == item->offset)
3523 gcc_checking_assert (ti->value);
3524 if (values_equal_for_ipcp_p (item->value,
3525 ti->value))
3526 found = true;
3527 break;
3529 l++;
3531 if (!found)
3532 item->value = NULL;
3535 else
3537 inter.release ();
3538 return vec<ipa_agg_jf_item>();
3540 return inter;
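/* Illustrative example (not part of the pass): assume INTER already holds
   items {offset 0, value 1} and {offset 32, value 7} while the jump function
   of CS carries aggregate items {offset 0, value 1} and {offset 32, value 9}.
   The final branch above keeps the item at offset 0 but clears the value of
   the item at offset 32, because no item with the same offset and an
   ipcp-equal value was found; callers later skip items with a NULL value.  */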
3543 /* Look at edges in CALLERS and collect all known aggregate values that arrive
3544 from all of them. */
3546 static struct ipa_agg_replacement_value *
3547 find_aggregate_values_for_callers_subset (struct cgraph_node *node,
3548 vec<cgraph_edge *> callers)
3550 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
3551 struct ipa_agg_replacement_value *res;
3552 struct ipa_agg_replacement_value **tail = &res;
3553 struct cgraph_edge *cs;
3554 int i, j, count = ipa_get_param_count (dest_info);
3556 FOR_EACH_VEC_ELT (callers, j, cs)
3558 int c = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
3559 if (c < count)
3560 count = c;
3563 for (i = 0; i < count ; i++)
3565 struct cgraph_edge *cs;
3566 vec<ipa_agg_jf_item> inter = vNULL;
3567 struct ipa_agg_jf_item *item;
3568 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (dest_info, i);
3569 int j;
3571 /* Among other things, the following check should deal with all by_ref
3572 mismatches. */
3573 if (plats->aggs_bottom)
3574 continue;
3576 FOR_EACH_VEC_ELT (callers, j, cs)
3578 inter = intersect_aggregates_with_edge (cs, i, inter);
3580 if (!inter.exists ())
3581 goto next_param;
3584 FOR_EACH_VEC_ELT (inter, j, item)
3586 struct ipa_agg_replacement_value *v;
3588 if (!item->value)
3589 continue;
3591 v = ggc_alloc<ipa_agg_replacement_value> ();
3592 v->index = i;
3593 v->offset = item->offset;
3594 v->value = item->value;
3595 v->by_ref = plats->aggs_by_ref;
3596 *tail = v;
3597 tail = &v->next;
3600 next_param:
3601 if (inter.exists ())
3602 inter.release ();
3604 *tail = NULL;
3605 return res;
3608 /* Turn KNOWN_AGGS into a list of aggregate replacement values. */
3610 static struct ipa_agg_replacement_value *
3611 known_aggs_to_agg_replacement_list (vec<ipa_agg_jump_function> known_aggs)
3613 struct ipa_agg_replacement_value *res;
3614 struct ipa_agg_replacement_value **tail = &res;
3615 struct ipa_agg_jump_function *aggjf;
3616 struct ipa_agg_jf_item *item;
3617 int i, j;
3619 FOR_EACH_VEC_ELT (known_aggs, i, aggjf)
3620 FOR_EACH_VEC_SAFE_ELT (aggjf->items, j, item)
3622 struct ipa_agg_replacement_value *v;
3623 v = ggc_alloc<ipa_agg_replacement_value> ();
3624 v->index = i;
3625 v->offset = item->offset;
3626 v->value = item->value;
3627 v->by_ref = aggjf->by_ref;
3628 *tail = v;
3629 tail = &v->next;
3631 *tail = NULL;
3632 return res;
3635 /* Determine whether CS also brings all scalar values that the NODE is
3636 specialized for. */
3638 static bool
3639 cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
3640 struct cgraph_node *node)
3642 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
3643 int count = ipa_get_param_count (dest_info);
3644 struct ipa_node_params *caller_info;
3645 struct ipa_edge_args *args;
3646 int i;
3648 caller_info = IPA_NODE_REF (cs->caller);
3649 args = IPA_EDGE_REF (cs);
3650 for (i = 0; i < count; i++)
3652 struct ipa_jump_func *jump_func;
3653 tree val, t;
3655 val = dest_info->known_csts[i];
3656 if (!val)
3657 continue;
3659 if (i >= ipa_get_cs_argument_count (args))
3660 return false;
3661 jump_func = ipa_get_ith_jump_func (args, i);
3662 t = ipa_value_from_jfunc (caller_info, jump_func);
3663 if (!t || !values_equal_for_ipcp_p (val, t))
3664 return false;
3666 return true;
3669 /* Determine whether CS also brings all aggregate values that NODE is
3670 specialized for. */
3671 static bool
3672 cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
3673 struct cgraph_node *node)
3675 struct ipa_node_params *orig_caller_info = IPA_NODE_REF (cs->caller);
3676 struct ipa_node_params *orig_node_info;
3677 struct ipa_agg_replacement_value *aggval;
3678 int i, ec, count;
3680 aggval = ipa_get_agg_replacements_for_node (node);
3681 if (!aggval)
3682 return true;
3684 count = ipa_get_param_count (IPA_NODE_REF (node));
3685 ec = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
3686 if (ec < count)
3687 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3688 if (av->index >= ec)
3689 return false;
3691 orig_node_info = IPA_NODE_REF (IPA_NODE_REF (node)->ipcp_orig_node);
3692 if (orig_caller_info->ipcp_orig_node)
3693 orig_caller_info = IPA_NODE_REF (orig_caller_info->ipcp_orig_node);
3695 for (i = 0; i < count; i++)
3697 static vec<ipa_agg_jf_item> values = vec<ipa_agg_jf_item>();
3698 struct ipcp_param_lattices *plats;
3699 bool interesting = false;
3700 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3701 if (av->index == i)
3703 interesting = true;
3704 break;
3706 if (!interesting)
3707 continue;
3709 plats = ipa_get_parm_lattices (orig_node_info, i);
3710 if (plats->aggs_bottom)
3711 return false;
3713 values = intersect_aggregates_with_edge (cs, i, values);
3714 if (!values.exists ())
3715 return false;
3717 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3718 if (av->index == i)
3720 struct ipa_agg_jf_item *item;
3721 int j;
3722 bool found = false;
3723 FOR_EACH_VEC_ELT (values, j, item)
3724 if (item->value
3725 && item->offset == av->offset
3726 && values_equal_for_ipcp_p (item->value, av->value))
3728 found = true;
3729 break;
3731 if (!found)
3733 values.release ();
3734 return false;
3738 return true;
3741 /* Given an original NODE and a VAL for which we have already created a
3742 specialized clone, look whether there are incoming edges that still lead
3743 into the old node but now also bring the requested value and also conform to
3744 all other criteria such that they can be redirected to the specialized node.
3745 This function can therefore redirect the final edge in an SCC. */
3747 template <typename valtype>
3748 static void
3749 perhaps_add_new_callers (cgraph_node *node, ipcp_value<valtype> *val)
3751 ipcp_value_source<valtype> *src;
3752 gcov_type redirected_sum = 0;
3754 for (src = val->sources; src; src = src->next)
3756 struct cgraph_edge *cs = src->cs;
3757 while (cs)
3759 enum availability availability;
3760 struct cgraph_node *dst = cs->callee->function_symbol (&availability);
3761 if ((dst == node || IPA_NODE_REF (dst)->is_all_contexts_clone)
3762 && availability > AVAIL_INTERPOSABLE
3763 && cgraph_edge_brings_value_p (cs, src))
3765 if (cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
3766 && cgraph_edge_brings_all_agg_vals_for_node (cs,
3767 val->spec_node))
3769 if (dump_file)
3770 fprintf (dump_file, " - adding an extra caller %s/%i"
3771 " of %s/%i\n",
3772 xstrdup (cs->caller->name ()),
3773 cs->caller->order,
3774 xstrdup (val->spec_node->name ()),
3775 val->spec_node->order);
3777 cs->redirect_callee (val->spec_node);
3778 redirected_sum += cs->count;
3781 cs = get_next_cgraph_edge_clone (cs);
3785 if (redirected_sum)
3786 update_specialized_profile (val->spec_node, node, redirected_sum);
3789 /* Return true if KNOWN_CONTEXTS contain at least one useful context. */
3791 static bool
3792 known_contexts_useful_p (vec<ipa_polymorphic_call_context> known_contexts)
3794 ipa_polymorphic_call_context *ctx;
3795 int i;
3797 FOR_EACH_VEC_ELT (known_contexts, i, ctx)
3798 if (!ctx->useless_p ())
3799 return true;
3800 return false;
3803 /* Return a copy of KNOWN_CONTEXTS if it contains at least one useful context, otherwise return vNULL. */
3805 static vec<ipa_polymorphic_call_context>
3806 copy_useful_known_contexts (vec<ipa_polymorphic_call_context> known_contexts)
3808 if (known_contexts_useful_p (known_contexts))
3809 return known_contexts.copy ();
3810 else
3811 return vNULL;
3814 /* Copy KNOWN_CSTS and modify the copy according to VAL and INDEX. If
3815 non-empty, replace KNOWN_CONTEXTS with its copy too. */
3817 static void
3818 modify_known_vectors_with_val (vec<tree> *known_csts,
3819 vec<ipa_polymorphic_call_context> *known_contexts,
3820 ipcp_value<tree> *val,
3821 int index)
3823 *known_csts = known_csts->copy ();
3824 *known_contexts = copy_useful_known_contexts (*known_contexts);
3825 (*known_csts)[index] = val->value;
3828 /* Replace KNOWN_CSTS with its copy. Also copy KNOWN_CONTEXTS and modify the
3829 copy according to VAL and INDEX. */
3831 static void
3832 modify_known_vectors_with_val (vec<tree> *known_csts,
3833 vec<ipa_polymorphic_call_context> *known_contexts,
3834 ipcp_value<ipa_polymorphic_call_context> *val,
3835 int index)
3837 *known_csts = known_csts->copy ();
3838 *known_contexts = known_contexts->copy ();
3839 (*known_contexts)[index] = val->value;
3842 /* Return true if OFFSET indicates this was not an aggregate value or there is
3843 a replacement equivalent to VALUE, INDEX and OFFSET among those in the
3844 AGGVALS list. */
3846 DEBUG_FUNCTION bool
3847 ipcp_val_agg_replacement_ok_p (ipa_agg_replacement_value *aggvals,
3848 int index, HOST_WIDE_INT offset, tree value)
3850 if (offset == -1)
3851 return true;
3853 while (aggvals)
3855 if (aggvals->index == index
3856 && aggvals->offset == offset
3857 && values_equal_for_ipcp_p (aggvals->value, value))
3858 return true;
3859 aggvals = aggvals->next;
3861 return false;
3864 /* Return true if OFFSET is minus one, because the source of a polymorphic
3865 context cannot be an aggregate value. */
3867 DEBUG_FUNCTION bool
3868 ipcp_val_agg_replacement_ok_p (ipa_agg_replacement_value *,
3869 int , HOST_WIDE_INT offset,
3870 ipa_polymorphic_call_context)
3872 return offset == -1;
3875 /* Decide whether to create a special version of NODE for value VAL of parameter
3876 at the given INDEX. If OFFSET is -1, the value is for the parameter itself,
3877 otherwise it is stored at the given OFFSET of the parameter. KNOWN_CSTS,
3878 KNOWN_CONTEXTS and KNOWN_AGGS describe the other already known values. */
3880 template <typename valtype>
3881 static bool
3882 decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
3883 ipcp_value<valtype> *val, vec<tree> known_csts,
3884 vec<ipa_polymorphic_call_context> known_contexts)
3886 struct ipa_agg_replacement_value *aggvals;
3887 int freq_sum, caller_count;
3888 gcov_type count_sum;
3889 vec<cgraph_edge *> callers;
3891 if (val->spec_node)
3893 perhaps_add_new_callers (node, val);
3894 return false;
3896 else if (val->local_size_cost + overall_size > max_new_size)
3898 if (dump_file && (dump_flags & TDF_DETAILS))
3899 fprintf (dump_file, " Ignoring candidate value because "
3900 "max_new_size would be reached with %li.\n",
3901 val->local_size_cost + overall_size);
3902 return false;
3904 else if (!get_info_about_necessary_edges (val, &freq_sum, &count_sum,
3905 &caller_count))
3906 return false;
3908 if (dump_file && (dump_flags & TDF_DETAILS))
3910 fprintf (dump_file, " - considering value ");
3911 print_ipcp_constant_value (dump_file, val->value);
3912 fprintf (dump_file, " for ");
3913 ipa_dump_param (dump_file, IPA_NODE_REF (node), index);
3914 if (offset != -1)
3915 fprintf (dump_file, ", offset: " HOST_WIDE_INT_PRINT_DEC, offset);
3916 fprintf (dump_file, " (caller_count: %i)\n", caller_count);
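  /* Cloning is only worthwhile if the value pays off either through its local
     time/size effects alone, or through the local effects combined with the
     effects propagated to callees.  */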
3919 if (!good_cloning_opportunity_p (node, val->local_time_benefit,
3920 freq_sum, count_sum,
3921 val->local_size_cost)
3922 && !good_cloning_opportunity_p (node,
3923 val->local_time_benefit
3924 + val->prop_time_benefit,
3925 freq_sum, count_sum,
3926 val->local_size_cost
3927 + val->prop_size_cost))
3928 return false;
3930 if (dump_file)
3931 fprintf (dump_file, " Creating a specialized node of %s/%i.\n",
3932 node->name (), node->order);
3934 callers = gather_edges_for_value (val, caller_count);
3935 if (offset == -1)
3936 modify_known_vectors_with_val (&known_csts, &known_contexts, val, index);
3937 else
3939 known_csts = known_csts.copy ();
3940 known_contexts = copy_useful_known_contexts (known_contexts);
3942 find_more_scalar_values_for_callers_subset (node, known_csts, callers);
3943 find_more_contexts_for_caller_subset (node, &known_contexts, callers);
3944 aggvals = find_aggregate_values_for_callers_subset (node, callers);
3945 gcc_checking_assert (ipcp_val_agg_replacement_ok_p (aggvals, index,
3946 offset, val->value));
3947 val->spec_node = create_specialized_node (node, known_csts, known_contexts,
3948 aggvals, callers);
3949 overall_size += val->local_size_cost;
3951 /* TODO: If for some lattice there is only one other known value
3952 left, make a special node for it too. */
3954 return true;
3957 /* Decide whether and what specialized clones of NODE should be created. */
3959 static bool
3960 decide_whether_version_node (struct cgraph_node *node)
3962 struct ipa_node_params *info = IPA_NODE_REF (node);
3963 int i, count = ipa_get_param_count (info);
3964 vec<tree> known_csts;
3965 vec<ipa_polymorphic_call_context> known_contexts;
3966 vec<ipa_agg_jump_function> known_aggs = vNULL;
3967 bool ret = false;
3969 if (count == 0)
3970 return false;
3972 if (dump_file && (dump_flags & TDF_DETAILS))
3973 fprintf (dump_file, "\nEvaluating opportunities for %s/%i.\n",
3974 node->name (), node->order);
3976 gather_context_independent_values (info, &known_csts, &known_contexts,
3977 info->do_clone_for_all_contexts ? &known_aggs
3978 : NULL, NULL);
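  /* Consider each parameter in turn: its known scalar values, its known
     aggregate values at individual offsets, and its known polymorphic call
     contexts.  */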
3980 for (i = 0; i < count; i++)
3982 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
3983 ipcp_lattice<tree> *lat = &plats->itself;
3984 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
3986 if (!lat->bottom
3987 && !known_csts[i])
3989 ipcp_value<tree> *val;
3990 for (val = lat->values; val; val = val->next)
3991 ret |= decide_about_value (node, i, -1, val, known_csts,
3992 known_contexts);
3995 if (!plats->aggs_bottom)
3997 struct ipcp_agg_lattice *aglat;
3998 ipcp_value<tree> *val;
3999 for (aglat = plats->aggs; aglat; aglat = aglat->next)
4000 if (!aglat->bottom && aglat->values
4001 /* If the following is false, the one value is in
4002 known_aggs. */
4003 && (plats->aggs_contain_variable
4004 || !aglat->is_single_const ()))
4005 for (val = aglat->values; val; val = val->next)
4006 ret |= decide_about_value (node, i, aglat->offset, val,
4007 known_csts, known_contexts);
4010 if (!ctxlat->bottom
4011 && known_contexts[i].useless_p ())
4013 ipcp_value<ipa_polymorphic_call_context> *val;
4014 for (val = ctxlat->values; val; val = val->next)
4015 ret |= decide_about_value (node, i, -1, val, known_csts,
4016 known_contexts);
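  /* Re-fetch INFO: creating specialized nodes above may have grown the
     underlying summary vector and invalidated the cached pointer.  */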
4019 info = IPA_NODE_REF (node);
4022 if (info->do_clone_for_all_contexts)
4024 struct cgraph_node *clone;
4025 vec<cgraph_edge *> callers;
4027 if (dump_file)
4028 fprintf (dump_file, " - Creating a specialized node of %s/%i "
4029 "for all known contexts.\n", node->name (),
4030 node->order);
4032 callers = node->collect_callers ();
4034 if (!known_contexts_useful_p (known_contexts))
4036 known_contexts.release ();
4037 known_contexts = vNULL;
4039 clone = create_specialized_node (node, known_csts, known_contexts,
4040 known_aggs_to_agg_replacement_list (known_aggs),
4041 callers);
4042 info = IPA_NODE_REF (node);
4043 info->do_clone_for_all_contexts = false;
4044 IPA_NODE_REF (clone)->is_all_contexts_clone = true;
4045 for (i = 0; i < count; i++)
4046 vec_free (known_aggs[i].items);
4047 known_aggs.release ();
4048 ret = true;
4050 else
4052 known_csts.release ();
4053 known_contexts.release ();
4056 return ret;
4059 /* Transitively mark all callees of NODE within the same SCC as not dead. */
4061 static void
4062 spread_undeadness (struct cgraph_node *node)
4064 struct cgraph_edge *cs;
4066 for (cs = node->callees; cs; cs = cs->next_callee)
4067 if (ipa_edge_within_scc (cs))
4069 struct cgraph_node *callee;
4070 struct ipa_node_params *info;
4072 callee = cs->callee->function_symbol (NULL);
4073 info = IPA_NODE_REF (callee);
4075 if (info->node_dead)
4077 info->node_dead = 0;
4078 spread_undeadness (callee);
4083 /* Return true if NODE has a caller from outside of its SCC that is not
4084 dead. Worker callback for cgraph_node::call_for_symbol_thunks_and_aliases. */
4086 static bool
4087 has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
4088 void *data ATTRIBUTE_UNUSED)
4090 struct cgraph_edge *cs;
4092 for (cs = node->callers; cs; cs = cs->next_caller)
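    /* A caller that is a thunk is looked through: check whether the thunk
       itself has an undead caller from outside the SCC.  */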
4093 if (cs->caller->thunk.thunk_p
4094 && cs->caller->call_for_symbol_thunks_and_aliases
4095 (has_undead_caller_from_outside_scc_p, NULL, true))
4096 return true;
4097 else if (!ipa_edge_within_scc (cs)
4098 && !IPA_NODE_REF (cs->caller)->node_dead)
4099 return true;
4100 return false;
4104 /* Identify nodes within the same SCC as NODE which are no longer needed
4105 because of new clones and will be removed as unreachable. */
4107 static void
4108 identify_dead_nodes (struct cgraph_node *node)
4110 struct cgraph_node *v;
4111 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4112 if (v->will_be_removed_from_program_if_no_direct_calls_p ()
4113 && !v->call_for_symbol_thunks_and_aliases
4114 (has_undead_caller_from_outside_scc_p, NULL, true))
4115 IPA_NODE_REF (v)->node_dead = 1;
4117 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4118 if (!IPA_NODE_REF (v)->node_dead)
4119 spread_undeadness (v);
4121 if (dump_file && (dump_flags & TDF_DETAILS))
4123 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4124 if (IPA_NODE_REF (v)->node_dead)
4125 fprintf (dump_file, " Marking node as dead: %s/%i.\n",
4126 v->name (), v->order);
4130 /* The decision stage. Iterate over the topological order of call graph nodes
4131 TOPO and make specialized clones if deemed beneficial. */
4133 static void
4134 ipcp_decision_stage (struct ipa_topo_info *topo)
4136 int i;
4138 if (dump_file)
4139 fprintf (dump_file, "\nIPA decision stage:\n\n");
4141 for (i = topo->nnodes - 1; i >= 0; i--)
4143 struct cgraph_node *node = topo->order[i];
4144 bool change = false, iterate = true;
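      /* Keep re-examining all nodes of the SCC until a whole sweep creates no
	 new specialized clones.  */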
4146 while (iterate)
4148 struct cgraph_node *v;
4149 iterate = false;
4150 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4151 if (v->has_gimple_body_p ()
4152 && ipcp_versionable_function_p (v))
4153 iterate |= decide_whether_version_node (v);
4155 change |= iterate;
4157 if (change)
4158 identify_dead_nodes (node);
4162 /* The IPCP driver. */
4164 static unsigned int
4165 ipcp_driver (void)
4167 struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
4168 struct cgraph_edge_hook_list *edge_removal_hook_holder;
4169 struct ipa_topo_info topo;
4171 ipa_check_create_node_params ();
4172 ipa_check_create_edge_args ();
4173 grow_edge_clone_vectors ();
4174 edge_duplication_hook_holder =
4175 symtab->add_edge_duplication_hook (&ipcp_edge_duplication_hook, NULL);
4176 edge_removal_hook_holder =
4177 symtab->add_edge_removal_hook (&ipcp_edge_removal_hook, NULL);
4179 ipcp_cst_values_pool = create_alloc_pool ("IPA-CP constant values",
4180 sizeof (ipcp_value<tree>), 32);
4181 ipcp_poly_ctx_values_pool = create_alloc_pool
4182 ("IPA-CP polymorphic contexts",
4183 sizeof (ipcp_value<ipa_polymorphic_call_context>), 32);
4184 ipcp_sources_pool = create_alloc_pool ("IPA-CP value sources",
4185 sizeof (ipcp_value_source<tree>), 64);
4186 ipcp_agg_lattice_pool = create_alloc_pool ("IPA_CP aggregate lattices",
4187 sizeof (struct ipcp_agg_lattice),
4188 32);
4189 if (dump_file)
4191 fprintf (dump_file, "\nIPA structures before propagation:\n");
4192 if (dump_flags & TDF_DETAILS)
4193 ipa_print_all_params (dump_file);
4194 ipa_print_all_jump_functions (dump_file);
4197 /* Topological sort. */
4198 build_toporder_info (&topo);
4199 /* Do the interprocedural propagation. */
4200 ipcp_propagate_stage (&topo);
4201 /* Decide what constant propagation and cloning should be performed. */
4202 ipcp_decision_stage (&topo);
4204 /* Free all IPCP structures. */
4205 free_toporder_info (&topo);
4206 next_edge_clone.release ();
4207 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
4208 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
4209 ipa_free_all_structures_after_ipa_cp ();
4210 if (dump_file)
4211 fprintf (dump_file, "\nIPA constant propagation end\n");
4212 return 0;
4215 /* Initialization and computation of IPCP data structures. This is the initial
4216 intraprocedural analysis of functions, which gathers information to be
4217 propagated later on. */
4219 static void
4220 ipcp_generate_summary (void)
4222 struct cgraph_node *node;
4224 if (dump_file)
4225 fprintf (dump_file, "\nIPA constant propagation start:\n");
4226 ipa_register_cgraph_hooks ();
4228 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
4230 node->local.versionable
4231 = tree_versionable_function_p (node->decl);
4232 ipa_analyze_node (node);
4236 /* Write the ipcp summary, i.e. the jump functions of the analyzed functions. */
4238 static void
4239 ipcp_write_summary (void)
4241 ipa_prop_write_jump_functions ();
4244 /* Read ipcp summary. */
4246 static void
4247 ipcp_read_summary (void)
4249 ipa_prop_read_jump_functions ();
4252 namespace {
4254 const pass_data pass_data_ipa_cp =
4256 IPA_PASS, /* type */
4257 "cp", /* name */
4258 OPTGROUP_NONE, /* optinfo_flags */
4259 TV_IPA_CONSTANT_PROP, /* tv_id */
4260 0, /* properties_required */
4261 0, /* properties_provided */
4262 0, /* properties_destroyed */
4263 0, /* todo_flags_start */
4264 ( TODO_dump_symtab | TODO_remove_functions ), /* todo_flags_finish */
4267 class pass_ipa_cp : public ipa_opt_pass_d
4269 public:
4270 pass_ipa_cp (gcc::context *ctxt)
4271 : ipa_opt_pass_d (pass_data_ipa_cp, ctxt,
4272 ipcp_generate_summary, /* generate_summary */
4273 ipcp_write_summary, /* write_summary */
4274 ipcp_read_summary, /* read_summary */
4275 ipa_prop_write_all_agg_replacement, /*
4276 write_optimization_summary */
4277 ipa_prop_read_all_agg_replacement, /*
4278 read_optimization_summary */
4279 NULL, /* stmt_fixup */
4280 0, /* function_transform_todo_flags_start */
4281 ipcp_transform_function, /* function_transform */
4282 NULL) /* variable_transform */
4285 /* opt_pass methods: */
4286 virtual bool gate (function *)
4288 /* FIXME: We should remove the optimize check after we ensure we never run
4289 IPA passes when not optimizing. */
4290 return flag_ipa_cp && optimize;
4293 virtual unsigned int execute (function *) { return ipcp_driver (); }
4295 }; // class pass_ipa_cp
4297 } // anon namespace
4299 ipa_opt_pass_d *
4300 make_pass_ipa_cp (gcc::context *ctxt)
4302 return new pass_ipa_cp (ctxt);
4305 /* Reset all state within ipa-cp.c so that we can rerun the compiler
4306 within the same process. For use by toplev::finalize. */
4308 void
4309 ipa_cp_c_finalize (void)
4311 max_count = 0;
4312 overall_size = 0;
4313 max_new_size = 0;