1 /* Interprocedural constant propagation
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
4 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
5 <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Interprocedural constant propagation (IPA-CP).
25 The goal of this transformation is to
27    1) discover functions which are always invoked with the same known constant
28       values for some of their arguments and modify the functions so that the
29       subsequent optimizations can take advantage of this knowledge, and
31 2) partial specialization - create specialized versions of functions
32 transformed in this way if some parameters are known constants only in
33       certain contexts but the estimated tradeoff between speedup and the code
34       size cost is deemed good.
36 The algorithm also propagates types and attempts to perform type based
37 devirtualization. Types are propagated much like constants.
39 The algorithm basically consists of three stages. In the first, functions
40 are analyzed one at a time and jump functions are constructed for all known
41 call-sites. In the second phase, the pass propagates information from the
42    jump functions across the call graph to reveal what values are available at what
43 call sites, performs estimations of effects of known values on functions and
44 their callees, and finally decides what specialized extra versions should be
45 created. In the third, the special versions materialize and appropriate
46 calls are redirected.
48 The algorithm used is to a certain extent based on "Interprocedural Constant
49 Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
50 Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
51 Cooper, Mary W. Hall, and Ken Kennedy.
54 First stage - intraprocedural analysis
55 =======================================
57 This phase computes jump_function and modification flags.
59    A jump function for a call-site represents the values passed as actual
60    arguments of a given call-site. In principle, there are three types of
61 values:
63      Pass through - the caller's formal parameter is passed as an actual
64                     argument, possibly with an arithmetic operation applied to it.
65 Constant - a constant is passed as an actual argument.
66 Unknown - neither of the above.
68 All jump function types are described in detail in ipa-prop.h, together with
69 the data structures that represent them and methods of accessing them.
71 ipcp_generate_summary() is the main function of the first stage.
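   As an illustration of the jump functions described above (a hypothetical
   example, not code taken from this pass), consider:

     static int callee (int x, int y);

     int caller (int a)
     {
       return callee (a + 4, 7);
     }

   The call-site in CALLER gets a pass-through jump function for the first
   argument (parameter 0 of CALLER with a PLUS_EXPR adding 4) and a constant
   jump function holding 7 for the second argument.  An argument computed in
   a way the analysis does not understand would get an unknown jump function.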
73 Second stage - interprocedural analysis
74 ========================================
76 This stage is itself divided into two phases. In the first, we propagate
77    known values over the call graph; in the second, we make cloning decisions.
78    It uses a different algorithm than the one in the original paper by Callahan et al.
80 First, we traverse the functions topologically from callers to callees and,
81 for each strongly connected component (SCC), we propagate constants
82 according to previously computed jump functions. We also record what known
83 values depend on other known values and estimate local effects. Finally, we
84 propagate cumulative information about these effects from dependent values
85 to those on which they depend.
87 Second, we again traverse the call graph in the same topological order and
88 make clones for functions which we know are called with the same values in
89 all contexts and decide about extra specialized clones of functions just for
90 some contexts - these decisions are based on both local estimates and
91 cumulative estimates propagated from callees.
93    ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
94    second stage.
96 Third phase - materialization of clones, call statement updates.
97    =================================================================
99 This stage is currently performed by call graph code (mainly in cgraphunit.c
100 and tree-inline.c) according to instructions inserted to the call graph by
101 the second stage. */
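/* As a sketch of the overall effect (hypothetical code, only meant to
   illustrate the description above): if CALLEE below is always called with 7
   in its second argument, the pass may create a specialized clone in which
   that parameter is replaced by the constant and redirect the calls to it:

     int callee (int x, int y) { return x * y; }
     ...
     callee (a, 7);

   conceptually becomes

     int callee_constprop (int x) { return x * 7; }
     ...
     callee_constprop (a);

   The actual materialization of such clones is done by the call graph code
   as described above.  */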
103 #include "config.h"
104 #include "system.h"
105 #include "coretypes.h"
106 #include "tree.h"
107 #include "gimple-fold.h"
108 #include "gimple-expr.h"
109 #include "target.h"
110 #include "predict.h"
111 #include "basic-block.h"
112 #include "vec.h"
113 #include "hash-map.h"
114 #include "is-a.h"
115 #include "plugin-api.h"
116 #include "hashtab.h"
117 #include "hash-set.h"
118 #include "machmode.h"
119 #include "tm.h"
120 #include "hard-reg-set.h"
121 #include "input.h"
122 #include "function.h"
123 #include "ipa-ref.h"
124 #include "cgraph.h"
125 #include "alloc-pool.h"
126 #include "symbol-summary.h"
127 #include "ipa-prop.h"
128 #include "bitmap.h"
129 #include "tree-pass.h"
130 #include "flags.h"
131 #include "diagnostic.h"
132 #include "tree-pretty-print.h"
133 #include "tree-inline.h"
134 #include "params.h"
135 #include "ipa-inline.h"
136 #include "ipa-utils.h"
138 template <typename valtype> class ipcp_value;
140 /* Describes a particular source for an IPA-CP value. */
142 template <typename valtype>
143 class ipcp_value_source
145 public:
146   /* Aggregate offset of the source, negative if the source is the scalar value
147      of the argument itself. */
148 HOST_WIDE_INT offset;
149 /* The incoming edge that brought the value. */
150 cgraph_edge *cs;
151   /* If the jump function that resulted in this value was a pass-through or an
152 ancestor, this is the ipcp_value of the caller from which the described
153 value has been derived. Otherwise it is NULL. */
154 ipcp_value<valtype> *val;
155 /* Next pointer in a linked list of sources of a value. */
156 ipcp_value_source *next;
157   /* If the jump function that resulted in this value was a pass-through or an
158 ancestor, this is the index of the parameter of the caller the jump
159 function references. */
160 int index;
163 /* Common ancestor for all ipcp_value instantiations. */
165 class ipcp_value_base
167 public:
168 /* Time benefit and size cost that specializing the function for this value
169 would bring about in this function alone. */
170 int local_time_benefit, local_size_cost;
171 /* Time benefit and size cost that specializing the function for this value
172      can bring about in its callees (transitively). */
173 int prop_time_benefit, prop_size_cost;
176 /* Describes one particular value stored in struct ipcp_lattice. */
178 template <typename valtype>
179 class ipcp_value : public ipcp_value_base
181 public:
182 /* The actual value for the given parameter. */
183 valtype value;
184 /* The list of sources from which this value originates. */
185 ipcp_value_source <valtype> *sources;
186 /* Next pointers in a linked list of all values in a lattice. */
187 ipcp_value *next;
188 /* Next pointers in a linked list of values in a strongly connected component
189 of values. */
190 ipcp_value *scc_next;
191 /* Next pointers in a linked list of SCCs of values sorted topologically
192      according to their sources. */
193 ipcp_value *topo_next;
194 /* A specialized node created for this value, NULL if none has been (so far)
195 created. */
196 cgraph_node *spec_node;
197 /* Depth first search number and low link for topological sorting of
198 values. */
199 int dfs, low_link;
200   /* True if this value is currently on the topo-sort stack. */
201 bool on_stack;
203 void add_source (cgraph_edge *cs, ipcp_value *src_val, int src_idx,
204 HOST_WIDE_INT offset);
207 /* Lattice describing potential values of a formal parameter of a function, or
208    a part of an aggregate. TOP is represented by a lattice with zero values
209 and with contains_variable and bottom flags cleared. BOTTOM is represented
210 by a lattice with the bottom flag set. In that case, values and
211 contains_variable flag should be disregarded. */
213 template <typename valtype>
214 class ipcp_lattice
216 public:
217 /* The list of known values and types in this lattice. Note that values are
218 not deallocated if a lattice is set to bottom because there may be value
219 sources referencing them. */
220 ipcp_value<valtype> *values;
221 /* Number of known values and types in this lattice. */
222 int values_count;
223 /* The lattice contains a variable component (in addition to values). */
224 bool contains_variable;
225 /* The value of the lattice is bottom (i.e. variable and unusable for any
226 propagation). */
227 bool bottom;
229 inline bool is_single_const ();
230 inline bool set_to_bottom ();
231 inline bool set_contains_variable ();
232 bool add_value (valtype newval, cgraph_edge *cs,
233 ipcp_value<valtype> *src_val = NULL,
234 int src_idx = 0, HOST_WIDE_INT offset = -1);
235 void print (FILE * f, bool dump_sources, bool dump_benefits);
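/* For illustration: a lattice for a parameter so far only seen to hold the
   constant 5 has values_count == 1 with both contains_variable and bottom
   clear, so is_single_const returns true.  Processing a call-site that passes
   something unknown sets contains_variable; a lattice that can no longer be
   used for propagation at all has bottom set.  A lattice with no values and
   neither flag set represents TOP.  */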
238 /* Lattice of tree values with an offset to describe a part of an
239 aggregate. */
241 class ipcp_agg_lattice : public ipcp_lattice<tree>
243 public:
244 /* Offset that is being described by this lattice. */
245 HOST_WIDE_INT offset;
246 /* Size so that we don't have to re-compute it every time we traverse the
247 list. Must correspond to TYPE_SIZE of all lat values. */
248 HOST_WIDE_INT size;
249 /* Next element of the linked list. */
250 struct ipcp_agg_lattice *next;
253 /* Structure containing lattices for a parameter itself and for pieces of
254 aggregates that are passed in the parameter or by a reference in a parameter
255 plus some other useful flags. */
257 class ipcp_param_lattices
259 public:
260 /* Lattice describing the value of the parameter itself. */
261 ipcp_lattice<tree> itself;
262   /* Lattice describing the polymorphic contexts of a parameter. */
263 ipcp_lattice<ipa_polymorphic_call_context> ctxlat;
264 /* Lattices describing aggregate parts. */
265 ipcp_agg_lattice *aggs;
266   /* Alignment information. Very basic one-value lattice where !known means
267      TOP and a zero alignment means BOTTOM. */
268 ipa_alignment alignment;
270   /* Number of aggregate lattices. */
270 int aggs_count;
271 /* True if aggregate data were passed by reference (as opposed to by
272 value). */
273 bool aggs_by_ref;
274 /* All aggregate lattices contain a variable component (in addition to
275 values). */
276 bool aggs_contain_variable;
277 /* The value of all aggregate lattices is bottom (i.e. variable and unusable
278 for any propagation). */
279 bool aggs_bottom;
281 /* There is a virtual call based on this parameter. */
282 bool virt_call;
285 /* Allocation pools for values and their sources in ipa-cp. */
287 alloc_pool ipcp_cst_values_pool;
288 alloc_pool ipcp_poly_ctx_values_pool;
289 alloc_pool ipcp_sources_pool;
290 alloc_pool ipcp_agg_lattice_pool;
292 /* Maximal count found in program. */
294 static gcov_type max_count;
296 /* Original overall size of the program. */
298 static long overall_size, max_new_size;
300 /* Return the param lattices structure corresponding to the Ith formal
301 parameter of the function described by INFO. */
302 static inline struct ipcp_param_lattices *
303 ipa_get_parm_lattices (struct ipa_node_params *info, int i)
305 gcc_assert (i >= 0 && i < ipa_get_param_count (info));
306 gcc_checking_assert (!info->ipcp_orig_node);
307 gcc_checking_assert (info->lattices);
308 return &(info->lattices[i]);
311 /* Return the lattice corresponding to the scalar value of the Ith formal
312 parameter of the function described by INFO. */
313 static inline ipcp_lattice<tree> *
314 ipa_get_scalar_lat (struct ipa_node_params *info, int i)
316 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
317 return &plats->itself;
320 /* Return the lattice corresponding to the polymorphic context of the Ith
321    formal parameter of the function described by INFO. */
322 static inline ipcp_lattice<ipa_polymorphic_call_context> *
323 ipa_get_poly_ctx_lat (struct ipa_node_params *info, int i)
325 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
326 return &plats->ctxlat;
329 /* Return whether LAT is a lattice with a single constant and without an
330 undefined value. */
332 template <typename valtype>
333 inline bool
334 ipcp_lattice<valtype>::is_single_const ()
336 if (bottom || contains_variable || values_count != 1)
337 return false;
338 else
339 return true;
342 /* Print V which is extracted from a value in a lattice to F. */
344 static void
345 print_ipcp_constant_value (FILE * f, tree v)
347 if (TREE_CODE (v) == ADDR_EXPR
348 && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
350 fprintf (f, "& ");
351 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)), 0);
353 else
354 print_generic_expr (f, v, 0);
357 /* Print V which is extracted from a value in a lattice to F. */
359 static void
360 print_ipcp_constant_value (FILE * f, ipa_polymorphic_call_context v)
362 v.dump(f, false);
365 /* Print a lattice LAT to F. */
367 template <typename valtype>
368 void
369 ipcp_lattice<valtype>::print (FILE * f, bool dump_sources, bool dump_benefits)
371 ipcp_value<valtype> *val;
372 bool prev = false;
374 if (bottom)
376 fprintf (f, "BOTTOM\n");
377 return;
380 if (!values_count && !contains_variable)
382 fprintf (f, "TOP\n");
383 return;
386 if (contains_variable)
388 fprintf (f, "VARIABLE");
389 prev = true;
390 if (dump_benefits)
391 fprintf (f, "\n");
394 for (val = values; val; val = val->next)
396 if (dump_benefits && prev)
397 fprintf (f, " ");
398 else if (!dump_benefits && prev)
399 fprintf (f, ", ");
400 else
401 prev = true;
403 print_ipcp_constant_value (f, val->value);
405 if (dump_sources)
407 ipcp_value_source<valtype> *s;
409 fprintf (f, " [from:");
410 for (s = val->sources; s; s = s->next)
411 fprintf (f, " %i(%i)", s->cs->caller->order,
412 s->cs->frequency);
413 fprintf (f, "]");
416 if (dump_benefits)
417 fprintf (f, " [loc_time: %i, loc_size: %i, "
418 "prop_time: %i, prop_size: %i]\n",
419 val->local_time_benefit, val->local_size_cost,
420 val->prop_time_benefit, val->prop_size_cost);
422 if (!dump_benefits)
423 fprintf (f, "\n");
426 /* Print all ipcp_lattices of all functions to F. */
428 static void
429 print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
431 struct cgraph_node *node;
432 int i, count;
434 fprintf (f, "\nLattices:\n");
435 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
437 struct ipa_node_params *info;
439 info = IPA_NODE_REF (node);
440 fprintf (f, " Node: %s/%i:\n", node->name (),
441 node->order);
442 count = ipa_get_param_count (info);
443 for (i = 0; i < count; i++)
445 struct ipcp_agg_lattice *aglat;
446 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
447 fprintf (f, " param [%d]: ", i);
448 plats->itself.print (f, dump_sources, dump_benefits);
449 fprintf (f, " ctxs: ");
450 plats->ctxlat.print (f, dump_sources, dump_benefits);
451 if (plats->alignment.known && plats->alignment.align > 0)
452 fprintf (f, " Alignment %u, misalignment %u\n",
453 plats->alignment.align, plats->alignment.misalign);
454 else if (plats->alignment.known)
455 fprintf (f, " Alignment unusable\n");
456 else
457 fprintf (f, " Alignment unknown\n");
458 if (plats->virt_call)
459 fprintf (f, " virt_call flag set\n");
461 if (plats->aggs_bottom)
463 fprintf (f, " AGGS BOTTOM\n");
464 continue;
466 if (plats->aggs_contain_variable)
467 fprintf (f, " AGGS VARIABLE\n");
468 for (aglat = plats->aggs; aglat; aglat = aglat->next)
470 fprintf (f, " %soffset " HOST_WIDE_INT_PRINT_DEC ": ",
471 plats->aggs_by_ref ? "ref " : "", aglat->offset);
472 aglat->print (f, dump_sources, dump_benefits);
478 /* Determine whether it is at all technically possible to create clones of NODE
479 and store this information in the ipa_node_params structure associated
480 with NODE. */
482 static void
483 determine_versionability (struct cgraph_node *node)
485 const char *reason = NULL;
487 /* There are a number of generic reasons functions cannot be versioned. We
488 also cannot remove parameters if there are type attributes such as fnspec
489 present. */
490 if (node->alias || node->thunk.thunk_p)
491 reason = "alias or thunk";
492 else if (!node->local.versionable)
493 reason = "not a tree_versionable_function";
494 else if (node->get_availability () <= AVAIL_INTERPOSABLE)
495 reason = "insufficient body availability";
496 else if (!opt_for_fn (node->decl, optimize)
497 || !opt_for_fn (node->decl, flag_ipa_cp))
498 reason = "non-optimized function";
499 else if (lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (node->decl)))
501 /* Ideally we should clone the SIMD clones themselves and create
502 vector copies of them, so IPA-cp and SIMD clones can happily
503 coexist, but that may not be worth the effort. */
504 reason = "function has SIMD clones";
506   /* Don't clone decls local to a comdat group; it breaks, and for C++
507      decloned constructors, inlining is always better anyway. */
508 else if (node->comdat_local_p ())
509 reason = "comdat-local function";
511 if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
512 fprintf (dump_file, "Function %s/%i is not versionable, reason: %s.\n",
513 node->name (), node->order, reason);
515 node->local.versionable = (reason == NULL);
518 /* Return true if it is at all technically possible to create clones of
519    NODE. */
521 static bool
522 ipcp_versionable_function_p (struct cgraph_node *node)
524 return node->local.versionable;
527 /* Structure holding accumulated information about callers of a node. */
529 struct caller_statistics
531 gcov_type count_sum;
532 int n_calls, n_hot_calls, freq_sum;
535 /* Initialize fields of STAT to zeroes. */
537 static inline void
538 init_caller_stats (struct caller_statistics *stats)
540 stats->count_sum = 0;
541 stats->n_calls = 0;
542 stats->n_hot_calls = 0;
543 stats->freq_sum = 0;
546 /* Worker callback of call_for_symbol_thunks_and_aliases accumulating statistics of
547 non-thunk incoming edges to NODE. */
549 static bool
550 gather_caller_stats (struct cgraph_node *node, void *data)
552 struct caller_statistics *stats = (struct caller_statistics *) data;
553 struct cgraph_edge *cs;
555 for (cs = node->callers; cs; cs = cs->next_caller)
556 if (cs->caller->thunk.thunk_p)
557 cs->caller->call_for_symbol_thunks_and_aliases (gather_caller_stats,
558 stats, false);
559 else
561 stats->count_sum += cs->count;
562 stats->freq_sum += cs->frequency;
563 stats->n_calls++;
564 if (cs->maybe_hot_p ())
565 stats->n_hot_calls ++;
567 return false;
571 /* Return true if NODE is a viable candidate for cloning. */
573 static bool
574 ipcp_cloning_candidate_p (struct cgraph_node *node)
576 struct caller_statistics stats;
578 gcc_checking_assert (node->has_gimple_body_p ());
580 if (!opt_for_fn (node->decl, flag_ipa_cp_clone))
582 if (dump_file)
583 fprintf (dump_file, "Not considering %s for cloning; "
584 "-fipa-cp-clone disabled.\n",
585 node->name ());
586 return false;
589 if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
591 if (dump_file)
592 fprintf (dump_file, "Not considering %s for cloning; "
593 "optimizing it for size.\n",
594 node->name ());
595 return false;
598 init_caller_stats (&stats);
599 node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats, false);
601 if (inline_summaries->get (node)->self_size < stats.n_calls)
603 if (dump_file)
604 fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
605 node->name ());
606 return true;
609   /* When a profile is available and the function is hot, propagate into it even
610      if the calls to it seem cold; constant propagation can improve the function's speed
611 significantly. */
612 if (max_count)
614 if (stats.count_sum > node->count * 90 / 100)
616 if (dump_file)
617 fprintf (dump_file, "Considering %s for cloning; "
618 "usually called directly.\n",
619 node->name ());
620 return true;
623 if (!stats.n_hot_calls)
625 if (dump_file)
626 fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
627 node->name ());
628 return false;
630 if (dump_file)
631 fprintf (dump_file, "Considering %s for cloning.\n",
632 node->name ());
633 return true;
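/* As an illustration of the heuristics above (with hypothetical numbers): a
   function whose estimated self size is 8 but which has 20 incoming calls is
   accepted because cloning might shrink the overall code, whereas a function
   that reaches the hot-call test with no hot incoming calls is rejected.  */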
636 template <typename valtype>
637 class value_topo_info
639 public:
640 /* Head of the linked list of topologically sorted values. */
641 ipcp_value<valtype> *values_topo;
642 /* Stack for creating SCCs, represented by a linked list too. */
643 ipcp_value<valtype> *stack;
644   /* Counter driving the algorithm in add_val. */
645 int dfs_counter;
647 value_topo_info () : values_topo (NULL), stack (NULL), dfs_counter (0)
649 void add_val (ipcp_value<valtype> *cur_val);
650 void propagate_effects ();
653 /* Arrays representing a topological ordering of call graph nodes and a stack
654 of nodes used during constant propagation and also data required to perform
655 topological sort of values and propagation of benefits in the determined
656 order. */
658 class ipa_topo_info
660 public:
661 /* Array with obtained topological order of cgraph nodes. */
662 struct cgraph_node **order;
663 /* Stack of cgraph nodes used during propagation within SCC until all values
664 in the SCC stabilize. */
665 struct cgraph_node **stack;
666 int nnodes, stack_top;
668 value_topo_info<tree> constants;
669 value_topo_info<ipa_polymorphic_call_context> contexts;
671 ipa_topo_info () : order(NULL), stack(NULL), nnodes(0), stack_top(0),
672 constants ()
676 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
678 static void
679 build_toporder_info (struct ipa_topo_info *topo)
681 topo->order = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
682 topo->stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
684 gcc_checking_assert (topo->stack_top == 0);
685 topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
688 /* Free information about strongly connected components and the arrays in
689 TOPO. */
691 static void
692 free_toporder_info (struct ipa_topo_info *topo)
694 ipa_free_postorder_info ();
695 free (topo->order);
696 free (topo->stack);
699 /* Add NODE to the stack in TOPO, unless it is already there. */
701 static inline void
702 push_node_to_stack (struct ipa_topo_info *topo, struct cgraph_node *node)
704 struct ipa_node_params *info = IPA_NODE_REF (node);
705 if (info->node_enqueued)
706 return;
707 info->node_enqueued = 1;
708 topo->stack[topo->stack_top++] = node;
711 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
712 is empty. */
714 static struct cgraph_node *
715 pop_node_from_stack (struct ipa_topo_info *topo)
717 if (topo->stack_top)
719 struct cgraph_node *node;
720 topo->stack_top--;
721 node = topo->stack[topo->stack_top];
722 IPA_NODE_REF (node)->node_enqueued = 0;
723 return node;
725 else
726 return NULL;
729 /* Set lattice LAT to bottom and return true if it previously was not set as
730 such. */
732 template <typename valtype>
733 inline bool
734 ipcp_lattice<valtype>::set_to_bottom ()
736 bool ret = !bottom;
737 bottom = true;
738 return ret;
741 /* Mark lattice as containing an unknown value and return true if it previously
742 was not marked as such. */
744 template <typename valtype>
745 inline bool
746 ipcp_lattice<valtype>::set_contains_variable ()
748 bool ret = !contains_variable;
749 contains_variable = true;
750 return ret;
753 /* Set all aggregate lattices in PLATS to bottom and return true if they were
754 not previously set as such. */
756 static inline bool
757 set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
759 bool ret = !plats->aggs_bottom;
760 plats->aggs_bottom = true;
761 return ret;
764 /* Mark all aggregate lattices in PLATS as containing an unknown value and
765 return true if they were not previously marked as such. */
767 static inline bool
768 set_agg_lats_contain_variable (struct ipcp_param_lattices *plats)
770 bool ret = !plats->aggs_contain_variable;
771 plats->aggs_contain_variable = true;
772 return ret;
775 /* Return true if alignment information in PLATS is known to be unusable. */
777 static inline bool
778 alignment_bottom_p (ipcp_param_lattices *plats)
780 return plats->alignment.known && (plats->alignment.align == 0);
783 /* Set alignment information in PLATS to unusable. Return true if it
784 previously was usable or unknown. */
786 static inline bool
787 set_alignment_to_bottom (ipcp_param_lattices *plats)
789 if (alignment_bottom_p (plats))
790 return false;
791 plats->alignment.known = true;
792 plats->alignment.align = 0;
793 return true;
796 /* Mark both aggregate and scalar lattices as containing an unknown variable
797    and return true if any of them has not been marked as such so far. */
799 static inline bool
800 set_all_contains_variable (struct ipcp_param_lattices *plats)
802 bool ret;
803 ret = plats->itself.set_contains_variable ();
804 ret |= plats->ctxlat.set_contains_variable ();
805 ret |= set_agg_lats_contain_variable (plats);
806 ret |= set_alignment_to_bottom (plats);
807 return ret;
810 /* Initialize ipcp_lattices. */
812 static void
813 initialize_node_lattices (struct cgraph_node *node)
815 struct ipa_node_params *info = IPA_NODE_REF (node);
816 struct cgraph_edge *ie;
817 bool disable = false, variable = false;
818 int i;
820 gcc_checking_assert (node->has_gimple_body_p ());
821 if (!cgraph_local_p (node))
823 /* When cloning is allowed, we can assume that externally visible
824          functions are not called. We will compensate for this by cloning
825 later. */
826 if (ipcp_versionable_function_p (node)
827 && ipcp_cloning_candidate_p (node))
828 variable = true;
829 else
830 disable = true;
833 if (disable || variable)
835 for (i = 0; i < ipa_get_param_count (info) ; i++)
837 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
838 if (disable)
840 plats->itself.set_to_bottom ();
841 plats->ctxlat.set_to_bottom ();
842 set_agg_lats_to_bottom (plats);
843 set_alignment_to_bottom (plats);
845 else
846 set_all_contains_variable (plats);
848 if (dump_file && (dump_flags & TDF_DETAILS)
849 && !node->alias && !node->thunk.thunk_p)
850 fprintf (dump_file, "Marking all lattices of %s/%i as %s\n",
851 node->name (), node->order,
852 disable ? "BOTTOM" : "VARIABLE");
855 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
856 if (ie->indirect_info->polymorphic
857 && ie->indirect_info->param_index >= 0)
859 gcc_checking_assert (ie->indirect_info->param_index >= 0);
860 ipa_get_parm_lattices (info,
861 ie->indirect_info->param_index)->virt_call = 1;
865 /* Return the result of a (possibly arithmetic) pass through jump function
866    JFUNC on the constant value INPUT. Return NULL_TREE if the result cannot be
867    determined or cannot be considered an interprocedural invariant. */
869 static tree
870 ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
872 tree restype, res;
874 gcc_checking_assert (is_gimple_ip_invariant (input));
875 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
876 return input;
878 if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc))
879 == tcc_comparison)
880 restype = boolean_type_node;
881 else
882 restype = TREE_TYPE (input);
883 res = fold_binary (ipa_get_jf_pass_through_operation (jfunc), restype,
884 input, ipa_get_jf_pass_through_operand (jfunc));
886 if (res && !is_gimple_ip_invariant (res))
887 return NULL_TREE;
889 return res;
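/* For example (purely illustrative): for a pass-through jump function with a
   PLUS_EXPR operation and operand 4, an INPUT of 3 yields 7, computed by
   fold_binary above.  For a comparison operation such as LT_EXPR, the result
   type is boolean_type_node rather than the type of INPUT.  */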
892 /* Return the result of an ancestor jump function JFUNC on the constant value
893 INPUT. Return NULL_TREE if that cannot be determined. */
895 static tree
896 ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
898 gcc_checking_assert (TREE_CODE (input) != TREE_BINFO);
899 if (TREE_CODE (input) == ADDR_EXPR)
901 tree t = TREE_OPERAND (input, 0);
902 t = build_ref_for_offset (EXPR_LOCATION (t), t,
903 ipa_get_jf_ancestor_offset (jfunc),
904 ptr_type_node, NULL, false);
905 return build_fold_addr_expr (t);
907 else
908 return NULL_TREE;
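/* For example (purely illustrative): if INPUT is the address of a variable
   and the ancestor jump function describes an offset of 64 bits, the result
   is the address of the sub-object eight bytes into that variable, built by
   build_ref_for_offset and build_fold_addr_expr above.  An INPUT that is not
   an ADDR_EXPR yields NULL_TREE.  */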
911 /* Determine whether JFUNC evaluates to a single known constant value and if
912 so, return it. Otherwise return NULL. INFO describes the caller node or
913 the one it is inlined to, so that pass-through jump functions can be
914 evaluated. */
916 tree
917 ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
919 if (jfunc->type == IPA_JF_CONST)
920 return ipa_get_jf_constant (jfunc);
921 else if (jfunc->type == IPA_JF_PASS_THROUGH
922 || jfunc->type == IPA_JF_ANCESTOR)
924 tree input;
925 int idx;
927 if (jfunc->type == IPA_JF_PASS_THROUGH)
928 idx = ipa_get_jf_pass_through_formal_id (jfunc);
929 else
930 idx = ipa_get_jf_ancestor_formal_id (jfunc);
932 if (info->ipcp_orig_node)
933 input = info->known_csts[idx];
934 else
936 ipcp_lattice<tree> *lat;
938 if (!info->lattices)
939 return NULL_TREE;
940 lat = ipa_get_scalar_lat (info, idx);
941 if (!lat->is_single_const ())
942 return NULL_TREE;
943 input = lat->values->value;
946 if (!input)
947 return NULL_TREE;
949 if (jfunc->type == IPA_JF_PASS_THROUGH)
950 return ipa_get_jf_pass_through_result (jfunc, input);
951 else
952 return ipa_get_jf_ancestor_result (jfunc, input);
954 else
955 return NULL_TREE;
958 /* Determine whether JFUNC evaluates to a single known polymorphic context,
959    given that INFO describes the caller node or the one it is inlined to, CS is
960    the call graph edge corresponding to JFUNC and CSIDX is the index of the
961    described parameter. */
963 ipa_polymorphic_call_context
964 ipa_context_from_jfunc (ipa_node_params *info, cgraph_edge *cs, int csidx,
965 ipa_jump_func *jfunc)
967 ipa_edge_args *args = IPA_EDGE_REF (cs);
968 ipa_polymorphic_call_context ctx;
969 ipa_polymorphic_call_context *edge_ctx
970 = cs ? ipa_get_ith_polymorhic_call_context (args, csidx) : NULL;
972 if (edge_ctx && !edge_ctx->useless_p ())
973 ctx = *edge_ctx;
975 if (jfunc->type == IPA_JF_PASS_THROUGH
976 || jfunc->type == IPA_JF_ANCESTOR)
978 ipa_polymorphic_call_context srcctx;
979 int srcidx;
980 bool type_preserved = true;
981 if (jfunc->type == IPA_JF_PASS_THROUGH)
983 if (ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
984 return ctx;
985 type_preserved = ipa_get_jf_pass_through_type_preserved (jfunc);
986 srcidx = ipa_get_jf_pass_through_formal_id (jfunc);
988 else
990 type_preserved = ipa_get_jf_ancestor_type_preserved (jfunc);
991 srcidx = ipa_get_jf_ancestor_formal_id (jfunc);
993 if (info->ipcp_orig_node)
995 if (info->known_contexts.exists ())
996 srcctx = info->known_contexts[srcidx];
998 else
1000 if (!info->lattices)
1001 return ctx;
1002 ipcp_lattice<ipa_polymorphic_call_context> *lat;
1003 lat = ipa_get_poly_ctx_lat (info, srcidx);
1004 if (!lat->is_single_const ())
1005 return ctx;
1006 srcctx = lat->values->value;
1008 if (srcctx.useless_p ())
1009 return ctx;
1010 if (jfunc->type == IPA_JF_ANCESTOR)
1011 srcctx.offset_by (ipa_get_jf_ancestor_offset (jfunc));
1012 if (!type_preserved)
1013 srcctx.possible_dynamic_type_change (cs->in_polymorphic_cdtor);
1014 srcctx.combine_with (ctx);
1015 return srcctx;
1018 return ctx;
1021 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
1022 bottom, not containing a variable component and without any known value at
1023 the same time. */
1025 DEBUG_FUNCTION void
1026 ipcp_verify_propagated_values (void)
1028 struct cgraph_node *node;
1030 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
1032 struct ipa_node_params *info = IPA_NODE_REF (node);
1033 int i, count = ipa_get_param_count (info);
1035 for (i = 0; i < count; i++)
1037 ipcp_lattice<tree> *lat = ipa_get_scalar_lat (info, i);
1039 if (!lat->bottom
1040 && !lat->contains_variable
1041 && lat->values_count == 0)
1043 if (dump_file)
1045 symtab_node::dump_table (dump_file);
1046 fprintf (dump_file, "\nIPA lattices after constant "
1047 "propagation, before gcc_unreachable:\n");
1048 print_all_lattices (dump_file, true, false);
1051 gcc_unreachable ();
1057 /* Return true iff X and Y should be considered equal values by IPA-CP. */
1059 static bool
1060 values_equal_for_ipcp_p (tree x, tree y)
1062 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
1064 if (x == y)
1065 return true;
1067 if (TREE_CODE (x) == ADDR_EXPR
1068 && TREE_CODE (y) == ADDR_EXPR
1069 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
1070 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
1071 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
1072 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
1073 else
1074 return operand_equal_p (x, y, 0);
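/* For example (purely illustrative): two ADDR_EXPRs taking the addresses of
   two distinct CONST_DECLs are still considered equal by the function above
   when the DECL_INITIAL constants they refer to are operand_equal_p.  */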
1077 /* Return true iff X and Y should be considered equal contexts by IPA-CP. */
1079 static bool
1080 values_equal_for_ipcp_p (ipa_polymorphic_call_context x,
1081 ipa_polymorphic_call_context y)
1083 return x.equal_to (y);
1087 /* Add a new value source to the value represented by THIS, marking that a
1088 value comes from edge CS and (if the underlying jump function is a
1089 pass-through or an ancestor one) from a caller value SRC_VAL of a caller
1090 parameter described by SRC_INDEX. OFFSET is negative if the source was the
1091    scalar value of the parameter itself, otherwise it is the offset within an aggregate. */
1093 template <typename valtype>
1094 void
1095 ipcp_value<valtype>::add_source (cgraph_edge *cs, ipcp_value *src_val,
1096 int src_idx, HOST_WIDE_INT offset)
1098 ipcp_value_source<valtype> *src;
1100 src = new (pool_alloc (ipcp_sources_pool)) ipcp_value_source<valtype>;
1101 src->offset = offset;
1102 src->cs = cs;
1103 src->val = src_val;
1104 src->index = src_idx;
1106 src->next = sources;
1107 sources = src;
1110 /* Allocate a new ipcp_value holding a tree constant, initialize its value to
1111 SOURCE and clear all other fields. */
1113 static ipcp_value<tree> *
1114 allocate_and_init_ipcp_value (tree source)
1116 ipcp_value<tree> *val;
1118 val = new (pool_alloc (ipcp_cst_values_pool)) ipcp_value<tree>;
1119 memset (val, 0, sizeof (*val));
1120 val->value = source;
1121 return val;
1124 /* Allocate a new ipcp_value holding a polymorphic context, initialize its
1125 value to SOURCE and clear all other fields. */
1127 static ipcp_value<ipa_polymorphic_call_context> *
1128 allocate_and_init_ipcp_value (ipa_polymorphic_call_context source)
1130 ipcp_value<ipa_polymorphic_call_context> *val;
1132 val = new (pool_alloc (ipcp_poly_ctx_values_pool))
1133 ipcp_value<ipa_polymorphic_call_context>;
1134 memset (val, 0, sizeof (*val));
1135 val->value = source;
1136 return val;
1139 /* Try to add NEWVAL to LAT, potentially creating a new ipcp_value for it. CS,
1140    SRC_VAL, SRC_IDX and OFFSET are meant for add_source and have the same
1141 meaning. OFFSET -1 means the source is scalar and not a part of an
1142 aggregate. */
1144 template <typename valtype>
1145 bool
1146 ipcp_lattice<valtype>::add_value (valtype newval, cgraph_edge *cs,
1147 ipcp_value<valtype> *src_val,
1148 int src_idx, HOST_WIDE_INT offset)
1150 ipcp_value<valtype> *val;
1152 if (bottom)
1153 return false;
1155 for (val = values; val; val = val->next)
1156 if (values_equal_for_ipcp_p (val->value, newval))
1158 if (ipa_edge_within_scc (cs))
1160 ipcp_value_source<valtype> *s;
1161 for (s = val->sources; s ; s = s->next)
1162 if (s->cs == cs)
1163 break;
1164 if (s)
1165 return false;
1168 val->add_source (cs, src_val, src_idx, offset);
1169 return false;
1172 if (values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
1174 /* We can only free sources, not the values themselves, because sources
1175          of other values in this SCC might point to them. */
1176 for (val = values; val; val = val->next)
1178 while (val->sources)
1180 ipcp_value_source<valtype> *src = val->sources;
1181 val->sources = src->next;
1182 pool_free (ipcp_sources_pool, src);
1186 values = NULL;
1187 return set_to_bottom ();
1190 values_count++;
1191 val = allocate_and_init_ipcp_value (newval);
1192 val->add_source (cs, src_val, src_idx, offset);
1193 val->next = values;
1194 values = val;
1195 return true;
1198 /* Propagate values through a pass-through jump function JFUNC associated with
1199 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1200 is the index of the source parameter. */
1202 static bool
1203 propagate_vals_accross_pass_through (cgraph_edge *cs,
1204 ipa_jump_func *jfunc,
1205 ipcp_lattice<tree> *src_lat,
1206 ipcp_lattice<tree> *dest_lat,
1207 int src_idx)
1209 ipcp_value<tree> *src_val;
1210 bool ret = false;
1212 /* Do not create new values when propagating within an SCC because if there
1213      are arithmetic functions with circular dependencies, there is an infinite
1214 number of them and we would just make lattices bottom. */
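  /* For instance (an illustrative example), a recursive function passing I + 1
     to its own parameter I would otherwise keep producing the values 1, 2, 3,
     ... without the lattice ever stabilizing.  */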
1215 if ((ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1216 && ipa_edge_within_scc (cs))
1217 ret = dest_lat->set_contains_variable ();
1218 else
1219 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1221 tree cstval = ipa_get_jf_pass_through_result (jfunc, src_val->value);
1223 if (cstval)
1224 ret |= dest_lat->add_value (cstval, cs, src_val, src_idx);
1225 else
1226 ret |= dest_lat->set_contains_variable ();
1229 return ret;
1232 /* Propagate values through an ancestor jump function JFUNC associated with
1233 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1234 is the index of the source parameter. */
1236 static bool
1237 propagate_vals_accross_ancestor (struct cgraph_edge *cs,
1238 struct ipa_jump_func *jfunc,
1239 ipcp_lattice<tree> *src_lat,
1240 ipcp_lattice<tree> *dest_lat,
1241 int src_idx)
1243 ipcp_value<tree> *src_val;
1244 bool ret = false;
1246 if (ipa_edge_within_scc (cs))
1247 return dest_lat->set_contains_variable ();
1249 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1251 tree t = ipa_get_jf_ancestor_result (jfunc, src_val->value);
1253 if (t)
1254 ret |= dest_lat->add_value (t, cs, src_val, src_idx);
1255 else
1256 ret |= dest_lat->set_contains_variable ();
1259 return ret;
1262 /* Propagate scalar values across jump function JFUNC that is associated with
1263 edge CS and put the values into DEST_LAT. */
1265 static bool
1266 propagate_scalar_accross_jump_function (struct cgraph_edge *cs,
1267 struct ipa_jump_func *jfunc,
1268 ipcp_lattice<tree> *dest_lat)
1270 if (dest_lat->bottom)
1271 return false;
1273 if (jfunc->type == IPA_JF_CONST)
1275 tree val = ipa_get_jf_constant (jfunc);
1276 return dest_lat->add_value (val, cs, NULL, 0);
1278 else if (jfunc->type == IPA_JF_PASS_THROUGH
1279 || jfunc->type == IPA_JF_ANCESTOR)
1281 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1282 ipcp_lattice<tree> *src_lat;
1283 int src_idx;
1284 bool ret;
1286 if (jfunc->type == IPA_JF_PASS_THROUGH)
1287 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1288 else
1289 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1291 src_lat = ipa_get_scalar_lat (caller_info, src_idx);
1292 if (src_lat->bottom)
1293 return dest_lat->set_contains_variable ();
1295 /* If we would need to clone the caller and cannot, do not propagate. */
1296 if (!ipcp_versionable_function_p (cs->caller)
1297 && (src_lat->contains_variable
1298 || (src_lat->values_count > 1)))
1299 return dest_lat->set_contains_variable ();
1301 if (jfunc->type == IPA_JF_PASS_THROUGH)
1302 ret = propagate_vals_accross_pass_through (cs, jfunc, src_lat,
1303 dest_lat, src_idx);
1304 else
1305 ret = propagate_vals_accross_ancestor (cs, jfunc, src_lat, dest_lat,
1306 src_idx);
1308 if (src_lat->contains_variable)
1309 ret |= dest_lat->set_contains_variable ();
1311 return ret;
1314 /* TODO: We currently do not handle member method pointers in IPA-CP (we only
1315      use them for indirect inlining); we should propagate them too. */
1316 return dest_lat->set_contains_variable ();
1319 /* Propagate polymorphic contexts across jump function JFUNC that is associated with
1320 edge CS and describes argument IDX and put the values into DEST_LAT. */
1322 static bool
1323 propagate_context_accross_jump_function (cgraph_edge *cs,
1324 ipa_jump_func *jfunc, int idx,
1325 ipcp_lattice<ipa_polymorphic_call_context> *dest_lat)
1327 ipa_edge_args *args = IPA_EDGE_REF (cs);
1328 if (dest_lat->bottom)
1329 return false;
1330 bool ret = false;
1331 bool added_sth = false;
1332 bool type_preserved = true;
1334 ipa_polymorphic_call_context edge_ctx, *edge_ctx_ptr
1335 = ipa_get_ith_polymorhic_call_context (args, idx);
1337 if (edge_ctx_ptr)
1338 edge_ctx = *edge_ctx_ptr;
1340 if (jfunc->type == IPA_JF_PASS_THROUGH
1341 || jfunc->type == IPA_JF_ANCESTOR)
1343 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1344 int src_idx;
1345 ipcp_lattice<ipa_polymorphic_call_context> *src_lat;
1347 /* TODO: Once we figure out how to propagate speculations, it will
1348 probably be a good idea to switch to speculation if type_preserved is
1349 not set instead of punting. */
1350 if (jfunc->type == IPA_JF_PASS_THROUGH)
1352 if (ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1353 goto prop_fail;
1354 type_preserved = ipa_get_jf_pass_through_type_preserved (jfunc);
1355 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1357 else
1359 type_preserved = ipa_get_jf_ancestor_type_preserved (jfunc);
1360 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1363 src_lat = ipa_get_poly_ctx_lat (caller_info, src_idx);
1364 /* If we would need to clone the caller and cannot, do not propagate. */
1365 if (!ipcp_versionable_function_p (cs->caller)
1366 && (src_lat->contains_variable
1367 || (src_lat->values_count > 1)))
1368 goto prop_fail;
1370 ipcp_value<ipa_polymorphic_call_context> *src_val;
1371 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1373 ipa_polymorphic_call_context cur = src_val->value;
1375 if (!type_preserved)
1376 cur.possible_dynamic_type_change (cs->in_polymorphic_cdtor);
1377 if (jfunc->type == IPA_JF_ANCESTOR)
1378 cur.offset_by (ipa_get_jf_ancestor_offset (jfunc));
1379 /* TODO: In cases we know how the context is going to be used,
1380 we can improve the result by passing proper OTR_TYPE. */
1381 cur.combine_with (edge_ctx);
1382 if (!cur.useless_p ())
1384 if (src_lat->contains_variable
1385 && !edge_ctx.equal_to (cur))
1386 ret |= dest_lat->set_contains_variable ();
1387 ret |= dest_lat->add_value (cur, cs, src_val, src_idx);
1388 added_sth = true;
1394 prop_fail:
1395 if (!added_sth)
1397 if (!edge_ctx.useless_p ())
1398 ret |= dest_lat->add_value (edge_ctx, cs);
1399 else
1400 ret |= dest_lat->set_contains_variable ();
1403 return ret;
1406 /* Propagate alignments across jump function JFUNC that is associated with
1407 edge CS and update DEST_LAT accordingly. */
1409 static bool
1410 propagate_alignment_accross_jump_function (struct cgraph_edge *cs,
1411 struct ipa_jump_func *jfunc,
1412 struct ipcp_param_lattices *dest_lat)
1414 if (alignment_bottom_p (dest_lat))
1415 return false;
1417 ipa_alignment cur;
1418 cur.known = false;
1419 if (jfunc->alignment.known)
1420 cur = jfunc->alignment;
1421 else if (jfunc->type == IPA_JF_PASS_THROUGH
1422 || jfunc->type == IPA_JF_ANCESTOR)
1424 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1425 struct ipcp_param_lattices *src_lats;
1426 HOST_WIDE_INT offset = 0;
1427 int src_idx;
1429 if (jfunc->type == IPA_JF_PASS_THROUGH)
1431 enum tree_code op = ipa_get_jf_pass_through_operation (jfunc);
1432 if (op != NOP_EXPR)
1434 if (op != POINTER_PLUS_EXPR
1435 && op != PLUS_EXPR
1436 && op != MINUS_EXPR)
1437 goto prop_fail;
1438 tree operand = ipa_get_jf_pass_through_operand (jfunc);
1439 if (!tree_fits_shwi_p (operand))
1440 goto prop_fail;
1441 offset = tree_to_shwi (operand);
1443 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1445 else
1447 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1448 offset = ipa_get_jf_ancestor_offset (jfunc);
1451 src_lats = ipa_get_parm_lattices (caller_info, src_idx);
1452 if (!src_lats->alignment.known
1453 || alignment_bottom_p (src_lats))
1454 goto prop_fail;
1456 cur = src_lats->alignment;
1457 cur.misalign = (cur.misalign + offset) % cur.align;
1460 if (cur.known)
1462 if (!dest_lat->alignment.known)
1464 dest_lat->alignment = cur;
1465 return true;
1467 else if (dest_lat->alignment.align == cur.align
1468 && dest_lat->alignment.misalign == cur.misalign)
1469 return false;
1472 prop_fail:
1473 set_alignment_to_bottom (dest_lat);
1474 return true;
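/* For example (purely illustrative): if the source parameter has known
   alignment 16 with misalignment 4 and the pass-through jump function adds an
   offset of 24, the code above derives alignment 16 with misalignment
   (4 + 24) % 16, i.e. 12, for the destination.  */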
1477 /* If DEST_PLATS already has aggregate items, check that aggs_by_ref matches
1478 NEW_AGGS_BY_REF and if not, mark all aggs as bottoms and return true (in all
1479 other cases, return false). If there are no aggregate items, set
1480 aggs_by_ref to NEW_AGGS_BY_REF. */
1482 static bool
1483 set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
1484 bool new_aggs_by_ref)
1486 if (dest_plats->aggs)
1488 if (dest_plats->aggs_by_ref != new_aggs_by_ref)
1490 set_agg_lats_to_bottom (dest_plats);
1491 return true;
1494 else
1495 dest_plats->aggs_by_ref = new_aggs_by_ref;
1496 return false;
1499 /* Walk aggregate lattices in DEST_PLATS from ***AGLAT on, until ***aglat is an
1500 already existing lattice for the given OFFSET and SIZE, marking all skipped
1501 lattices as containing variable and checking for overlaps. If there is no
1502 already existing lattice for the OFFSET and VAL_SIZE, create one, initialize
1503 it with offset, size and contains_variable to PRE_EXISTING, and return true,
1504    unless there are too many already. If there are too many, return false. If
1505 there are overlaps turn whole DEST_PLATS to bottom and return false. If any
1506 skipped lattices were newly marked as containing variable, set *CHANGE to
1507 true. */
1509 static bool
1510 merge_agg_lats_step (struct ipcp_param_lattices *dest_plats,
1511 HOST_WIDE_INT offset, HOST_WIDE_INT val_size,
1512 struct ipcp_agg_lattice ***aglat,
1513 bool pre_existing, bool *change)
1515 gcc_checking_assert (offset >= 0);
1517 while (**aglat && (**aglat)->offset < offset)
1519 if ((**aglat)->offset + (**aglat)->size > offset)
1521 set_agg_lats_to_bottom (dest_plats);
1522 return false;
1524 *change |= (**aglat)->set_contains_variable ();
1525 *aglat = &(**aglat)->next;
1528 if (**aglat && (**aglat)->offset == offset)
1530 if ((**aglat)->size != val_size
1531 || ((**aglat)->next
1532 && (**aglat)->next->offset < offset + val_size))
1534 set_agg_lats_to_bottom (dest_plats);
1535 return false;
1537 gcc_checking_assert (!(**aglat)->next
1538 || (**aglat)->next->offset >= offset + val_size);
1539 return true;
1541 else
1543 struct ipcp_agg_lattice *new_al;
1545 if (**aglat && (**aglat)->offset < offset + val_size)
1547 set_agg_lats_to_bottom (dest_plats);
1548 return false;
1550 if (dest_plats->aggs_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1551 return false;
1552 dest_plats->aggs_count++;
1553 new_al = (struct ipcp_agg_lattice *) pool_alloc (ipcp_agg_lattice_pool);
1554 memset (new_al, 0, sizeof (*new_al));
1556 new_al->offset = offset;
1557 new_al->size = val_size;
1558 new_al->contains_variable = pre_existing;
1560 new_al->next = **aglat;
1561 **aglat = new_al;
1562 return true;
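/* For example (purely illustrative, offsets and sizes in bits): if DEST_PLATS
   already has aggregate lattices at offsets 0 and 128, both of size 64, then a
   request for offset 64 and size 64 creates a new lattice between them, while
   a request for offset 32 and size 64 overlaps the lattice at offset 0 and
   turns all aggregate lattices of DEST_PLATS to bottom.  */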
1566 /* Set all AGLAT and all other aggregate lattices reachable by next pointers as
1567 containing an unknown value. */
1569 static bool
1570 set_chain_of_aglats_contains_variable (struct ipcp_agg_lattice *aglat)
1572 bool ret = false;
1573 while (aglat)
1575 ret |= aglat->set_contains_variable ();
1576 aglat = aglat->next;
1578 return ret;
1581 /* Merge existing aggregate lattices in SRC_PLATS to DEST_PLATS, subtracting
1582 DELTA_OFFSET. CS is the call graph edge and SRC_IDX the index of the source
1583 parameter used for lattice value sources. Return true if DEST_PLATS changed
1584 in any way. */
1586 static bool
1587 merge_aggregate_lattices (struct cgraph_edge *cs,
1588 struct ipcp_param_lattices *dest_plats,
1589 struct ipcp_param_lattices *src_plats,
1590 int src_idx, HOST_WIDE_INT offset_delta)
1592 bool pre_existing = dest_plats->aggs != NULL;
1593 struct ipcp_agg_lattice **dst_aglat;
1594 bool ret = false;
1596 if (set_check_aggs_by_ref (dest_plats, src_plats->aggs_by_ref))
1597 return true;
1598 if (src_plats->aggs_bottom)
1599 return set_agg_lats_contain_variable (dest_plats);
1600 if (src_plats->aggs_contain_variable)
1601 ret |= set_agg_lats_contain_variable (dest_plats);
1602 dst_aglat = &dest_plats->aggs;
1604 for (struct ipcp_agg_lattice *src_aglat = src_plats->aggs;
1605 src_aglat;
1606 src_aglat = src_aglat->next)
1608 HOST_WIDE_INT new_offset = src_aglat->offset - offset_delta;
1610 if (new_offset < 0)
1611 continue;
1612 if (merge_agg_lats_step (dest_plats, new_offset, src_aglat->size,
1613 &dst_aglat, pre_existing, &ret))
1615 struct ipcp_agg_lattice *new_al = *dst_aglat;
1617 dst_aglat = &(*dst_aglat)->next;
1618 if (src_aglat->bottom)
1620 ret |= new_al->set_contains_variable ();
1621 continue;
1623 if (src_aglat->contains_variable)
1624 ret |= new_al->set_contains_variable ();
1625 for (ipcp_value<tree> *val = src_aglat->values;
1626 val;
1627 val = val->next)
1628 ret |= new_al->add_value (val->value, cs, val, src_idx,
1629 src_aglat->offset);
1631 else if (dest_plats->aggs_bottom)
1632 return true;
1634 ret |= set_chain_of_aglats_contains_variable (*dst_aglat);
1635 return ret;
1638 /* Determine whether there is anything to propagate from SRC_PLATS through a
1639    pass-through JFUNC and if so, whether it conforms to the
1640 rules about propagating values passed by reference. */
1642 static bool
1643 agg_pass_through_permissible_p (struct ipcp_param_lattices *src_plats,
1644 struct ipa_jump_func *jfunc)
1646 return src_plats->aggs
1647 && (!src_plats->aggs_by_ref
1648 || ipa_get_jf_pass_through_agg_preserved (jfunc));
1651 /* Propagate aggregate values across jump function JFUNC that is associated
1652    with edge CS and put the values into DEST_PLATS. */
1654 static bool
1655 propagate_aggs_accross_jump_function (struct cgraph_edge *cs,
1656 struct ipa_jump_func *jfunc,
1657 struct ipcp_param_lattices *dest_plats)
1659 bool ret = false;
1661 if (dest_plats->aggs_bottom)
1662 return false;
1664 if (jfunc->type == IPA_JF_PASS_THROUGH
1665 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
1667 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1668 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1669 struct ipcp_param_lattices *src_plats;
1671 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1672 if (agg_pass_through_permissible_p (src_plats, jfunc))
1674 /* Currently we do not produce clobber aggregate jump
1675 functions, replace with merging when we do. */
1676 gcc_assert (!jfunc->agg.items);
1677 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats,
1678 src_idx, 0);
1680 else
1681 ret |= set_agg_lats_contain_variable (dest_plats);
1683 else if (jfunc->type == IPA_JF_ANCESTOR
1684 && ipa_get_jf_ancestor_agg_preserved (jfunc))
1686 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1687 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1688 struct ipcp_param_lattices *src_plats;
1690 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1691 if (src_plats->aggs && src_plats->aggs_by_ref)
1693 /* Currently we do not produce clobber aggregate jump
1694 functions, replace with merging when we do. */
1695 gcc_assert (!jfunc->agg.items);
1696 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats, src_idx,
1697 ipa_get_jf_ancestor_offset (jfunc));
1699 else if (!src_plats->aggs_by_ref)
1700 ret |= set_agg_lats_to_bottom (dest_plats);
1701 else
1702 ret |= set_agg_lats_contain_variable (dest_plats);
1704 else if (jfunc->agg.items)
1706 bool pre_existing = dest_plats->aggs != NULL;
1707 struct ipcp_agg_lattice **aglat = &dest_plats->aggs;
1708 struct ipa_agg_jf_item *item;
1709 int i;
1711 if (set_check_aggs_by_ref (dest_plats, jfunc->agg.by_ref))
1712 return true;
1714 FOR_EACH_VEC_ELT (*jfunc->agg.items, i, item)
1716 HOST_WIDE_INT val_size;
1718 if (item->offset < 0)
1719 continue;
1720 gcc_checking_assert (is_gimple_ip_invariant (item->value));
1721 val_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (item->value)));
1723 if (merge_agg_lats_step (dest_plats, item->offset, val_size,
1724 &aglat, pre_existing, &ret))
1726 ret |= (*aglat)->add_value (item->value, cs, NULL, 0, 0);
1727 aglat = &(*aglat)->next;
1729 else if (dest_plats->aggs_bottom)
1730 return true;
1733 ret |= set_chain_of_aglats_contains_variable (*aglat);
1735 else
1736 ret |= set_agg_lats_contain_variable (dest_plats);
1738 return ret;
1741 /* Propagate constants from the caller to the callee of CS. */
1744 static bool
1745 propagate_constants_accross_call (struct cgraph_edge *cs)
1747 struct ipa_node_params *callee_info;
1748 enum availability availability;
1749 struct cgraph_node *callee, *alias_or_thunk;
1750 struct ipa_edge_args *args;
1751 bool ret = false;
1752 int i, args_count, parms_count;
1754 callee = cs->callee->function_symbol (&availability);
1755 if (!callee->definition)
1756 return false;
1757 gcc_checking_assert (callee->has_gimple_body_p ());
1758 callee_info = IPA_NODE_REF (callee);
1760 args = IPA_EDGE_REF (cs);
1761 args_count = ipa_get_cs_argument_count (args);
1762 parms_count = ipa_get_param_count (callee_info);
1763 if (parms_count == 0)
1764 return false;
1766 /* No propagation through instrumentation thunks is available yet.
1767 It should be possible with proper mapping of call args and
1768 instrumented callee params in the propagation loop below. But
1769 this case mostly occurs when legacy code calls instrumented code
1770 and it is not a primary target for optimizations.
1771 We detect instrumentation thunks in aliases and thunks chain by
1772 checking instrumentation_clone flag for chain source and target.
1773 Going through instrumentation thunks we always have it changed
1774 from 0 to 1 and all other nodes do not change it. */
1775 if (!cs->callee->instrumentation_clone
1776 && callee->instrumentation_clone)
1778 for (i = 0; i < parms_count; i++)
1779 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
1780 i));
1781 return ret;
1784 /* If this call goes through a thunk we must not propagate to the first (0th)
1785 parameter. However, we might need to uncover a thunk from below a series
1786 of aliases first. */
1787 alias_or_thunk = cs->callee;
1788 while (alias_or_thunk->alias)
1789 alias_or_thunk = alias_or_thunk->get_alias_target ();
1790 if (alias_or_thunk->thunk.thunk_p)
1792 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
1793 0));
1794 i = 1;
1796 else
1797 i = 0;
1799 for (; (i < args_count) && (i < parms_count); i++)
1801 struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
1802 struct ipcp_param_lattices *dest_plats;
1804 dest_plats = ipa_get_parm_lattices (callee_info, i);
1805 if (availability == AVAIL_INTERPOSABLE)
1806 ret |= set_all_contains_variable (dest_plats);
1807 else
1809 ret |= propagate_scalar_accross_jump_function (cs, jump_func,
1810 &dest_plats->itself);
1811 ret |= propagate_context_accross_jump_function (cs, jump_func, i,
1812 &dest_plats->ctxlat);
1813 ret |= propagate_alignment_accross_jump_function (cs, jump_func,
1814 dest_plats);
1815 ret |= propagate_aggs_accross_jump_function (cs, jump_func,
1816 dest_plats);
1819 for (; i < parms_count; i++)
1820 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info, i));
1822 return ret;
1825 /* If an indirect edge IE can be turned into a direct one based on KNOWN_CSTS,
1826 KNOWN_CONTEXTS, KNOWN_AGGS or AGG_REPS return the destination. The latter
1827 three can be NULL. If AGG_REPS is not NULL, KNOWN_AGGS is ignored. */
1829 static tree
1830 ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
1831 vec<tree> known_csts,
1832 vec<ipa_polymorphic_call_context> known_contexts,
1833 vec<ipa_agg_jump_function_p> known_aggs,
1834 struct ipa_agg_replacement_value *agg_reps,
1835 bool *speculative)
1837 int param_index = ie->indirect_info->param_index;
1838 HOST_WIDE_INT anc_offset;
1839 tree t;
1840 tree target = NULL;
1842 *speculative = false;
1844 if (param_index == -1
1845 || known_csts.length () <= (unsigned int) param_index)
1846 return NULL_TREE;
1848 if (!ie->indirect_info->polymorphic)
1850 tree t;
1852 if (ie->indirect_info->agg_contents)
1854 if (agg_reps)
1856 t = NULL;
1857 while (agg_reps)
1859 if (agg_reps->index == param_index
1860 && agg_reps->offset == ie->indirect_info->offset
1861 && agg_reps->by_ref == ie->indirect_info->by_ref)
1863 t = agg_reps->value;
1864 break;
1866 agg_reps = agg_reps->next;
1869 else if (known_aggs.length () > (unsigned int) param_index)
1871 struct ipa_agg_jump_function *agg;
1872 agg = known_aggs[param_index];
1873 t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
1874 ie->indirect_info->by_ref);
1876 else
1877 t = NULL;
1879 else
1880 t = known_csts[param_index];
1882 if (t
1883 && TREE_CODE (t) == ADDR_EXPR
1884 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
1885 return TREE_OPERAND (t, 0);
1886 else
1887 return NULL_TREE;
1890 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
1891 return NULL_TREE;
1893 gcc_assert (!ie->indirect_info->agg_contents);
1894 anc_offset = ie->indirect_info->offset;
1896 t = NULL;
1898 /* Try to work out the value of the virtual table pointer from replacements. */
1899 if (!t && agg_reps && !ie->indirect_info->by_ref)
1901 while (agg_reps)
1903 if (agg_reps->index == param_index
1904 && agg_reps->offset == ie->indirect_info->offset
1905 && agg_reps->by_ref)
1907 t = agg_reps->value;
1908 break;
1910 agg_reps = agg_reps->next;
1914 /* Try to work out the value of the virtual table pointer from known
1915 aggregate values. */
1916 if (!t && known_aggs.length () > (unsigned int) param_index
1917 && !ie->indirect_info->by_ref)
1919 struct ipa_agg_jump_function *agg;
1920 agg = known_aggs[param_index];
1921 t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
1922 true);
1925 /* If we found the virtual table pointer, look up the target. */
1926 if (t)
1928 tree vtable;
1929 unsigned HOST_WIDE_INT offset;
1930 if (vtable_pointer_value_to_vtable (t, &vtable, &offset))
1932 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
1933 vtable, offset);
1934 if (target)
1936 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
1937 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
1938 || !possible_polymorphic_call_target_p
1939 (ie, cgraph_node::get (target)))
1940 target = ipa_impossible_devirt_target (ie, target);
1941 *speculative = ie->indirect_info->vptr_changed;
1942 if (!*speculative)
1943 return target;
1948 /* Do we know the constant value of the pointer? */
1949 if (!t)
1950 t = known_csts[param_index];
1952 gcc_checking_assert (!t || TREE_CODE (t) != TREE_BINFO);
1954 ipa_polymorphic_call_context context;
1955 if (known_contexts.length () > (unsigned int) param_index)
1957 context = known_contexts[param_index];
1958 context.offset_by (anc_offset);
1959 if (ie->indirect_info->vptr_changed)
1960 context.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
1961 ie->indirect_info->otr_type);
1962 if (t)
1964 ipa_polymorphic_call_context ctx2 = ipa_polymorphic_call_context
1965 (t, ie->indirect_info->otr_type, anc_offset);
1966 if (!ctx2.useless_p ())
1967 context.combine_with (ctx2, ie->indirect_info->otr_type);
1970 else if (t)
1971 context = ipa_polymorphic_call_context (t, ie->indirect_info->otr_type,
1972 anc_offset);
1973 else
1974 return NULL_TREE;
1976 vec <cgraph_node *>targets;
1977 bool final;
1979 targets = possible_polymorphic_call_targets
1980 (ie->indirect_info->otr_type,
1981 ie->indirect_info->otr_token,
1982 context, &final);
1983 if (!final || targets.length () > 1)
1985 struct cgraph_node *node;
1986 if (*speculative)
1987 return target;
1988 if (!opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
1989 || ie->speculative || !ie->maybe_hot_p ())
1990 return NULL;
1991 node = try_speculative_devirtualization (ie->indirect_info->otr_type,
1992 ie->indirect_info->otr_token,
1993 context);
1994 if (node)
1996 *speculative = true;
1997 target = node->decl;
1999 else
2000 return NULL;
2002 else
2004 *speculative = false;
2005 if (targets.length () == 1)
2006 target = targets[0]->decl;
2007 else
2008 target = ipa_impossible_devirt_target (ie, NULL_TREE);
2011 if (target && !possible_polymorphic_call_target_p (ie,
2012 cgraph_node::get (target)))
2013 target = ipa_impossible_devirt_target (ie, target);
2015 return target;
2019 /* If an indirect edge IE can be turned into a direct one based on KNOWN_CSTS,
2020 KNOWN_CONTEXTS (which can be vNULL) or KNOWN_AGGS (which also can be vNULL)
2021 return the destination. */
2023 tree
2024 ipa_get_indirect_edge_target (struct cgraph_edge *ie,
2025 vec<tree> known_csts,
2026 vec<ipa_polymorphic_call_context> known_contexts,
2027 vec<ipa_agg_jump_function_p> known_aggs,
2028 bool *speculative)
2030 return ipa_get_indirect_edge_target_1 (ie, known_csts, known_contexts,
2031 known_aggs, NULL, speculative);
2034 /* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
2035 and KNOWN_CONTEXTS. */
2037 static int
2038 devirtualization_time_bonus (struct cgraph_node *node,
2039 vec<tree> known_csts,
2040 vec<ipa_polymorphic_call_context> known_contexts,
2041 vec<ipa_agg_jump_function_p> known_aggs)
2043 struct cgraph_edge *ie;
2044 int res = 0;
2046 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
2048 struct cgraph_node *callee;
2049 struct inline_summary *isummary;
2050 enum availability avail;
2051 tree target;
2052 bool speculative;
2054 target = ipa_get_indirect_edge_target (ie, known_csts, known_contexts,
2055 known_aggs, &speculative);
2056 if (!target)
2057 continue;
2059 /* Only bare minimum benefit for clearly un-inlineable targets. */
2060 res += 1;
2061 callee = cgraph_node::get (target);
2062 if (!callee || !callee->definition)
2063 continue;
2064 callee = callee->function_symbol (&avail);
2065 if (avail < AVAIL_AVAILABLE)
2066 continue;
2067 isummary = inline_summaries->get (callee);
2068 if (!isummary->inlinable)
2069 continue;
2071 /* FIXME: The values below need re-considering and perhaps also
2072 integrating into the cost metrics, at least in some very basic way. */
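/* For illustration only: a non-speculative devirtualized call to a tiny
   inlinable callee currently gets 1 + 31, its speculative counterpart
   1 + 15 (the bonus is halved by the integer division), and a callee that
   merely fits MAX_INLINE_INSNS_AUTO or is declared inline still gets 1 + 7.  */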
2073 if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
2074 res += 31 / ((int)speculative + 1);
2075 else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
2076 res += 15 / ((int)speculative + 1);
2077 else if (isummary->size <= MAX_INLINE_INSNS_AUTO
2078 || DECL_DECLARED_INLINE_P (callee->decl))
2079 res += 7 / ((int)speculative + 1);
2082 return res;
2085 /* Return time bonus incurred because of HINTS. */
2087 static int
2088 hint_time_bonus (inline_hints hints)
2090 int result = 0;
2091 if (hints & (INLINE_HINT_loop_iterations | INLINE_HINT_loop_stride))
2092 result += PARAM_VALUE (PARAM_IPA_CP_LOOP_HINT_BONUS);
2093 if (hints & INLINE_HINT_array_index)
2094 result += PARAM_VALUE (PARAM_IPA_CP_ARRAY_INDEX_HINT_BONUS);
2095 return result;
2098 /* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
2099 and SIZE_COST, the sum of frequencies of incoming edges to the potential
2100 new clone in FREQ_SUM and the sum of their profile counts in COUNT_SUM. */
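/* A rough, made-up example of the evaluation below: with a time_benefit of
   100, a freq_sum of 200 and a size_cost of 40, the profile-less branch
   computes 100 * 200 / 40 = 500 and compares that against
   PARAM_IPA_CP_EVAL_THRESHOLD.  With profile feedback, freq_sum is replaced
   by count_sum scaled into a 0..1000 factor of max_count.  */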
2102 static bool
2103 good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
2104 int freq_sum, gcov_type count_sum, int size_cost)
2106 if (time_benefit == 0
2107 || !opt_for_fn (node->decl, flag_ipa_cp_clone)
2108 || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
2109 return false;
2111 gcc_assert (size_cost > 0);
2113 if (max_count)
2115 int factor = (count_sum * 1000) / max_count;
2116 int64_t evaluation = (((int64_t) time_benefit * factor)
2117 / size_cost);
2119 if (dump_file && (dump_flags & TDF_DETAILS))
2120 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
2121 "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
2122 ") -> evaluation: " "%"PRId64
2123 ", threshold: %i\n",
2124 time_benefit, size_cost, (HOST_WIDE_INT) count_sum,
2125 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
2127 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
2129 else
2131 int64_t evaluation = (((int64_t) time_benefit * freq_sum)
2132 / size_cost);
2134 if (dump_file && (dump_flags & TDF_DETAILS))
2135 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
2136 "size: %i, freq_sum: %i) -> evaluation: "
2137 "%"PRId64 ", threshold: %i\n",
2138 time_benefit, size_cost, freq_sum, evaluation,
2139 PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
2141 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
2145 /* Return all context independent values from aggregate lattices in PLATS in a
2146 vector. Return NULL if there are none. */
2148 static vec<ipa_agg_jf_item, va_gc> *
2149 context_independent_aggregate_values (struct ipcp_param_lattices *plats)
2151 vec<ipa_agg_jf_item, va_gc> *res = NULL;
2153 if (plats->aggs_bottom
2154 || plats->aggs_contain_variable
2155 || plats->aggs_count == 0)
2156 return NULL;
2158 for (struct ipcp_agg_lattice *aglat = plats->aggs;
2159 aglat;
2160 aglat = aglat->next)
2161 if (aglat->is_single_const ())
2163 struct ipa_agg_jf_item item;
2164 item.offset = aglat->offset;
2165 item.value = aglat->values->value;
2166 vec_safe_push (res, item);
2168 return res;
2171 /* Allocate KNOWN_CSTS, KNOWN_CONTEXTS and, if non-NULL, KNOWN_AGGS and
2172 populate them with values of parameters that are known independent of the
2173 context. INFO describes the function. If REMOVABLE_PARAMS_COST is
2174 non-NULL, the movement cost of all removable parameters will be stored in
2175 it. */
2177 static bool
2178 gather_context_independent_values (struct ipa_node_params *info,
2179 vec<tree> *known_csts,
2180 vec<ipa_polymorphic_call_context>
2181 *known_contexts,
2182 vec<ipa_agg_jump_function> *known_aggs,
2183 int *removable_params_cost)
2185 int i, count = ipa_get_param_count (info);
2186 bool ret = false;
2188 known_csts->create (0);
2189 known_contexts->create (0);
2190 known_csts->safe_grow_cleared (count);
2191 known_contexts->safe_grow_cleared (count);
2192 if (known_aggs)
2194 known_aggs->create (0);
2195 known_aggs->safe_grow_cleared (count);
2198 if (removable_params_cost)
2199 *removable_params_cost = 0;
2201 for (i = 0; i < count ; i++)
2203 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2204 ipcp_lattice<tree> *lat = &plats->itself;
2206 if (lat->is_single_const ())
2208 ipcp_value<tree> *val = lat->values;
2209 gcc_checking_assert (TREE_CODE (val->value) != TREE_BINFO);
2210 (*known_csts)[i] = val->value;
2211 if (removable_params_cost)
2212 *removable_params_cost
2213 += estimate_move_cost (TREE_TYPE (val->value), false);
2214 ret = true;
2216 else if (removable_params_cost
2217 && !ipa_is_param_used (info, i))
2218 *removable_params_cost
2219 += ipa_get_param_move_cost (info, i);
2221 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
2222 if (ctxlat->is_single_const ())
2224 (*known_contexts)[i] = ctxlat->values->value;
2225 ret = true;
2228 if (known_aggs)
2230 vec<ipa_agg_jf_item, va_gc> *agg_items;
2231 struct ipa_agg_jump_function *ajf;
2233 agg_items = context_independent_aggregate_values (plats);
2234 ajf = &(*known_aggs)[i];
2235 ajf->items = agg_items;
2236 ajf->by_ref = plats->aggs_by_ref;
2237 ret |= agg_items != NULL;
2241 return ret;
2244 /* The current interface in ipa-inline-analysis requires a pointer vector.
2245 Create it.
2247 FIXME: That interface should be re-worked, this is slightly silly. Still,
2248 I'd like to discuss how to change it first and this demonstrates the
2249 issue. */
2251 static vec<ipa_agg_jump_function_p>
2252 agg_jmp_p_vec_for_t_vec (vec<ipa_agg_jump_function> known_aggs)
2254 vec<ipa_agg_jump_function_p> ret;
2255 struct ipa_agg_jump_function *ajf;
2256 int i;
2258 ret.create (known_aggs.length ());
2259 FOR_EACH_VEC_ELT (known_aggs, i, ajf)
2260 ret.quick_push (ajf);
2261 return ret;
2264 /* Perform time and size measurement of NODE with the context given in
2265 KNOWN_CSTS, KNOWN_CONTEXTS and KNOWN_AGGS, calculate the benefit and cost
2266 given BASE_TIME of the node without specialization, REMOVABLE_PARAMS_COST of
2267 all context-independent removable parameters and EST_MOVE_COST, the estimated
2268 movement cost of the considered parameter, and store the results into VAL. */
2270 static void
2271 perform_estimation_of_a_value (cgraph_node *node, vec<tree> known_csts,
2272 vec<ipa_polymorphic_call_context> known_contexts,
2273 vec<ipa_agg_jump_function_p> known_aggs_ptrs,
2274 int base_time, int removable_params_cost,
2275 int est_move_cost, ipcp_value_base *val)
2277 int time, size, time_benefit;
2278 inline_hints hints;
2280 estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
2281 known_aggs_ptrs, &size, &time,
2282 &hints);
2283 time_benefit = base_time - time
2284 + devirtualization_time_bonus (node, known_csts, known_contexts,
2285 known_aggs_ptrs)
2286 + hint_time_bonus (hints)
2287 + removable_params_cost + est_move_cost;
2289 gcc_checking_assert (size >= 0);
2290 /* The inliner-heuristics based estimates may think that in certain
2291 contexts some functions do not have any size at all but we want
2292 all specializations to have at least a tiny cost, not least to avoid
2293 dividing by zero later on. */
2294 if (size == 0)
2295 size = 1;
2297 val->local_time_benefit = time_benefit;
2298 val->local_size_cost = size;
2301 /* Iterate over known values of parameters of NODE and estimate the local
2302 effects in terms of time and size they have. */
2304 static void
2305 estimate_local_effects (struct cgraph_node *node)
2307 struct ipa_node_params *info = IPA_NODE_REF (node);
2308 int i, count = ipa_get_param_count (info);
2309 vec<tree> known_csts;
2310 vec<ipa_polymorphic_call_context> known_contexts;
2311 vec<ipa_agg_jump_function> known_aggs;
2312 vec<ipa_agg_jump_function_p> known_aggs_ptrs;
2313 bool always_const;
2314 int base_time = inline_summaries->get (node)->time;
2315 int removable_params_cost;
2317 if (!count || !ipcp_versionable_function_p (node))
2318 return;
2320 if (dump_file && (dump_flags & TDF_DETAILS))
2321 fprintf (dump_file, "\nEstimating effects for %s/%i, base_time: %i.\n",
2322 node->name (), node->order, base_time);
2324 always_const = gather_context_independent_values (info, &known_csts,
2325 &known_contexts, &known_aggs,
2326 &removable_params_cost);
2327 known_aggs_ptrs = agg_jmp_p_vec_for_t_vec (known_aggs);
2328 if (always_const)
2330 struct caller_statistics stats;
2331 inline_hints hints;
2332 int time, size;
2334 init_caller_stats (&stats);
2335 node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
2336 false);
2337 estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
2338 known_aggs_ptrs, &size, &time, &hints);
2339 time -= devirtualization_time_bonus (node, known_csts, known_contexts,
2340 known_aggs_ptrs);
2341 time -= hint_time_bonus (hints);
2342 time -= removable_params_cost;
2343 size -= stats.n_calls * removable_params_cost;
2345 if (dump_file)
2346 fprintf (dump_file, " - context independent values, size: %i, "
2347 "time_benefit: %i\n", size, base_time - time);
2349 if (size <= 0
2350 || node->will_be_removed_from_program_if_no_direct_calls_p ())
2352 info->do_clone_for_all_contexts = true;
2353 base_time = time;
2355 if (dump_file)
2356 fprintf (dump_file, " Decided to specialize for all "
2357 "known contexts, code not going to grow.\n");
2359 else if (good_cloning_opportunity_p (node, base_time - time,
2360 stats.freq_sum, stats.count_sum,
2361 size))
2363 if (size + overall_size <= max_new_size)
2365 info->do_clone_for_all_contexts = true;
2366 base_time = time;
2367 overall_size += size;
2369 if (dump_file)
2370 fprintf (dump_file, " Decided to specialize for all "
2371 "known contexts, growth deemed beneficial.\n");
2373 else if (dump_file && (dump_flags & TDF_DETAILS))
2374 fprintf (dump_file, " Not cloning for all contexts because "
2375 "max_new_size would be reached with %li.\n",
2376 size + overall_size);
2380 for (i = 0; i < count ; i++)
2382 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2383 ipcp_lattice<tree> *lat = &plats->itself;
2384 ipcp_value<tree> *val;
2386 if (lat->bottom
2387 || !lat->values
2388 || known_csts[i])
2389 continue;
2391 for (val = lat->values; val; val = val->next)
2393 gcc_checking_assert (TREE_CODE (val->value) != TREE_BINFO);
2394 known_csts[i] = val->value;
2396 int emc = estimate_move_cost (TREE_TYPE (val->value), true);
2397 perform_estimation_of_a_value (node, known_csts, known_contexts,
2398 known_aggs_ptrs, base_time,
2399 removable_params_cost, emc, val);
2401 if (dump_file && (dump_flags & TDF_DETAILS))
2403 fprintf (dump_file, " - estimates for value ");
2404 print_ipcp_constant_value (dump_file, val->value);
2405 fprintf (dump_file, " for ");
2406 ipa_dump_param (dump_file, info, i);
2407 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
2408 val->local_time_benefit, val->local_size_cost);
2411 known_csts[i] = NULL_TREE;
2414 for (i = 0; i < count; i++)
2416 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2418 if (!plats->virt_call)
2419 continue;
2421 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
2422 ipcp_value<ipa_polymorphic_call_context> *val;
2424 if (ctxlat->bottom
2425 || !ctxlat->values
2426 || !known_contexts[i].useless_p ())
2427 continue;
2429 for (val = ctxlat->values; val; val = val->next)
2431 known_contexts[i] = val->value;
2432 perform_estimation_of_a_value (node, known_csts, known_contexts,
2433 known_aggs_ptrs, base_time,
2434 removable_params_cost, 0, val);
2436 if (dump_file && (dump_flags & TDF_DETAILS))
2438 fprintf (dump_file, " - estimates for polymorphic context ");
2439 print_ipcp_constant_value (dump_file, val->value);
2440 fprintf (dump_file, " for ");
2441 ipa_dump_param (dump_file, info, i);
2442 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
2443 val->local_time_benefit, val->local_size_cost);
2446 known_contexts[i] = ipa_polymorphic_call_context ();
2449 for (i = 0; i < count ; i++)
2451 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2452 struct ipa_agg_jump_function *ajf;
2453 struct ipcp_agg_lattice *aglat;
2455 if (plats->aggs_bottom || !plats->aggs)
2456 continue;
2458 ajf = &known_aggs[i];
2459 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2461 ipcp_value<tree> *val;
2462 if (aglat->bottom || !aglat->values
2463 /* If the following is true, the one value is in known_aggs. */
2464 || (!plats->aggs_contain_variable
2465 && aglat->is_single_const ()))
2466 continue;
2468 for (val = aglat->values; val; val = val->next)
2470 struct ipa_agg_jf_item item;
2472 item.offset = aglat->offset;
2473 item.value = val->value;
2474 vec_safe_push (ajf->items, item);
2476 perform_estimation_of_a_value (node, known_csts, known_contexts,
2477 known_aggs_ptrs, base_time,
2478 removable_params_cost, 0, val);
2480 if (dump_file && (dump_flags & TDF_DETAILS))
2482 fprintf (dump_file, " - estimates for value ");
2483 print_ipcp_constant_value (dump_file, val->value);
2484 fprintf (dump_file, " for ");
2485 ipa_dump_param (dump_file, info, i);
2486 fprintf (dump_file, "[%soffset: " HOST_WIDE_INT_PRINT_DEC
2487 "]: time_benefit: %i, size: %i\n",
2488 plats->aggs_by_ref ? "ref " : "",
2489 aglat->offset,
2490 val->local_time_benefit, val->local_size_cost);
2493 ajf->items->pop ();
2498 for (i = 0; i < count ; i++)
2499 vec_free (known_aggs[i].items);
2501 known_csts.release ();
2502 known_contexts.release ();
2503 known_aggs.release ();
2504 known_aggs_ptrs.release ();
2508 /* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
2509 topological sort of values. */
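/* This is essentially Tarjan's strongly connected components algorithm run
   over the graph of dependencies between values: dfs and low_link implement
   the usual DFS numbering, the explicit stack collects members of the
   current SCC and each finished SCC is chained via scc_next and prepended to
   values_topo for later processing by propagate_effects.  */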
2511 template <typename valtype>
2512 void
2513 value_topo_info<valtype>::add_val (ipcp_value<valtype> *cur_val)
2515 ipcp_value_source<valtype> *src;
2517 if (cur_val->dfs)
2518 return;
2520 dfs_counter++;
2521 cur_val->dfs = dfs_counter;
2522 cur_val->low_link = dfs_counter;
2524 cur_val->topo_next = stack;
2525 stack = cur_val;
2526 cur_val->on_stack = true;
2528 for (src = cur_val->sources; src; src = src->next)
2529 if (src->val)
2531 if (src->val->dfs == 0)
2533 add_val (src->val);
2534 if (src->val->low_link < cur_val->low_link)
2535 cur_val->low_link = src->val->low_link;
2537 else if (src->val->on_stack
2538 && src->val->dfs < cur_val->low_link)
2539 cur_val->low_link = src->val->dfs;
2542 if (cur_val->dfs == cur_val->low_link)
2544 ipcp_value<valtype> *v, *scc_list = NULL;
2548 v = stack;
2549 stack = v->topo_next;
2550 v->on_stack = false;
2552 v->scc_next = scc_list;
2553 scc_list = v;
2555 while (v != cur_val);
2557 cur_val->topo_next = values_topo;
2558 values_topo = cur_val;
2562 /* Add all values in lattices associated with NODE to the topological sort if
2563 they are not there yet. */
2565 static void
2566 add_all_node_vals_to_toposort (cgraph_node *node, ipa_topo_info *topo)
2568 struct ipa_node_params *info = IPA_NODE_REF (node);
2569 int i, count = ipa_get_param_count (info);
2571 for (i = 0; i < count ; i++)
2573 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2574 ipcp_lattice<tree> *lat = &plats->itself;
2575 struct ipcp_agg_lattice *aglat;
2577 if (!lat->bottom)
2579 ipcp_value<tree> *val;
2580 for (val = lat->values; val; val = val->next)
2581 topo->constants.add_val (val);
2584 if (!plats->aggs_bottom)
2585 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2586 if (!aglat->bottom)
2588 ipcp_value<tree> *val;
2589 for (val = aglat->values; val; val = val->next)
2590 topo->constants.add_val (val);
2593 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
2594 if (!ctxlat->bottom)
2596 ipcp_value<ipa_polymorphic_call_context> *ctxval;
2597 for (ctxval = ctxlat->values; ctxval; ctxval = ctxval->next)
2598 topo->contexts.add_val (ctxval);
2603 /* One pass of constants propagation along the call graph edges, from callers
2604 to callees (requires topological ordering in TOPO), iterate over strongly
2605 connected components. */
2607 static void
2608 propagate_constants_topo (struct ipa_topo_info *topo)
2610 int i;
2612 for (i = topo->nnodes - 1; i >= 0; i--)
2614 unsigned j;
2615 struct cgraph_node *v, *node = topo->order[i];
2616 vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
2618 /* First, iteratively propagate within the strongly connected component
2619 until all lattices stabilize. */
2620 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
2621 if (v->has_gimple_body_p ())
2622 push_node_to_stack (topo, v);
2624 v = pop_node_from_stack (topo);
2625 while (v)
2627 struct cgraph_edge *cs;
2629 for (cs = v->callees; cs; cs = cs->next_callee)
2630 if (ipa_edge_within_scc (cs)
2631 && propagate_constants_accross_call (cs))
2632 push_node_to_stack (topo, cs->callee);
2633 v = pop_node_from_stack (topo);
2636 /* Afterwards, propagate along edges leading out of the SCC, calculate
2637 the local effects of the discovered constants and add all valid values
2638 to their topological sort. */
2639 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
2640 if (v->has_gimple_body_p ())
2642 struct cgraph_edge *cs;
2644 estimate_local_effects (v);
2645 add_all_node_vals_to_toposort (v, topo);
2646 for (cs = v->callees; cs; cs = cs->next_callee)
2647 if (!ipa_edge_within_scc (cs))
2648 propagate_constants_accross_call (cs);
2650 cycle_nodes.release ();
2655 /* Return the sum of A and B if neither of them is bigger than INT_MAX/2,
2656 otherwise return the bigger one. */
2658 static int
2659 safe_add (int a, int b)
2661 if (a > INT_MAX/2 || b > INT_MAX/2)
2662 return a > b ? a : b;
2663 else
2664 return a + b;
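/* For instance, safe_add (3, 4) is simply 7, whereas safe_add (INT_MAX, 7)
   stays at INT_MAX because one operand already exceeds INT_MAX/2; propagated
   benefits and costs therefore saturate instead of overflowing.  */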
2668 /* Propagate the estimated effects of individual values along the topological
2669 order, from the dependent values to those they depend on. */
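/* Note that the time benefit and size cost of a whole SCC are accumulated
   first and then credited to every source value reachable through a
   maybe-hot edge, so a value is charged with everything that becomes
   possible once it is known.  */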
2671 template <typename valtype>
2672 void
2673 value_topo_info<valtype>::propagate_effects ()
2675 ipcp_value<valtype> *base;
2677 for (base = values_topo; base; base = base->topo_next)
2679 ipcp_value_source<valtype> *src;
2680 ipcp_value<valtype> *val;
2681 int time = 0, size = 0;
2683 for (val = base; val; val = val->scc_next)
2685 time = safe_add (time,
2686 val->local_time_benefit + val->prop_time_benefit);
2687 size = safe_add (size, val->local_size_cost + val->prop_size_cost);
2690 for (val = base; val; val = val->scc_next)
2691 for (src = val->sources; src; src = src->next)
2692 if (src->val
2693 && src->cs->maybe_hot_p ())
2695 src->val->prop_time_benefit = safe_add (time,
2696 src->val->prop_time_benefit);
2697 src->val->prop_size_cost = safe_add (size,
2698 src->val->prop_size_cost);
2704 /* Propagate constants, polymorphic contexts and their effects from the
2705 summaries interprocedurally. */
2707 static void
2708 ipcp_propagate_stage (struct ipa_topo_info *topo)
2710 struct cgraph_node *node;
2712 if (dump_file)
2713 fprintf (dump_file, "\n Propagating constants:\n\n");
2715 if (in_lto_p)
2716 ipa_update_after_lto_read ();
2719 FOR_EACH_DEFINED_FUNCTION (node)
2721 struct ipa_node_params *info = IPA_NODE_REF (node);
2723 determine_versionability (node);
2724 if (node->has_gimple_body_p ())
2726 info->lattices = XCNEWVEC (struct ipcp_param_lattices,
2727 ipa_get_param_count (info));
2728 initialize_node_lattices (node);
2730 if (node->definition && !node->alias)
2731 overall_size += inline_summaries->get (node)->self_size;
2732 if (node->count > max_count)
2733 max_count = node->count;
2736 max_new_size = overall_size;
2737 if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
2738 max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
2739 max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;
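/* A worked instance of the formula above: with an overall_size of 10000 that
   already exceeds PARAM_LARGE_UNIT_INSNS and an ipcp-unit-growth value of 10,
   max_new_size becomes 10000 + 10000 * 10 / 100 + 1 = 11001.  */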
2741 if (dump_file)
2742 fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
2743 overall_size, max_new_size);
2745 propagate_constants_topo (topo);
2746 #ifdef ENABLE_CHECKING
2747 ipcp_verify_propagated_values ();
2748 #endif
2749 topo->constants.propagate_effects ();
2750 topo->contexts.propagate_effects ();
2752 if (dump_file)
2754 fprintf (dump_file, "\nIPA lattices after all propagation:\n");
2755 print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
2759 /* Discover indirect outgoing edges of NODE, a new clone with known KNOWN_CSTS,
2760 KNOWN_CONTEXTS and AGGVALS, that can now be resolved and make them direct. */
2762 static void
2763 ipcp_discover_new_direct_edges (struct cgraph_node *node,
2764 vec<tree> known_csts,
2765 vec<ipa_polymorphic_call_context>
2766 known_contexts,
2767 struct ipa_agg_replacement_value *aggvals)
2769 struct cgraph_edge *ie, *next_ie;
2770 bool found = false;
2772 for (ie = node->indirect_calls; ie; ie = next_ie)
2774 tree target;
2775 bool speculative;
2777 next_ie = ie->next_callee;
2778 target = ipa_get_indirect_edge_target_1 (ie, known_csts, known_contexts,
2779 vNULL, aggvals, &speculative);
2780 if (target)
2782 bool agg_contents = ie->indirect_info->agg_contents;
2783 bool polymorphic = ie->indirect_info->polymorphic;
2784 int param_index = ie->indirect_info->param_index;
2785 struct cgraph_edge *cs = ipa_make_edge_direct_to_target (ie, target,
2786 speculative);
2787 found = true;
2789 if (cs && !agg_contents && !polymorphic)
2791 struct ipa_node_params *info = IPA_NODE_REF (node);
2792 int c = ipa_get_controlled_uses (info, param_index);
2793 if (c != IPA_UNDESCRIBED_USE)
2795 struct ipa_ref *to_del;
2797 c--;
2798 ipa_set_controlled_uses (info, param_index, c);
2799 if (dump_file && (dump_flags & TDF_DETAILS))
2800 fprintf (dump_file, " controlled uses count of param "
2801 "%i bumped down to %i\n", param_index, c);
2802 if (c == 0
2803 && (to_del = node->find_reference (cs->callee, NULL, 0)))
2805 if (dump_file && (dump_flags & TDF_DETAILS))
2806 fprintf (dump_file, " and even removing its "
2807 "cloning-created reference\n");
2808 to_del->remove_reference ();
2814 /* Turning calls to direct calls will improve overall summary. */
2815 if (found)
2816 inline_update_overall_summary (node);
2819 /* Vectors of pointers which form linked lists of clones of an original
2820 cgraph edge. */
2822 static vec<cgraph_edge *> next_edge_clone;
2823 static vec<cgraph_edge *> prev_edge_clone;
2825 static inline void
2826 grow_edge_clone_vectors (void)
2828 if (next_edge_clone.length ()
2829 <= (unsigned) symtab->edges_max_uid)
2830 next_edge_clone.safe_grow_cleared (symtab->edges_max_uid + 1);
2831 if (prev_edge_clone.length ()
2832 <= (unsigned) symtab->edges_max_uid)
2833 prev_edge_clone.safe_grow_cleared (symtab->edges_max_uid + 1);
2836 /* Edge duplication hook to grow the appropriate linked list in
2837 next_edge_clone. */
2839 static void
2840 ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2841 void *)
2843 grow_edge_clone_vectors ();
2845 struct cgraph_edge *old_next = next_edge_clone[src->uid];
2846 if (old_next)
2847 prev_edge_clone[old_next->uid] = dst;
2848 prev_edge_clone[dst->uid] = src;
2850 next_edge_clone[dst->uid] = old_next;
2851 next_edge_clone[src->uid] = dst;
2854 /* Hook that is called by cgraph.c when an edge is removed. */
2856 static void
2857 ipcp_edge_removal_hook (struct cgraph_edge *cs, void *)
2859 grow_edge_clone_vectors ();
2861 struct cgraph_edge *prev = prev_edge_clone[cs->uid];
2862 struct cgraph_edge *next = next_edge_clone[cs->uid];
2863 if (prev)
2864 next_edge_clone[prev->uid] = next;
2865 if (next)
2866 prev_edge_clone[next->uid] = prev;
2869 /* See if NODE is a clone with a known aggregate value at a given OFFSET of a
2870 parameter with the given INDEX. */
2872 static tree
2873 get_clone_agg_value (struct cgraph_node *node, HOST_WIDE_INT offset,
2874 int index)
2876 struct ipa_agg_replacement_value *aggval;
2878 aggval = ipa_get_agg_replacements_for_node (node);
2879 while (aggval)
2881 if (aggval->offset == offset
2882 && aggval->index == index)
2883 return aggval->value;
2884 aggval = aggval->next;
2886 return NULL_TREE;
2889 /* Return true if NODE is DEST or its clone for all contexts. */
2891 static bool
2892 same_node_or_its_all_contexts_clone_p (cgraph_node *node, cgraph_node *dest)
2894 if (node == dest)
2895 return true;
2897 struct ipa_node_params *info = IPA_NODE_REF (node);
2898 return info->is_all_contexts_clone && info->ipcp_orig_node == dest;
2901 /* Return true if edge CS does bring about the value described by SRC to node
2902 DEST or its clone for all contexts. */
2904 static bool
2905 cgraph_edge_brings_value_p (cgraph_edge *cs, ipcp_value_source<tree> *src,
2906 cgraph_node *dest)
2908 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2909 enum availability availability;
2910 cgraph_node *real_dest = cs->callee->function_symbol (&availability);
2912 if (!same_node_or_its_all_contexts_clone_p (real_dest, dest)
2913 || availability <= AVAIL_INTERPOSABLE
2914 || caller_info->node_dead)
2915 return false;
2916 if (!src->val)
2917 return true;
2919 if (caller_info->ipcp_orig_node)
2921 tree t;
2922 if (src->offset == -1)
2923 t = caller_info->known_csts[src->index];
2924 else
2925 t = get_clone_agg_value (cs->caller, src->offset, src->index);
2926 return (t != NULL_TREE
2927 && values_equal_for_ipcp_p (src->val->value, t));
2929 else
2931 struct ipcp_agg_lattice *aglat;
2932 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
2933 src->index);
2934 if (src->offset == -1)
2935 return (plats->itself.is_single_const ()
2936 && values_equal_for_ipcp_p (src->val->value,
2937 plats->itself.values->value));
2938 else
2940 if (plats->aggs_bottom || plats->aggs_contain_variable)
2941 return false;
2942 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2943 if (aglat->offset == src->offset)
2944 return (aglat->is_single_const ()
2945 && values_equal_for_ipcp_p (src->val->value,
2946 aglat->values->value));
2948 return false;
2952 /* Return true if edge CS does bring about the value described by SRC to node
2953 DEST or its clone for all contexts. */
2955 static bool
2956 cgraph_edge_brings_value_p (cgraph_edge *cs,
2957 ipcp_value_source<ipa_polymorphic_call_context> *src,
2958 cgraph_node *dest)
2960 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2961 cgraph_node *real_dest = cs->callee->function_symbol ();
2963 if (!same_node_or_its_all_contexts_clone_p (real_dest, dest)
2964 || caller_info->node_dead)
2965 return false;
2966 if (!src->val)
2967 return true;
2969 if (caller_info->ipcp_orig_node)
2970 return (caller_info->known_contexts.length () > (unsigned) src->index)
2971 && values_equal_for_ipcp_p (src->val->value,
2972 caller_info->known_contexts[src->index]);
2974 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
2975 src->index);
2976 return plats->ctxlat.is_single_const ()
2977 && values_equal_for_ipcp_p (src->val->value,
2978 plats->ctxlat.values->value);
2981 /* Get the next clone in the linked list of clones of an edge. */
2983 static inline struct cgraph_edge *
2984 get_next_cgraph_edge_clone (struct cgraph_edge *cs)
2986 return next_edge_clone[cs->uid];
2989 /* Given VAL that is intended for DEST, iterate over all its sources and if
2990 they still hold, add their edge frequency, count and number into *FREQ_SUM,
2991 *COUNT_SUM and *CALLER_COUNT respectively. Return true if any edge is hot. */
2993 template <typename valtype>
2994 static bool
2995 get_info_about_necessary_edges (ipcp_value<valtype> *val, cgraph_node *dest,
2996 int *freq_sum,
2997 gcov_type *count_sum, int *caller_count)
2999 ipcp_value_source<valtype> *src;
3000 int freq = 0, count = 0;
3001 gcov_type cnt = 0;
3002 bool hot = false;
3004 for (src = val->sources; src; src = src->next)
3006 struct cgraph_edge *cs = src->cs;
3007 while (cs)
3009 if (cgraph_edge_brings_value_p (cs, src, dest))
3011 count++;
3012 freq += cs->frequency;
3013 cnt += cs->count;
3014 hot |= cs->maybe_hot_p ();
3016 cs = get_next_cgraph_edge_clone (cs);
3020 *freq_sum = freq;
3021 *count_sum = cnt;
3022 *caller_count = count;
3023 return hot;
3026 /* Return a vector of incoming edges that do bring value VAL to node DEST. It
3027 is assumed their number is known and equal to CALLER_COUNT. */
3029 template <typename valtype>
3030 static vec<cgraph_edge *>
3031 gather_edges_for_value (ipcp_value<valtype> *val, cgraph_node *dest,
3032 int caller_count)
3034 ipcp_value_source<valtype> *src;
3035 vec<cgraph_edge *> ret;
3037 ret.create (caller_count);
3038 for (src = val->sources; src; src = src->next)
3040 struct cgraph_edge *cs = src->cs;
3041 while (cs)
3043 if (cgraph_edge_brings_value_p (cs, src, dest))
3044 ret.quick_push (cs);
3045 cs = get_next_cgraph_edge_clone (cs);
3049 return ret;
3052 /* Construct a replacement map for a known VALUE of a formal parameter with
3053 number PARM_NUM. Return it or NULL if it cannot be created for some reason. */
3055 static struct ipa_replace_map *
3056 get_replacement_map (struct ipa_node_params *info, tree value, int parm_num)
3058 struct ipa_replace_map *replace_map;
3061 replace_map = ggc_alloc<ipa_replace_map> ();
3062 if (dump_file)
3064 fprintf (dump_file, " replacing ");
3065 ipa_dump_param (dump_file, info, parm_num);
3067 fprintf (dump_file, " with const ");
3068 print_generic_expr (dump_file, value, 0);
3069 fprintf (dump_file, "\n");
3071 replace_map->old_tree = NULL;
3072 replace_map->parm_num = parm_num;
3073 replace_map->new_tree = value;
3074 replace_map->replace_p = true;
3075 replace_map->ref_p = false;
3077 return replace_map;
3080 /* Dump new profiling counts. */
3082 static void
3083 dump_profile_updates (struct cgraph_node *orig_node,
3084 struct cgraph_node *new_node)
3086 struct cgraph_edge *cs;
3088 fprintf (dump_file, " setting count of the specialized node to "
3089 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) new_node->count);
3090 for (cs = new_node->callees; cs ; cs = cs->next_callee)
3091 fprintf (dump_file, " edge to %s has count "
3092 HOST_WIDE_INT_PRINT_DEC "\n",
3093 cs->callee->name (), (HOST_WIDE_INT) cs->count);
3095 fprintf (dump_file, " setting count of the original node to "
3096 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) orig_node->count);
3097 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
3098 fprintf (dump_file, " edge to %s is left with "
3099 HOST_WIDE_INT_PRINT_DEC "\n",
3100 cs->callee->name (), (HOST_WIDE_INT) cs->count);
3103 /* After a specialized NEW_NODE version of ORIG_NODE has been created, update
3104 their profile information to reflect this. */
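/* For example (figures invented for illustration): if ORIG_NODE had a count
   of 1000 and the callers now redirected to NEW_NODE account for a count sum
   of 600, NEW_NODE receives a count of 600, ORIG_NODE keeps the remaining 400
   and the counts of their outgoing edges are scaled by roughly 600/1000 and
   400/1000 respectively via apply_probability and GCOV_COMPUTE_SCALE.  */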
3106 static void
3107 update_profiling_info (struct cgraph_node *orig_node,
3108 struct cgraph_node *new_node)
3110 struct cgraph_edge *cs;
3111 struct caller_statistics stats;
3112 gcov_type new_sum, orig_sum;
3113 gcov_type remainder, orig_node_count = orig_node->count;
3115 if (orig_node_count == 0)
3116 return;
3118 init_caller_stats (&stats);
3119 orig_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
3120 false);
3121 orig_sum = stats.count_sum;
3122 init_caller_stats (&stats);
3123 new_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
3124 false);
3125 new_sum = stats.count_sum;
3127 if (orig_node_count < orig_sum + new_sum)
3129 if (dump_file)
3130 fprintf (dump_file, " Problem: node %s/%i has too low count "
3131 HOST_WIDE_INT_PRINT_DEC " while the sum of incoming "
3132 "counts is " HOST_WIDE_INT_PRINT_DEC "\n",
3133 orig_node->name (), orig_node->order,
3134 (HOST_WIDE_INT) orig_node_count,
3135 (HOST_WIDE_INT) (orig_sum + new_sum));
3137 orig_node_count = (orig_sum + new_sum) * 12 / 10;
3138 if (dump_file)
3139 fprintf (dump_file, " proceeding by pretending it was "
3140 HOST_WIDE_INT_PRINT_DEC "\n",
3141 (HOST_WIDE_INT) orig_node_count);
3144 new_node->count = new_sum;
3145 remainder = orig_node_count - new_sum;
3146 orig_node->count = remainder;
3148 for (cs = new_node->callees; cs ; cs = cs->next_callee)
3149 if (cs->frequency)
3150 cs->count = apply_probability (cs->count,
3151 GCOV_COMPUTE_SCALE (new_sum,
3152 orig_node_count));
3153 else
3154 cs->count = 0;
3156 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
3157 cs->count = apply_probability (cs->count,
3158 GCOV_COMPUTE_SCALE (remainder,
3159 orig_node_count));
3161 if (dump_file)
3162 dump_profile_updates (orig_node, new_node);
3165 /* Update the respective profile of specialized NEW_NODE and the original
3166 ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
3167 have been redirected to the specialized version. */
3169 static void
3170 update_specialized_profile (struct cgraph_node *new_node,
3171 struct cgraph_node *orig_node,
3172 gcov_type redirected_sum)
3174 struct cgraph_edge *cs;
3175 gcov_type new_node_count, orig_node_count = orig_node->count;
3177 if (dump_file)
3178 fprintf (dump_file, " the sum of counts of redirected edges is "
3179 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) redirected_sum);
3180 if (orig_node_count == 0)
3181 return;
3183 gcc_assert (orig_node_count >= redirected_sum);
3185 new_node_count = new_node->count;
3186 new_node->count += redirected_sum;
3187 orig_node->count -= redirected_sum;
3189 for (cs = new_node->callees; cs ; cs = cs->next_callee)
3190 if (cs->frequency)
3191 cs->count += apply_probability (cs->count,
3192 GCOV_COMPUTE_SCALE (redirected_sum,
3193 new_node_count));
3194 else
3195 cs->count = 0;
3197 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
3199 gcov_type dec = apply_probability (cs->count,
3200 GCOV_COMPUTE_SCALE (redirected_sum,
3201 orig_node_count));
3202 if (dec < cs->count)
3203 cs->count -= dec;
3204 else
3205 cs->count = 0;
3208 if (dump_file)
3209 dump_profile_updates (orig_node, new_node);
3212 /* Create a specialized version of NODE with known constants in KNOWN_CSTS,
3213 known contexts in KNOWN_CONTEXTS and known aggregate values in AGGVALS and
3214 redirect all edges in CALLERS to it. */
3216 static struct cgraph_node *
3217 create_specialized_node (struct cgraph_node *node,
3218 vec<tree> known_csts,
3219 vec<ipa_polymorphic_call_context> known_contexts,
3220 struct ipa_agg_replacement_value *aggvals,
3221 vec<cgraph_edge *> callers)
3223 struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
3224 vec<ipa_replace_map *, va_gc> *replace_trees = NULL;
3225 struct ipa_agg_replacement_value *av;
3226 struct cgraph_node *new_node;
3227 int i, count = ipa_get_param_count (info);
3228 bitmap args_to_skip;
3230 gcc_assert (!info->ipcp_orig_node);
3232 if (node->local.can_change_signature)
3234 args_to_skip = BITMAP_GGC_ALLOC ();
3235 for (i = 0; i < count; i++)
3237 tree t = known_csts[i];
3239 if (t || !ipa_is_param_used (info, i))
3240 bitmap_set_bit (args_to_skip, i);
3243 else
3245 args_to_skip = NULL;
3246 if (dump_file && (dump_flags & TDF_DETAILS))
3247 fprintf (dump_file, " cannot change function signature\n");
3250 for (i = 0; i < count ; i++)
3252 tree t = known_csts[i];
3253 if (t)
3255 struct ipa_replace_map *replace_map;
3257 gcc_checking_assert (TREE_CODE (t) != TREE_BINFO);
3258 replace_map = get_replacement_map (info, t, i);
3259 if (replace_map)
3260 vec_safe_push (replace_trees, replace_map);
3264 new_node = node->create_virtual_clone (callers, replace_trees,
3265 args_to_skip, "constprop");
3266 ipa_set_node_agg_value_chain (new_node, aggvals);
3267 for (av = aggvals; av; av = av->next)
3268 new_node->maybe_create_reference (av->value, IPA_REF_ADDR, NULL);
3270 if (dump_file && (dump_flags & TDF_DETAILS))
3272 fprintf (dump_file, " the new node is %s/%i.\n",
3273 new_node->name (), new_node->order);
3274 if (known_contexts.exists ())
3276 for (i = 0; i < count ; i++)
3277 if (!known_contexts[i].useless_p ())
3279 fprintf (dump_file, " known ctx %i is ", i);
3280 known_contexts[i].dump (dump_file);
3283 if (aggvals)
3284 ipa_dump_agg_replacement_values (dump_file, aggvals);
3286 ipa_check_create_node_params ();
3287 update_profiling_info (node, new_node);
3288 new_info = IPA_NODE_REF (new_node);
3289 new_info->ipcp_orig_node = node;
3290 new_info->known_csts = known_csts;
3291 new_info->known_contexts = known_contexts;
3293 ipcp_discover_new_direct_edges (new_node, known_csts, known_contexts, aggvals);
3295 callers.release ();
3296 return new_node;
3299 /* Given a NODE and a subset of its CALLERS, try to populate blank slots in
3300 KNOWN_CSTS with constants that are also known for all of the CALLERS. */
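/* A slot is only filled when every caller in CALLERS passes the same
   IPA-CP-equal constant for that parameter; a single disagreeing or unknown
   argument discards the candidate again.  */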
3302 static void
3303 find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
3304 vec<tree> known_csts,
3305 vec<cgraph_edge *> callers)
3307 struct ipa_node_params *info = IPA_NODE_REF (node);
3308 int i, count = ipa_get_param_count (info);
3310 for (i = 0; i < count ; i++)
3312 struct cgraph_edge *cs;
3313 tree newval = NULL_TREE;
3314 int j;
3315 bool first = true;
3317 if (ipa_get_scalar_lat (info, i)->bottom || known_csts[i])
3318 continue;
3320 FOR_EACH_VEC_ELT (callers, j, cs)
3322 struct ipa_jump_func *jump_func;
3323 tree t;
3325 if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
3327 newval = NULL_TREE;
3328 break;
3330 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
3331 t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func);
3332 if (!t
3333 || (newval
3334 && !values_equal_for_ipcp_p (t, newval))
3335 || (!first && !newval))
3337 newval = NULL_TREE;
3338 break;
3340 else
3341 newval = t;
3342 first = false;
3345 if (newval)
3347 if (dump_file && (dump_flags & TDF_DETAILS))
3349 fprintf (dump_file, " adding an extra known scalar value ");
3350 print_ipcp_constant_value (dump_file, newval);
3351 fprintf (dump_file, " for ");
3352 ipa_dump_param (dump_file, info, i);
3353 fprintf (dump_file, "\n");
3356 known_csts[i] = newval;
3361 /* Given a NODE and a subset of its CALLERS, try to populate blank slots in
3362 KNOWN_CONTEXTS with polymorphic contexts that are also known for all of the
3363 CALLERS. */
3365 static void
3366 find_more_contexts_for_caller_subset (cgraph_node *node,
3367 vec<ipa_polymorphic_call_context>
3368 *known_contexts,
3369 vec<cgraph_edge *> callers)
3371 ipa_node_params *info = IPA_NODE_REF (node);
3372 int i, count = ipa_get_param_count (info);
3374 for (i = 0; i < count ; i++)
3376 cgraph_edge *cs;
3378 if (ipa_get_poly_ctx_lat (info, i)->bottom
3379 || (known_contexts->exists ()
3380 && !(*known_contexts)[i].useless_p ()))
3381 continue;
3383 ipa_polymorphic_call_context newval;
3384 bool first = true;
3385 int j;
3387 FOR_EACH_VEC_ELT (callers, j, cs)
3389 if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
3390 return;
3391 ipa_jump_func *jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
3393 ipa_polymorphic_call_context ctx;
3394 ctx = ipa_context_from_jfunc (IPA_NODE_REF (cs->caller), cs, i,
3395 jfunc);
3396 if (first)
3398 newval = ctx;
3399 first = false;
3401 else
3402 newval.meet_with (ctx);
3403 if (newval.useless_p ())
3404 break;
3407 if (!newval.useless_p ())
3409 if (dump_file && (dump_flags & TDF_DETAILS))
3411 fprintf (dump_file, " adding an extra known polymorphic "
3412 "context ");
3413 print_ipcp_constant_value (dump_file, newval);
3414 fprintf (dump_file, " for ");
3415 ipa_dump_param (dump_file, info, i);
3416 fprintf (dump_file, "\n");
3419 if (!known_contexts->exists ())
3420 known_contexts->safe_grow_cleared (ipa_get_param_count (info));
3421 (*known_contexts)[i] = newval;
3427 /* Go through PLATS and create a vector of values consisting of values and
3428 offsets (minus OFFSET) of lattices that contain only a single value. */
3430 static vec<ipa_agg_jf_item>
3431 copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
3433 vec<ipa_agg_jf_item> res = vNULL;
3435 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
3436 return vNULL;
3438 for (struct ipcp_agg_lattice *aglat = plats->aggs; aglat; aglat = aglat->next)
3439 if (aglat->is_single_const ())
3441 struct ipa_agg_jf_item ti;
3442 ti.offset = aglat->offset - offset;
3443 ti.value = aglat->values->value;
3444 res.safe_push (ti);
3446 return res;
3449 /* Intersect all values in INTER with single value lattices in PLATS (while
3450 subtracting OFFSET). */
3452 static void
3453 intersect_with_plats (struct ipcp_param_lattices *plats,
3454 vec<ipa_agg_jf_item> *inter,
3455 HOST_WIDE_INT offset)
3457 struct ipcp_agg_lattice *aglat;
3458 struct ipa_agg_jf_item *item;
3459 int k;
3461 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
3463 inter->release ();
3464 return;
3467 aglat = plats->aggs;
3468 FOR_EACH_VEC_ELT (*inter, k, item)
3470 bool found = false;
3471 if (!item->value)
3472 continue;
3473 while (aglat)
3475 if (aglat->offset - offset > item->offset)
3476 break;
3477 if (aglat->offset - offset == item->offset)
3479 gcc_checking_assert (item->value);
3480 if (values_equal_for_ipcp_p (item->value, aglat->values->value))
3481 found = true;
3482 break;
3484 aglat = aglat->next;
3486 if (!found)
3487 item->value = NULL_TREE;
3491 /* Copy aggregate replacement values of NODE (which is an IPA-CP clone) to the
3492 vector result while subtracting OFFSET from the individual value offsets. */
3494 static vec<ipa_agg_jf_item>
3495 agg_replacements_to_vector (struct cgraph_node *node, int index,
3496 HOST_WIDE_INT offset)
3498 struct ipa_agg_replacement_value *av;
3499 vec<ipa_agg_jf_item> res = vNULL;
3501 for (av = ipa_get_agg_replacements_for_node (node); av; av = av->next)
3502 if (av->index == index
3503 && (av->offset - offset) >= 0)
3505 struct ipa_agg_jf_item item;
3506 gcc_checking_assert (av->value);
3507 item.offset = av->offset - offset;
3508 item.value = av->value;
3509 res.safe_push (item);
3512 return res;
3515 /* Intersect all values in INTER with those that we have already scheduled to
3516 be replaced in parameter number INDEX of NODE, which is an IPA-CP clone
3517 (while subtracting OFFSET). */
3519 static void
3520 intersect_with_agg_replacements (struct cgraph_node *node, int index,
3521 vec<ipa_agg_jf_item> *inter,
3522 HOST_WIDE_INT offset)
3524 struct ipa_agg_replacement_value *srcvals;
3525 struct ipa_agg_jf_item *item;
3526 int i;
3528 srcvals = ipa_get_agg_replacements_for_node (node);
3529 if (!srcvals)
3531 inter->release ();
3532 return;
3535 FOR_EACH_VEC_ELT (*inter, i, item)
3537 struct ipa_agg_replacement_value *av;
3538 bool found = false;
3539 if (!item->value)
3540 continue;
3541 for (av = srcvals; av; av = av->next)
3543 gcc_checking_assert (av->value);
3544 if (av->index == index
3545 && av->offset - offset == item->offset)
3547 if (values_equal_for_ipcp_p (item->value, av->value))
3548 found = true;
3549 break;
3552 if (!found)
3553 item->value = NULL_TREE;
3557 /* Intersect values in INTER with aggregate values that come along edge CS to
3558 parameter number INDEX and return it. If INTER does not actually exist yet,
3559 copy all incoming values to it. If we determine we ended up with no values
3560 whatsoever, return a released vector. */
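/* In other words, the first edge seeds INTER with every (offset, value) pair
   it brings and every further edge can only invalidate entries (by clearing
   item->value) that it does not bring with an equal value; whatever survives
   all callers can become an aggregate replacement.  */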
3562 static vec<ipa_agg_jf_item>
3563 intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
3564 vec<ipa_agg_jf_item> inter)
3566 struct ipa_jump_func *jfunc;
3567 jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), index);
3568 if (jfunc->type == IPA_JF_PASS_THROUGH
3569 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3571 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
3572 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
3574 if (caller_info->ipcp_orig_node)
3576 struct cgraph_node *orig_node = caller_info->ipcp_orig_node;
3577 struct ipcp_param_lattices *orig_plats;
3578 orig_plats = ipa_get_parm_lattices (IPA_NODE_REF (orig_node),
3579 src_idx);
3580 if (agg_pass_through_permissible_p (orig_plats, jfunc))
3582 if (!inter.exists ())
3583 inter = agg_replacements_to_vector (cs->caller, src_idx, 0);
3584 else
3585 intersect_with_agg_replacements (cs->caller, src_idx,
3586 &inter, 0);
3588 else
3590 inter.release ();
3591 return vNULL;
3594 else
3596 struct ipcp_param_lattices *src_plats;
3597 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
3598 if (agg_pass_through_permissible_p (src_plats, jfunc))
3600 /* Currently we do not produce clobber aggregate jump
3601 functions, adjust when we do. */
3602 gcc_checking_assert (!jfunc->agg.items);
3603 if (!inter.exists ())
3604 inter = copy_plats_to_inter (src_plats, 0);
3605 else
3606 intersect_with_plats (src_plats, &inter, 0);
3608 else
3610 inter.release ();
3611 return vNULL;
3615 else if (jfunc->type == IPA_JF_ANCESTOR
3616 && ipa_get_jf_ancestor_agg_preserved (jfunc))
3618 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
3619 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
3620 struct ipcp_param_lattices *src_plats;
3621 HOST_WIDE_INT delta = ipa_get_jf_ancestor_offset (jfunc);
3623 if (caller_info->ipcp_orig_node)
3625 if (!inter.exists ())
3626 inter = agg_replacements_to_vector (cs->caller, src_idx, delta);
3627 else
3628 intersect_with_agg_replacements (cs->caller, src_idx, &inter,
3629 delta);
3631 else
3633 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
3634 /* Currently we do not produce clobber aggregate jump
3635 functions, adjust when we do. */
3636 gcc_checking_assert (!src_plats->aggs || !jfunc->agg.items);
3637 if (!inter.exists ())
3638 inter = copy_plats_to_inter (src_plats, delta);
3639 else
3640 intersect_with_plats (src_plats, &inter, delta);
3643 else if (jfunc->agg.items)
3645 struct ipa_agg_jf_item *item;
3646 int k;
3648 if (!inter.exists ())
3649 for (unsigned i = 0; i < jfunc->agg.items->length (); i++)
3650 inter.safe_push ((*jfunc->agg.items)[i]);
3651 else
3652 FOR_EACH_VEC_ELT (inter, k, item)
3654 int l = 0;
3655 bool found = false;
3657 if (!item->value)
3658 continue;
3660 while ((unsigned) l < jfunc->agg.items->length ())
3662 struct ipa_agg_jf_item *ti;
3663 ti = &(*jfunc->agg.items)[l];
3664 if (ti->offset > item->offset)
3665 break;
3666 if (ti->offset == item->offset)
3668 gcc_checking_assert (ti->value);
3669 if (values_equal_for_ipcp_p (item->value,
3670 ti->value))
3671 found = true;
3672 break;
3674 l++;
3676 if (!found)
3677 item->value = NULL;
3680 else
3682 inter.release ();
3683 return vec<ipa_agg_jf_item>();
3685 return inter;
3688 /* Look at edges in CALLERS and collect all known aggregate values that arrive
3689 from all of them. */
3691 static struct ipa_agg_replacement_value *
3692 find_aggregate_values_for_callers_subset (struct cgraph_node *node,
3693 vec<cgraph_edge *> callers)
3695 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
3696 struct ipa_agg_replacement_value *res;
3697 struct ipa_agg_replacement_value **tail = &res;
3698 struct cgraph_edge *cs;
3699 int i, j, count = ipa_get_param_count (dest_info);
3701 FOR_EACH_VEC_ELT (callers, j, cs)
3703 int c = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
3704 if (c < count)
3705 count = c;
3708 for (i = 0; i < count ; i++)
3710 struct cgraph_edge *cs;
3711 vec<ipa_agg_jf_item> inter = vNULL;
3712 struct ipa_agg_jf_item *item;
3713 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (dest_info, i);
3714 int j;
3716 /* Among other things, the following check should deal with all by_ref
3717 mismatches. */
3718 if (plats->aggs_bottom)
3719 continue;
3721 FOR_EACH_VEC_ELT (callers, j, cs)
3723 inter = intersect_aggregates_with_edge (cs, i, inter);
3725 if (!inter.exists ())
3726 goto next_param;
3729 FOR_EACH_VEC_ELT (inter, j, item)
3731 struct ipa_agg_replacement_value *v;
3733 if (!item->value)
3734 continue;
3736 v = ggc_alloc<ipa_agg_replacement_value> ();
3737 v->index = i;
3738 v->offset = item->offset;
3739 v->value = item->value;
3740 v->by_ref = plats->aggs_by_ref;
3741 *tail = v;
3742 tail = &v->next;
3745 next_param:
3746 if (inter.exists ())
3747 inter.release ();
3749 *tail = NULL;
3750 return res;
3753 /* Turn KNOWN_AGGS into a list of aggregate replacement values. */
3755 static struct ipa_agg_replacement_value *
3756 known_aggs_to_agg_replacement_list (vec<ipa_agg_jump_function> known_aggs)
3758 struct ipa_agg_replacement_value *res;
3759 struct ipa_agg_replacement_value **tail = &res;
3760 struct ipa_agg_jump_function *aggjf;
3761 struct ipa_agg_jf_item *item;
3762 int i, j;
3764 FOR_EACH_VEC_ELT (known_aggs, i, aggjf)
3765 FOR_EACH_VEC_SAFE_ELT (aggjf->items, j, item)
3767 struct ipa_agg_replacement_value *v;
3768 v = ggc_alloc<ipa_agg_replacement_value> ();
3769 v->index = i;
3770 v->offset = item->offset;
3771 v->value = item->value;
3772 v->by_ref = aggjf->by_ref;
3773 *tail = v;
3774 tail = &v->next;
3776 *tail = NULL;
3777 return res;
3780 /* Determine whether CS also brings all scalar values that the NODE is
3781 specialized for. */
3783 static bool
3784 cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
3785 struct cgraph_node *node)
3787 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
3788 int count = ipa_get_param_count (dest_info);
3789 struct ipa_node_params *caller_info;
3790 struct ipa_edge_args *args;
3791 int i;
3793 caller_info = IPA_NODE_REF (cs->caller);
3794 args = IPA_EDGE_REF (cs);
3795 for (i = 0; i < count; i++)
3797 struct ipa_jump_func *jump_func;
3798 tree val, t;
3800 val = dest_info->known_csts[i];
3801 if (!val)
3802 continue;
3804 if (i >= ipa_get_cs_argument_count (args))
3805 return false;
3806 jump_func = ipa_get_ith_jump_func (args, i);
3807 t = ipa_value_from_jfunc (caller_info, jump_func);
3808 if (!t || !values_equal_for_ipcp_p (val, t))
3809 return false;
3811 return true;
3814 /* Determine whether CS also brings all aggregate values that NODE is
3815 specialized for. */
3816 static bool
3817 cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
3818 struct cgraph_node *node)
3820 struct ipa_node_params *orig_caller_info = IPA_NODE_REF (cs->caller);
3821 struct ipa_node_params *orig_node_info;
3822 struct ipa_agg_replacement_value *aggval;
3823 int i, ec, count;
3825 aggval = ipa_get_agg_replacements_for_node (node);
3826 if (!aggval)
3827 return true;
3829 count = ipa_get_param_count (IPA_NODE_REF (node));
3830 ec = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
3831 if (ec < count)
3832 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3833 if (av->index >= ec)
3834 return false;
3836 orig_node_info = IPA_NODE_REF (IPA_NODE_REF (node)->ipcp_orig_node);
3837 if (orig_caller_info->ipcp_orig_node)
3838 orig_caller_info = IPA_NODE_REF (orig_caller_info->ipcp_orig_node);
3840 for (i = 0; i < count; i++)
3842 static vec<ipa_agg_jf_item> values = vec<ipa_agg_jf_item>();
3843 struct ipcp_param_lattices *plats;
3844 bool interesting = false;
3845 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3846 if (av->index == i)
3848 interesting = true;
3849 break;
3851 if (!interesting)
3852 continue;
3854 plats = ipa_get_parm_lattices (orig_node_info, i);
3855 if (plats->aggs_bottom)
3856 return false;
3858 values = intersect_aggregates_with_edge (cs, i, values);
3859 if (!values.exists ())
3860 return false;
3862 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3863 if (av->index == i)
3865 struct ipa_agg_jf_item *item;
3866 int j;
3867 bool found = false;
3868 FOR_EACH_VEC_ELT (values, j, item)
3869 if (item->value
3870 && item->offset == av->offset
3871 && values_equal_for_ipcp_p (item->value, av->value))
3873 found = true;
3874 break;
3876 if (!found)
3878 values.release ();
3879 return false;
3883 return true;
3886 /* Given an original NODE and a VAL for which we have already created a
3887 specialized clone, look whether there are incoming edges that still lead
3888 into the old node but now also bring the requested value and also conform to
3889 all other criteria such that they can be redirected to the specialized node.
3890 This function can therefore redirect the final edge in a SCC. */
3892 template <typename valtype>
3893 static void
3894 perhaps_add_new_callers (cgraph_node *node, ipcp_value<valtype> *val)
3896 ipcp_value_source<valtype> *src;
3897 gcov_type redirected_sum = 0;
3899 for (src = val->sources; src; src = src->next)
3901 struct cgraph_edge *cs = src->cs;
3902 while (cs)
3904 if (cgraph_edge_brings_value_p (cs, src, node)
3905 && cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
3906 && cgraph_edge_brings_all_agg_vals_for_node (cs, val->spec_node))
3908 if (dump_file)
3909 fprintf (dump_file, " - adding an extra caller %s/%i"
3910 " of %s/%i\n",
3911 xstrdup_for_dump (cs->caller->name ()),
3912 cs->caller->order,
3913 xstrdup_for_dump (val->spec_node->name ()),
3914 val->spec_node->order);
3916 cs->redirect_callee_duplicating_thunks (val->spec_node);
3917 val->spec_node->expand_all_artificial_thunks ();
3918 redirected_sum += cs->count;
3920 cs = get_next_cgraph_edge_clone (cs);
3924 if (redirected_sum)
3925 update_specialized_profile (val->spec_node, node, redirected_sum);
3928 /* Return true if KNOWN_CONTEXTS contains at least one useful context. */
3930 static bool
3931 known_contexts_useful_p (vec<ipa_polymorphic_call_context> known_contexts)
3933 ipa_polymorphic_call_context *ctx;
3934 int i;
3936 FOR_EACH_VEC_ELT (known_contexts, i, ctx)
3937 if (!ctx->useless_p ())
3938 return true;
3939 return false;
3942 /* Return a copy of KNOWN_CONTEXTS if it contains at least one useful context, otherwise return vNULL. */
3944 static vec<ipa_polymorphic_call_context>
3945 copy_useful_known_contexts (vec<ipa_polymorphic_call_context> known_contexts)
3947 if (known_contexts_useful_p (known_contexts))
3948 return known_contexts.copy ();
3949 else
3950 return vNULL;
3953 /* Copy KNOWN_CSTS and modify the copy according to VAL and INDEX. Replace
3954 KNOWN_CONTEXTS with a copy of its useful contexts, or with vNULL if none. */
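/* The two overloads below differ only in which of the copied vectors VAL is
   stored into: the tree overload modifies KNOWN_CSTS, the polymorphic-context
   overload modifies KNOWN_CONTEXTS. */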
3956 static void
3957 modify_known_vectors_with_val (vec<tree> *known_csts,
3958 vec<ipa_polymorphic_call_context> *known_contexts,
3959 ipcp_value<tree> *val,
3960 int index)
3962 *known_csts = known_csts->copy ();
3963 *known_contexts = copy_useful_known_contexts (*known_contexts);
3964 (*known_csts)[index] = val->value;
3967 /* Replace KNOWN_CSTS with its copy. Also copy KNOWN_CONTEXTS and modify the
3968 copy according to VAL and INDEX. */
3970 static void
3971 modify_known_vectors_with_val (vec<tree> *known_csts,
3972 vec<ipa_polymorphic_call_context> *known_contexts,
3973 ipcp_value<ipa_polymorphic_call_context> *val,
3974 int index)
3976 *known_csts = known_csts->copy ();
3977 *known_contexts = known_contexts->copy ();
3978 (*known_contexts)[index] = val->value;
3981 /* Return true if OFFSET indicates this was not an aggregate value or there is
3982 a replacement equivalent to VALUE, INDEX and OFFSET among those in the
3983 AGGVALS list. */
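/* This predicate only backs a checking assert after the replacement list for
   a prospective clone has been computed; it verifies that the value the clone
   is created for is indeed among the replacements. */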
3985 DEBUG_FUNCTION bool
3986 ipcp_val_agg_replacement_ok_p (ipa_agg_replacement_value *aggvals,
3987 int index, HOST_WIDE_INT offset, tree value)
3989 if (offset == -1)
3990 return true;
3992 while (aggvals)
3994 if (aggvals->index == index
3995 && aggvals->offset == offset
3996 && values_equal_for_ipcp_p (aggvals->value, value))
3997 return true;
3998 aggvals = aggvals->next;
4000 return false;
4003 /* Return true if OFFSET is minus one because the source of a polymorphic
4004 context cannot be an aggregate value. */
4006 DEBUG_FUNCTION bool
4007 ipcp_val_agg_replacement_ok_p (ipa_agg_replacement_value *,
4008 int , HOST_WIDE_INT offset,
4009 ipa_polymorphic_call_context)
4011 return offset == -1;
4014 /* Decide whether to create a special version of NODE for value VAL of parameter
4015 at the given INDEX. If OFFSET is -1, the value is for the parameter itself,
4016 otherwise it is stored at the given OFFSET of the parameter. KNOWN_CSTS,
4017 KNOWN_CONTEXTS and KNOWN_AGGS describe the other already known values. */
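/* The decision roughly goes: if a clone for VAL already exists, merely try to
   redirect further callers to it; give up if the unit growth budget
   (max_new_size) would be exceeded; otherwise weigh the estimated time
   benefit and size cost against the frequencies and counts of the edges that
   would be redirected, both with and without the benefits propagated from
   callees, and clone only if good_cloning_opportunity_p accepts at least one
   of the two estimates. */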
4019 template <typename valtype>
4020 static bool
4021 decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
4022 ipcp_value<valtype> *val, vec<tree> known_csts,
4023 vec<ipa_polymorphic_call_context> known_contexts)
4025 struct ipa_agg_replacement_value *aggvals;
4026 int freq_sum, caller_count;
4027 gcov_type count_sum;
4028 vec<cgraph_edge *> callers;
4030 if (val->spec_node)
4032 perhaps_add_new_callers (node, val);
4033 return false;
4035 else if (val->local_size_cost + overall_size > max_new_size)
4037 if (dump_file && (dump_flags & TDF_DETAILS))
4038 fprintf (dump_file, " Ignoring candidate value because "
4039 "max_new_size would be reached with %li.\n",
4040 val->local_size_cost + overall_size);
4041 return false;
4043 else if (!get_info_about_necessary_edges (val, node, &freq_sum, &count_sum,
4044 &caller_count))
4045 return false;
4047 if (dump_file && (dump_flags & TDF_DETAILS))
4049 fprintf (dump_file, " - considering value ");
4050 print_ipcp_constant_value (dump_file, val->value);
4051 fprintf (dump_file, " for ");
4052 ipa_dump_param (dump_file, IPA_NODE_REF (node), index);
4053 if (offset != -1)
4054 fprintf (dump_file, ", offset: " HOST_WIDE_INT_PRINT_DEC, offset);
4055 fprintf (dump_file, " (caller_count: %i)\n", caller_count);
4058 if (!good_cloning_opportunity_p (node, val->local_time_benefit,
4059 freq_sum, count_sum,
4060 val->local_size_cost)
4061 && !good_cloning_opportunity_p (node,
4062 val->local_time_benefit
4063 + val->prop_time_benefit,
4064 freq_sum, count_sum,
4065 val->local_size_cost
4066 + val->prop_size_cost))
4067 return false;
4069 if (dump_file)
4070 fprintf (dump_file, " Creating a specialized node of %s/%i.\n",
4071 node->name (), node->order);
4073 callers = gather_edges_for_value (val, node, caller_count);
4074 if (offset == -1)
4075 modify_known_vectors_with_val (&known_csts, &known_contexts, val, index);
4076 else
4078 known_csts = known_csts.copy ();
4079 known_contexts = copy_useful_known_contexts (known_contexts);
4081 find_more_scalar_values_for_callers_subset (node, known_csts, callers);
4082 find_more_contexts_for_caller_subset (node, &known_contexts, callers);
4083 aggvals = find_aggregate_values_for_callers_subset (node, callers);
4084 gcc_checking_assert (ipcp_val_agg_replacement_ok_p (aggvals, index,
4085 offset, val->value));
4086 val->spec_node = create_specialized_node (node, known_csts, known_contexts,
4087 aggvals, callers);
4088 overall_size += val->local_size_cost;
4090 /* TODO: If for some lattice there is only one other known value
4091 left, make a special node for it too. */
4093 return true;
4096 /* Decide whether and what specialized clones of NODE should be created. */
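/* Values are examined parameter by parameter: scalar constants from the
   scalar lattice, aggregate constants from the aggregate lattices and
   polymorphic contexts from the context lattice. If the node was marked for
   cloning for all contexts during the propagation stage, one more clone
   specialized for all context-independent values is created at the end.

   A purely hypothetical illustration (not taken from GCC sources): given

     static int f (int c, int x) { return c ? x + 1 : x - 1; }
     ... f (1, a); ... f (1, b); ... f (0, d); ...

   this function may decide to create a clone of f specialized for c == 1 and
   have the first two calls redirected to it, provided the estimated speedup
   justifies the growth. */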
4098 static bool
4099 decide_whether_version_node (struct cgraph_node *node)
4101 struct ipa_node_params *info = IPA_NODE_REF (node);
4102 int i, count = ipa_get_param_count (info);
4103 vec<tree> known_csts;
4104 vec<ipa_polymorphic_call_context> known_contexts;
4105 vec<ipa_agg_jump_function> known_aggs = vNULL;
4106 bool ret = false;
4108 if (count == 0)
4109 return false;
4111 if (dump_file && (dump_flags & TDF_DETAILS))
4112 fprintf (dump_file, "\nEvaluating opportunities for %s/%i.\n",
4113 node->name (), node->order);
4115 gather_context_independent_values (info, &known_csts, &known_contexts,
4116 info->do_clone_for_all_contexts ? &known_aggs
4117 : NULL, NULL);
4119 for (i = 0; i < count; i++)
4121 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
4122 ipcp_lattice<tree> *lat = &plats->itself;
4123 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
4125 if (!lat->bottom
4126 && !known_csts[i])
4128 ipcp_value<tree> *val;
4129 for (val = lat->values; val; val = val->next)
4130 ret |= decide_about_value (node, i, -1, val, known_csts,
4131 known_contexts);
4134 if (!plats->aggs_bottom)
4136 struct ipcp_agg_lattice *aglat;
4137 ipcp_value<tree> *val;
4138 for (aglat = plats->aggs; aglat; aglat = aglat->next)
4139 if (!aglat->bottom && aglat->values
4140 /* If the following is false, the one value is in
4141 known_aggs. */
4142 && (plats->aggs_contain_variable
4143 || !aglat->is_single_const ()))
4144 for (val = aglat->values; val; val = val->next)
4145 ret |= decide_about_value (node, i, aglat->offset, val,
4146 known_csts, known_contexts);
4149 if (!ctxlat->bottom
4150 && known_contexts[i].useless_p ())
4152 ipcp_value<ipa_polymorphic_call_context> *val;
4153 for (val = ctxlat->values; val; val = val->next)
4154 ret |= decide_about_value (node, i, -1, val, known_csts,
4155 known_contexts);
4158 info = IPA_NODE_REF (node);
4161 if (info->do_clone_for_all_contexts)
4163 struct cgraph_node *clone;
4164 vec<cgraph_edge *> callers;
4166 if (dump_file)
4167 fprintf (dump_file, " - Creating a specialized node of %s/%i "
4168 "for all known contexts.\n", node->name (),
4169 node->order);
4171 callers = node->collect_callers ();
4173 if (!known_contexts_useful_p (known_contexts))
4175 known_contexts.release ();
4176 known_contexts = vNULL;
4178 clone = create_specialized_node (node, known_csts, known_contexts,
4179 known_aggs_to_agg_replacement_list (known_aggs),
4180 callers);
4181 info = IPA_NODE_REF (node);
4182 info->do_clone_for_all_contexts = false;
4183 IPA_NODE_REF (clone)->is_all_contexts_clone = true;
4184 for (i = 0; i < count ; i++)
4185 vec_free (known_aggs[i].items);
4186 known_aggs.release ();
4187 ret = true;
4189 else
4191 known_csts.release ();
4192 known_contexts.release ();
4195 return ret;
4198 /* Transitively mark all callees of NODE within the same SCC as not dead. */
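/* A simple depth-first walk along callee edges that stay within the SCC,
   clearing the node_dead flag of every node it reaches. */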
4200 static void
4201 spread_undeadness (struct cgraph_node *node)
4203 struct cgraph_edge *cs;
4205 for (cs = node->callees; cs; cs = cs->next_callee)
4206 if (ipa_edge_within_scc (cs))
4208 struct cgraph_node *callee;
4209 struct ipa_node_params *info;
4211 callee = cs->callee->function_symbol (NULL);
4212 info = IPA_NODE_REF (callee);
4214 if (info->node_dead)
4216 info->node_dead = 0;
4217 spread_undeadness (callee);
4222 /* Return true if NODE has a caller from outside of its SCC that is not
4223 dead. Worker callback for cgraph_node::call_for_symbol_thunks_and_aliases. */
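/* A caller that is itself a thunk is looked through: the predicate is applied
   recursively to the thunk (and its own thunks and aliases), so a live caller
   reaching NODE only via a thunk is still detected. */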
4225 static bool
4226 has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
4227 void *data ATTRIBUTE_UNUSED)
4229 struct cgraph_edge *cs;
4231 for (cs = node->callers; cs; cs = cs->next_caller)
4232 if (cs->caller->thunk.thunk_p
4233 && cs->caller->call_for_symbol_thunks_and_aliases
4234 (has_undead_caller_from_outside_scc_p, NULL, true))
4235 return true;
4236 else if (!ipa_edge_within_scc (cs)
4237 && !IPA_NODE_REF (cs->caller)->node_dead)
4238 return true;
4239 return false;
4243 /* Identify nodes within the same SCC as NODE which are no longer needed
4244 because of new clones and will be removed as unreachable. */
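/* First, every node in the SCC that could be removed once direct calls are
   gone and that has no live caller outside the SCC is tentatively marked
   dead; undeadness is then spread from the remaining live members so that
   nodes they still call within the SCC are kept. */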
4246 static void
4247 identify_dead_nodes (struct cgraph_node *node)
4249 struct cgraph_node *v;
4250 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4251 if (v->will_be_removed_from_program_if_no_direct_calls_p ()
4252 && !v->call_for_symbol_thunks_and_aliases
4253 (has_undead_caller_from_outside_scc_p, NULL, true))
4254 IPA_NODE_REF (v)->node_dead = 1;
4256 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4257 if (!IPA_NODE_REF (v)->node_dead)
4258 spread_undeadness (v);
4260 if (dump_file && (dump_flags & TDF_DETAILS))
4262 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4263 if (IPA_NODE_REF (v)->node_dead)
4264 fprintf (dump_file, " Marking node as dead: %s/%i.\n",
4265 v->name (), v->order);
4269 /* The decision stage. Iterate over the topological order of call graph nodes
4270 TOPO and make specialized clones if deemed beneficial. */
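/* Within one strongly connected component the decision function is re-run on
   all members until no further clones are created, because a clone made for
   one member can make specializing another member profitable. Once an SCC is
   finished, nodes made redundant by the new clones are marked dead. */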
4272 static void
4273 ipcp_decision_stage (struct ipa_topo_info *topo)
4275 int i;
4277 if (dump_file)
4278 fprintf (dump_file, "\nIPA decision stage:\n\n");
4280 for (i = topo->nnodes - 1; i >= 0; i--)
4282 struct cgraph_node *node = topo->order[i];
4283 bool change = false, iterate = true;
4285 while (iterate)
4287 struct cgraph_node *v;
4288 iterate = false;
4289 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4290 if (v->has_gimple_body_p ()
4291 && ipcp_versionable_function_p (v))
4292 iterate |= decide_whether_version_node (v);
4294 change |= iterate;
4296 if (change)
4297 identify_dead_nodes (node);
4301 /* Look up all alignment information that we have discovered and copy it over
4302 to the transformation summary. */
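/* The lattices are freed together with the rest of the IPA analysis
   structures at the end of this pass, so anything the later per-function
   transformation phase needs has to be copied into the transformation
   summary here. */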
4304 static void
4305 ipcp_store_alignment_results (void)
4307 cgraph_node *node;
4309 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
4311 ipa_node_params *info = IPA_NODE_REF (node);
4312 bool dumped_sth = false;
4313 bool found_useful_result = false;
4315 if (info->ipcp_orig_node)
4316 info = IPA_NODE_REF (info->ipcp_orig_node);
4318 unsigned count = ipa_get_param_count (info);
4319 for (unsigned i = 0; i < count ; i++)
4321 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
4322 if (plats->alignment.known
4323 && plats->alignment.align > 0)
4325 found_useful_result = true;
4326 break;
4329 if (!found_useful_result)
4330 continue;
4332 ipcp_grow_transformations_if_necessary ();
4333 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4334 vec_safe_reserve_exact (ts->alignments, count);
4336 for (unsigned i = 0; i < count ; i++)
4338 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
4340 if (plats->alignment.align == 0)
4341 plats->alignment.known = false;
4343 ts->alignments->quick_push (plats->alignment);
4344 if (!dump_file || !plats->alignment.known)
4345 continue;
4346 if (!dumped_sth)
4348 fprintf (dump_file, "Propagated alignment info for function %s/%i:\n",
4349 node->name (), node->order);
4350 dumped_sth = true;
4352 fprintf (dump_file, " param %i: align: %u, misalign: %u\n",
4353 i, plats->alignment.align, plats->alignment.misalign);
4358 /* The IPCP driver. */
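/* The driver strings the stages together: register edge hooks, create the
   allocation pools, build the topological order, run the propagation and
   decision stages, store the alignment results for the transformation phase
   and finally release all temporary structures. */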
4360 static unsigned int
4361 ipcp_driver (void)
4363 struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
4364 struct cgraph_edge_hook_list *edge_removal_hook_holder;
4365 struct ipa_topo_info topo;
4367 ipa_check_create_node_params ();
4368 ipa_check_create_edge_args ();
4369 grow_edge_clone_vectors ();
4370 edge_duplication_hook_holder =
4371 symtab->add_edge_duplication_hook (&ipcp_edge_duplication_hook, NULL);
4372 edge_removal_hook_holder =
4373 symtab->add_edge_removal_hook (&ipcp_edge_removal_hook, NULL);
4375 ipcp_cst_values_pool = create_alloc_pool ("IPA-CP constant values",
4376 sizeof (ipcp_value<tree>), 32);
4377 ipcp_poly_ctx_values_pool = create_alloc_pool
4378 ("IPA-CP polymorphic contexts",
4379 sizeof (ipcp_value<ipa_polymorphic_call_context>), 32);
4380 ipcp_sources_pool = create_alloc_pool ("IPA-CP value sources",
4381 sizeof (ipcp_value_source<tree>), 64);
4382 ipcp_agg_lattice_pool = create_alloc_pool ("IPA_CP aggregate lattices",
4383 sizeof (struct ipcp_agg_lattice),
4384 32);
4385 if (dump_file)
4387 fprintf (dump_file, "\nIPA structures before propagation:\n");
4388 if (dump_flags & TDF_DETAILS)
4389 ipa_print_all_params (dump_file);
4390 ipa_print_all_jump_functions (dump_file);
4393 /* Topological sort. */
4394 build_toporder_info (&topo);
4395 /* Do the interprocedural propagation. */
4396 ipcp_propagate_stage (&topo);
4397 /* Decide what constant propagation and cloning should be performed. */
4398 ipcp_decision_stage (&topo);
4399 /* Store results of alignment propagation. */
4400 ipcp_store_alignment_results ();
4402 /* Free all IPCP structures. */
4403 free_toporder_info (&topo);
4404 next_edge_clone.release ();
4405 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
4406 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
4407 ipa_free_all_structures_after_ipa_cp ();
4408 if (dump_file)
4409 fprintf (dump_file, "\nIPA constant propagation end\n");
4410 return 0;
4413 /* Initialization and computation of IPCP data structures. This is the initial
4414 intraprocedural analysis of functions, which gathers information to be
4415 propagated later on. */
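/* This merely records whether each function can be versioned at all and lets
   ipa_analyze_node build the parameter descriptors and jump functions. */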
4417 static void
4418 ipcp_generate_summary (void)
4420 struct cgraph_node *node;
4422 if (dump_file)
4423 fprintf (dump_file, "\nIPA constant propagation start:\n");
4424 ipa_register_cgraph_hooks ();
4426 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
4428 node->local.versionable
4429 = tree_versionable_function_p (node->decl);
4430 ipa_analyze_node (node);
4434 /* Write the ipcp summary. */
4436 static void
4437 ipcp_write_summary (void)
4439 ipa_prop_write_jump_functions ();
4442 /* Read ipcp summary. */
4444 static void
4445 ipcp_read_summary (void)
4447 ipa_prop_read_jump_functions ();
4450 namespace {
4452 const pass_data pass_data_ipa_cp =
4454 IPA_PASS, /* type */
4455 "cp", /* name */
4456 OPTGROUP_NONE, /* optinfo_flags */
4457 TV_IPA_CONSTANT_PROP, /* tv_id */
4458 0, /* properties_required */
4459 0, /* properties_provided */
4460 0, /* properties_destroyed */
4461 0, /* todo_flags_start */
4462 ( TODO_dump_symtab | TODO_remove_functions ), /* todo_flags_finish */
4465 class pass_ipa_cp : public ipa_opt_pass_d
4467 public:
4468 pass_ipa_cp (gcc::context *ctxt)
4469 : ipa_opt_pass_d (pass_data_ipa_cp, ctxt,
4470 ipcp_generate_summary, /* generate_summary */
4471 ipcp_write_summary, /* write_summary */
4472 ipcp_read_summary, /* read_summary */
4473 ipcp_write_transformation_summaries, /*
4474 write_optimization_summary */
4475 ipcp_read_transformation_summaries, /*
4476 read_optimization_summary */
4477 NULL, /* stmt_fixup */
4478 0, /* function_transform_todo_flags_start */
4479 ipcp_transform_function, /* function_transform */
4480 NULL) /* variable_transform */
4483 /* opt_pass methods: */
4484 virtual bool gate (function *)
4486 /* FIXME: We should remove the optimize check after we ensure we never run
4487 IPA passes when not optimizing. */
4488 return (flag_ipa_cp && optimize) || in_lto_p;
4491 virtual unsigned int execute (function *) { return ipcp_driver (); }
4493 }; // class pass_ipa_cp
4495 } // anon namespace
4497 ipa_opt_pass_d *
4498 make_pass_ipa_cp (gcc::context *ctxt)
4500 return new pass_ipa_cp (ctxt);
4503 /* Reset all state within ipa-cp.c so that we can rerun the compiler
4504 within the same process. For use by toplev::finalize. */
4506 void
4507 ipa_cp_c_finalize (void)
4509 max_count = 0;
4510 overall_size = 0;
4511 max_new_size = 0;