PR ipa/65028
[official-gcc.git] / gcc / ipa-cp.c
blob bfe4d972e735e06542c2d83341ad8d575f996960
1 /* Interprocedural constant propagation
2 Copyright (C) 2005-2015 Free Software Foundation, Inc.
4 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
5 <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Interprocedural constant propagation (IPA-CP).
25 The goal of this transformation is to
27 1) discover functions which are always invoked with some arguments with the
28 same known constant values and modify the functions so that the
29 subsequent optimizations can take advantage of the knowledge, and
31 2) partial specialization - create specialized versions of functions
32 transformed in this way if some parameters are known constants only in
33 certain contexts but the estimated tradeoff between speedup and code-size cost
34 is deemed good.
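     As a purely illustrative example (hypothetical code; slow_lookup is a
     made-up callee, not anything from GCC): for

       static int
       get_field (int *p, int flag)
       {
         if (flag)
           return slow_lookup (p);
         return *p;
       }

     a program in which every caller passes flag == 0 allows IPA-CP to
     propagate the constant into get_field and let later passes remove the
     dead branch (goal 1).  If only some call sites pass flag == 0, the
     pass may instead create a specialized clone for those contexts and
     redirect just those call sites to it (goal 2), provided the estimated
     speedup/size tradeoff is good.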
36 The algorithm also propagates types and attempts to perform type based
37 devirtualization. Types are propagated much like constants.
39 The algorithm basically consists of three stages. In the first, functions
40 are analyzed one at a time and jump functions are constructed for all known
41 call-sites. In the second phase, the pass propagates information from the
42 jump functions across the call to reveal what values are available at what
43 call sites, performs estimations of effects of known values on functions and
44 their callees, and finally decides what specialized extra versions should be
45 created. In the third, the special versions materialize and appropriate
46 calls are redirected.
48 The algorithm used is to a certain extent based on "Interprocedural Constant
49 Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
50 Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
51 Cooper, Mary W. Hall, and Ken Kennedy.
54 First stage - intraprocedural analysis
55 =======================================
57 This phase computes jump_function and modification flags.
59 A jump function for a call-site represents the values passed as actual
60 arguments of a given call-site. In principle, there are three types of
61 values:
63 Pass through - the caller's formal parameter is passed as an actual
64 argument, possibly with an operation applied to it.
65 Constant - a constant is passed as an actual argument.
66 Unknown - neither of the above.
68 All jump function types are described in detail in ipa-prop.h, together with
69 the data structures that represent them and methods of accessing them.
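     As an illustrative sketch (hypothetical caller and callee, not taken
     from the sources), for

       void
       caller (int x)
       {
         callee (x + 2, 7, lookup_table[x]);
       }

     the first argument gets a pass-through jump function recording a
     PLUS_EXPR by 2 on the caller's formal parameter x, the second argument
     a constant jump function holding 7, and the third typically remains
     unknown.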
71 ipcp_generate_summary() is the main function of the first stage.
73 Second stage - interprocedural analysis
74 ========================================
76 This stage is itself divided into two phases. In the first, we propagate
77 known values over the call graph, in the second, we make cloning decisions.
78 It uses a different algorithm than the one in Callahan's original paper.
80 First, we traverse the functions topologically from callers to callees and,
81 for each strongly connected component (SCC), we propagate constants
82 according to previously computed jump functions. We also record what known
83 values depend on other known values and estimate local effects. Finally, we
84 propagate cumulative information about these effects from dependent values
85 to those on which they depend.
87 Second, we again traverse the call graph in the same topological order and
88 make clones for functions which we know are called with the same values in
89 all contexts and decide about extra specialized clones of functions just for
90 some contexts - these decisions are based on both local estimates and
91 cumulative estimates propagated from callees.
93 ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
94 second stage.
96 Third phase - materialization of clones, call statement updates.
97 ============================================
99 This stage is currently performed by call graph code (mainly in cgraphunit.c
100 and tree-inline.c) according to instructions inserted to the call graph by
101 the second stage. */
103 #include "config.h"
104 #include "system.h"
105 #include "coretypes.h"
106 #include "hash-set.h"
107 #include "machmode.h"
108 #include "vec.h"
109 #include "hash-map.h"
110 #include "double-int.h"
111 #include "input.h"
112 #include "alias.h"
113 #include "symtab.h"
114 #include "options.h"
115 #include "wide-int.h"
116 #include "inchash.h"
117 #include "tree.h"
118 #include "fold-const.h"
119 #include "gimple-fold.h"
120 #include "gimple-expr.h"
121 #include "target.h"
122 #include "predict.h"
123 #include "basic-block.h"
124 #include "is-a.h"
125 #include "plugin-api.h"
126 #include "tm.h"
127 #include "hard-reg-set.h"
128 #include "input.h"
129 #include "function.h"
130 #include "ipa-ref.h"
131 #include "cgraph.h"
132 #include "alloc-pool.h"
133 #include "symbol-summary.h"
134 #include "ipa-prop.h"
135 #include "bitmap.h"
136 #include "tree-pass.h"
137 #include "flags.h"
138 #include "diagnostic.h"
139 #include "tree-pretty-print.h"
140 #include "tree-inline.h"
141 #include "params.h"
142 #include "ipa-inline.h"
143 #include "ipa-utils.h"
145 template <typename valtype> class ipcp_value;
147 /* Describes a particular source for an IPA-CP value. */
149 template <typename valtype>
150 class ipcp_value_source
152 public:
153 /* Aggregate offset of the source, negative if the source is the scalar value of
154 the argument itself. */
155 HOST_WIDE_INT offset;
156 /* The incoming edge that brought the value. */
157 cgraph_edge *cs;
158 /* If the jump function that resulted in this value was a pass-through or an
159 ancestor, this is the ipcp_value of the caller from which the described
160 value has been derived. Otherwise it is NULL. */
161 ipcp_value<valtype> *val;
162 /* Next pointer in a linked list of sources of a value. */
163 ipcp_value_source *next;
164 /* If the jump function that resulted in this value was a pass-through or an
165 ancestor, this is the index of the parameter of the caller the jump
166 function references. */
167 int index;
170 /* Common ancestor for all ipcp_value instantiations. */
172 class ipcp_value_base
174 public:
175 /* Time benefit and size cost that specializing the function for this value
176 would bring about in this function alone. */
177 int local_time_benefit, local_size_cost;
178 /* Time benefit and size cost that specializing the function for this value
179 can bring about in its callees (transitively). */
180 int prop_time_benefit, prop_size_cost;
183 /* Describes one particular value stored in struct ipcp_lattice. */
185 template <typename valtype>
186 class ipcp_value : public ipcp_value_base
188 public:
189 /* The actual value for the given parameter. */
190 valtype value;
191 /* The list of sources from which this value originates. */
192 ipcp_value_source <valtype> *sources;
193 /* Next pointers in a linked list of all values in a lattice. */
194 ipcp_value *next;
195 /* Next pointers in a linked list of values in a strongly connected component
196 of values. */
197 ipcp_value *scc_next;
198 /* Next pointers in a linked list of SCCs of values sorted topologically
199 according to their sources. */
200 ipcp_value *topo_next;
201 /* A specialized node created for this value, NULL if none has been (so far)
202 created. */
203 cgraph_node *spec_node;
204 /* Depth first search number and low link for topological sorting of
205 values. */
206 int dfs, low_link;
207 /* True if this value is currently on the topo-sort stack. */
208 bool on_stack;
210 void add_source (cgraph_edge *cs, ipcp_value *src_val, int src_idx,
211 HOST_WIDE_INT offset);
214 /* Lattice describing potential values of a formal parameter of a function, or
215 a part of an aggregate. TOP is represented by a lattice with zero values
216 and with contains_variable and bottom flags cleared. BOTTOM is represented
217 by a lattice with the bottom flag set. In that case, values and
218 contains_variable flag should be disregarded. */
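/* For illustration (not describing any particular testcase): a lattice for
   an integer parameter holding the values 4 and 7 with contains_variable
   and bottom clear means the parameter is known to always be one of those
   two constants; additionally setting contains_variable admits further,
   unknown values; setting bottom gives up on the parameter entirely.  */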
220 template <typename valtype>
221 class ipcp_lattice
223 public:
224 /* The list of known values and types in this lattice. Note that values are
225 not deallocated if a lattice is set to bottom because there may be value
226 sources referencing them. */
227 ipcp_value<valtype> *values;
228 /* Number of known values and types in this lattice. */
229 int values_count;
230 /* The lattice contains a variable component (in addition to values). */
231 bool contains_variable;
232 /* The value of the lattice is bottom (i.e. variable and unusable for any
233 propagation). */
234 bool bottom;
236 inline bool is_single_const ();
237 inline bool set_to_bottom ();
238 inline bool set_contains_variable ();
239 bool add_value (valtype newval, cgraph_edge *cs,
240 ipcp_value<valtype> *src_val = NULL,
241 int src_idx = 0, HOST_WIDE_INT offset = -1);
242 void print (FILE * f, bool dump_sources, bool dump_benefits);
245 /* Lattice of tree values with an offset to describe a part of an
246 aggregate. */
248 class ipcp_agg_lattice : public ipcp_lattice<tree>
250 public:
251 /* Offset that is being described by this lattice. */
252 HOST_WIDE_INT offset;
253 /* Size so that we don't have to re-compute it every time we traverse the
254 list. Must correspond to TYPE_SIZE of all lat values. */
255 HOST_WIDE_INT size;
256 /* Next element of the linked list. */
257 struct ipcp_agg_lattice *next;
260 /* Structure containing lattices for a parameter itself and for pieces of
261 aggregates that are passed in the parameter or by a reference in a parameter
262 plus some other useful flags. */
264 class ipcp_param_lattices
266 public:
267 /* Lattice describing the value of the parameter itself. */
268 ipcp_lattice<tree> itself;
269 /* Lattice describing the polymorphic contexts of a parameter. */
270 ipcp_lattice<ipa_polymorphic_call_context> ctxlat;
271 /* Lattices describing aggregate parts. */
272 ipcp_agg_lattice *aggs;
273 /* Alignment information. Very basic one value lattice where !known means
274 TOP and a zero alignment means BOTTOM. */
275 ipa_alignment alignment;
276 /* Number of aggregate lattices */
277 int aggs_count;
278 /* True if aggregate data were passed by reference (as opposed to by
279 value). */
280 bool aggs_by_ref;
281 /* All aggregate lattices contain a variable component (in addition to
282 values). */
283 bool aggs_contain_variable;
284 /* The value of all aggregate lattices is bottom (i.e. variable and unusable
285 for any propagation). */
286 bool aggs_bottom;
288 /* There is a virtual call based on this parameter. */
289 bool virt_call;
292 /* Allocation pools for values and their sources in ipa-cp. */
294 alloc_pool ipcp_cst_values_pool;
295 alloc_pool ipcp_poly_ctx_values_pool;
296 alloc_pool ipcp_sources_pool;
297 alloc_pool ipcp_agg_lattice_pool;
299 /* Maximal count found in program. */
301 static gcov_type max_count;
303 /* Original overall size of the program. */
305 static long overall_size, max_new_size;
307 /* Return the param lattices structure corresponding to the Ith formal
308 parameter of the function described by INFO. */
309 static inline struct ipcp_param_lattices *
310 ipa_get_parm_lattices (struct ipa_node_params *info, int i)
312 gcc_assert (i >= 0 && i < ipa_get_param_count (info));
313 gcc_checking_assert (!info->ipcp_orig_node);
314 gcc_checking_assert (info->lattices);
315 return &(info->lattices[i]);
318 /* Return the lattice corresponding to the scalar value of the Ith formal
319 parameter of the function described by INFO. */
320 static inline ipcp_lattice<tree> *
321 ipa_get_scalar_lat (struct ipa_node_params *info, int i)
323 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
324 return &plats->itself;
327 /* Return the lattice corresponding to the polymorphic contexts of the Ith
328 formal parameter of the function described by INFO. */
329 static inline ipcp_lattice<ipa_polymorphic_call_context> *
330 ipa_get_poly_ctx_lat (struct ipa_node_params *info, int i)
332 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
333 return &plats->ctxlat;
336 /* Return whether LAT is a lattice with a single constant and without an
337 undefined value. */
339 template <typename valtype>
340 inline bool
341 ipcp_lattice<valtype>::is_single_const ()
343 if (bottom || contains_variable || values_count != 1)
344 return false;
345 else
346 return true;
349 /* Print V which is extracted from a value in a lattice to F. */
351 static void
352 print_ipcp_constant_value (FILE * f, tree v)
354 if (TREE_CODE (v) == ADDR_EXPR
355 && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
357 fprintf (f, "& ");
358 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)), 0);
360 else
361 print_generic_expr (f, v, 0);
364 /* Print V which is extracted from a value in a lattice to F. */
366 static void
367 print_ipcp_constant_value (FILE * f, ipa_polymorphic_call_context v)
369 v.dump(f, false);
372 /* Print a lattice LAT to F. */
374 template <typename valtype>
375 void
376 ipcp_lattice<valtype>::print (FILE * f, bool dump_sources, bool dump_benefits)
378 ipcp_value<valtype> *val;
379 bool prev = false;
381 if (bottom)
383 fprintf (f, "BOTTOM\n");
384 return;
387 if (!values_count && !contains_variable)
389 fprintf (f, "TOP\n");
390 return;
393 if (contains_variable)
395 fprintf (f, "VARIABLE");
396 prev = true;
397 if (dump_benefits)
398 fprintf (f, "\n");
401 for (val = values; val; val = val->next)
403 if (dump_benefits && prev)
404 fprintf (f, " ");
405 else if (!dump_benefits && prev)
406 fprintf (f, ", ");
407 else
408 prev = true;
410 print_ipcp_constant_value (f, val->value);
412 if (dump_sources)
414 ipcp_value_source<valtype> *s;
416 fprintf (f, " [from:");
417 for (s = val->sources; s; s = s->next)
418 fprintf (f, " %i(%i)", s->cs->caller->order,
419 s->cs->frequency);
420 fprintf (f, "]");
423 if (dump_benefits)
424 fprintf (f, " [loc_time: %i, loc_size: %i, "
425 "prop_time: %i, prop_size: %i]\n",
426 val->local_time_benefit, val->local_size_cost,
427 val->prop_time_benefit, val->prop_size_cost);
429 if (!dump_benefits)
430 fprintf (f, "\n");
433 /* Print all ipcp_lattices of all functions to F. */
435 static void
436 print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
438 struct cgraph_node *node;
439 int i, count;
441 fprintf (f, "\nLattices:\n");
442 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
444 struct ipa_node_params *info;
446 info = IPA_NODE_REF (node);
447 fprintf (f, " Node: %s/%i:\n", node->name (),
448 node->order);
449 count = ipa_get_param_count (info);
450 for (i = 0; i < count; i++)
452 struct ipcp_agg_lattice *aglat;
453 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
454 fprintf (f, " param [%d]: ", i);
455 plats->itself.print (f, dump_sources, dump_benefits);
456 fprintf (f, " ctxs: ");
457 plats->ctxlat.print (f, dump_sources, dump_benefits);
458 if (plats->alignment.known && plats->alignment.align > 0)
459 fprintf (f, " Alignment %u, misalignment %u\n",
460 plats->alignment.align, plats->alignment.misalign);
461 else if (plats->alignment.known)
462 fprintf (f, " Alignment unusable\n");
463 else
464 fprintf (f, " Alignment unknown\n");
465 if (plats->virt_call)
466 fprintf (f, " virt_call flag set\n");
468 if (plats->aggs_bottom)
470 fprintf (f, " AGGS BOTTOM\n");
471 continue;
473 if (plats->aggs_contain_variable)
474 fprintf (f, " AGGS VARIABLE\n");
475 for (aglat = plats->aggs; aglat; aglat = aglat->next)
477 fprintf (f, " %soffset " HOST_WIDE_INT_PRINT_DEC ": ",
478 plats->aggs_by_ref ? "ref " : "", aglat->offset);
479 aglat->print (f, dump_sources, dump_benefits);
485 /* Determine whether it is at all technically possible to create clones of NODE
486 and store this information in the ipa_node_params structure associated
487 with NODE. */
489 static void
490 determine_versionability (struct cgraph_node *node)
492 const char *reason = NULL;
494 /* There are a number of generic reasons functions cannot be versioned. We
495 also cannot remove parameters if there are type attributes such as fnspec
496 present. */
497 if (node->alias || node->thunk.thunk_p)
498 reason = "alias or thunk";
499 else if (!node->local.versionable)
500 reason = "not a tree_versionable_function";
501 else if (node->get_availability () <= AVAIL_INTERPOSABLE)
502 reason = "insufficient body availability";
503 else if (!opt_for_fn (node->decl, optimize)
504 || !opt_for_fn (node->decl, flag_ipa_cp))
505 reason = "non-optimized function";
506 else if (lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (node->decl)))
508 /* Ideally we should clone the SIMD clones themselves and create
509 vector copies of them, so IPA-cp and SIMD clones can happily
510 coexist, but that may not be worth the effort. */
511 reason = "function has SIMD clones";
513 /* Don't clone decls local to a comdat group; it breaks, and for C++
514 decloned constructors, inlining is always better anyway. */
515 else if (node->comdat_local_p ())
516 reason = "comdat-local function";
518 if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
519 fprintf (dump_file, "Function %s/%i is not versionable, reason: %s.\n",
520 node->name (), node->order, reason);
522 node->local.versionable = (reason == NULL);
525 /* Return true if it is at all technically possible to create clones of a
526 NODE. */
528 static bool
529 ipcp_versionable_function_p (struct cgraph_node *node)
531 return node->local.versionable;
534 /* Structure holding accumulated information about callers of a node. */
536 struct caller_statistics
538 gcov_type count_sum;
539 int n_calls, n_hot_calls, freq_sum;
542 /* Initialize fields of STAT to zeroes. */
544 static inline void
545 init_caller_stats (struct caller_statistics *stats)
547 stats->count_sum = 0;
548 stats->n_calls = 0;
549 stats->n_hot_calls = 0;
550 stats->freq_sum = 0;
553 /* Worker callback of call_for_symbol_thunks_and_aliases accumulating statistics of
554 non-thunk incoming edges to NODE. */
556 static bool
557 gather_caller_stats (struct cgraph_node *node, void *data)
559 struct caller_statistics *stats = (struct caller_statistics *) data;
560 struct cgraph_edge *cs;
562 for (cs = node->callers; cs; cs = cs->next_caller)
563 if (!cs->caller->thunk.thunk_p)
565 stats->count_sum += cs->count;
566 stats->freq_sum += cs->frequency;
567 stats->n_calls++;
568 if (cs->maybe_hot_p ())
569 stats->n_hot_calls ++;
571 return false;
577 /* Return true if this NODE is a viable candidate for cloning. */
577 static bool
578 ipcp_cloning_candidate_p (struct cgraph_node *node)
580 struct caller_statistics stats;
582 gcc_checking_assert (node->has_gimple_body_p ());
584 if (!opt_for_fn (node->decl, flag_ipa_cp_clone))
586 if (dump_file)
587 fprintf (dump_file, "Not considering %s for cloning; "
588 "-fipa-cp-clone disabled.\n",
589 node->name ());
590 return false;
593 if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
595 if (dump_file)
596 fprintf (dump_file, "Not considering %s for cloning; "
597 "optimizing it for size.\n",
598 node->name ());
599 return false;
602 init_caller_stats (&stats);
603 node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats, false);
605 if (inline_summaries->get (node)->self_size < stats.n_calls)
607 if (dump_file)
608 fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
609 node->name ());
610 return true;
613 /* When profile is available and function is hot, propagate into it even if
614 calls seem cold; constant propagation can improve the function's speed
615 significantly. */
616 if (max_count)
618 if (stats.count_sum > node->count * 90 / 100)
620 if (dump_file)
621 fprintf (dump_file, "Considering %s for cloning; "
622 "usually called directly.\n",
623 node->name ());
624 return true;
627 if (!stats.n_hot_calls)
629 if (dump_file)
630 fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
631 node->name ());
632 return false;
634 if (dump_file)
635 fprintf (dump_file, "Considering %s for cloning.\n",
636 node->name ());
637 return true;
640 template <typename valtype>
641 class value_topo_info
643 public:
644 /* Head of the linked list of topologically sorted values. */
645 ipcp_value<valtype> *values_topo;
646 /* Stack for creating SCCs, represented by a linked list too. */
647 ipcp_value<valtype> *stack;
648 /* Counter driving the algorithm in add_val_to_toposort. */
649 int dfs_counter;
651 value_topo_info () : values_topo (NULL), stack (NULL), dfs_counter (0)
653 void add_val (ipcp_value<valtype> *cur_val);
654 void propagate_effects ();
657 /* Arrays representing a topological ordering of call graph nodes and a stack
658 of nodes used during constant propagation and also data required to perform
659 topological sort of values and propagation of benefits in the determined
660 order. */
662 class ipa_topo_info
664 public:
665 /* Array with obtained topological order of cgraph nodes. */
666 struct cgraph_node **order;
667 /* Stack of cgraph nodes used during propagation within SCC until all values
668 in the SCC stabilize. */
669 struct cgraph_node **stack;
670 int nnodes, stack_top;
672 value_topo_info<tree> constants;
673 value_topo_info<ipa_polymorphic_call_context> contexts;
675 ipa_topo_info () : order(NULL), stack(NULL), nnodes(0), stack_top(0),
676 constants ()
680 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
682 static void
683 build_toporder_info (struct ipa_topo_info *topo)
685 topo->order = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
686 topo->stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
688 gcc_checking_assert (topo->stack_top == 0);
689 topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
692 /* Free information about strongly connected components and the arrays in
693 TOPO. */
695 static void
696 free_toporder_info (struct ipa_topo_info *topo)
698 ipa_free_postorder_info ();
699 free (topo->order);
700 free (topo->stack);
703 /* Add NODE to the stack in TOPO, unless it is already there. */
705 static inline void
706 push_node_to_stack (struct ipa_topo_info *topo, struct cgraph_node *node)
708 struct ipa_node_params *info = IPA_NODE_REF (node);
709 if (info->node_enqueued)
710 return;
711 info->node_enqueued = 1;
712 topo->stack[topo->stack_top++] = node;
715 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
716 is empty. */
718 static struct cgraph_node *
719 pop_node_from_stack (struct ipa_topo_info *topo)
721 if (topo->stack_top)
723 struct cgraph_node *node;
724 topo->stack_top--;
725 node = topo->stack[topo->stack_top];
726 IPA_NODE_REF (node)->node_enqueued = 0;
727 return node;
729 else
730 return NULL;
733 /* Set lattice LAT to bottom and return true if it previously was not set as
734 such. */
736 template <typename valtype>
737 inline bool
738 ipcp_lattice<valtype>::set_to_bottom ()
740 bool ret = !bottom;
741 bottom = true;
742 return ret;
745 /* Mark lattice as containing an unknown value and return true if it previously
746 was not marked as such. */
748 template <typename valtype>
749 inline bool
750 ipcp_lattice<valtype>::set_contains_variable ()
752 bool ret = !contains_variable;
753 contains_variable = true;
754 return ret;
757 /* Set all aggregate lattices in PLATS to bottom and return true if they were
758 not previously set as such. */
760 static inline bool
761 set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
763 bool ret = !plats->aggs_bottom;
764 plats->aggs_bottom = true;
765 return ret;
768 /* Mark all aggregate lattices in PLATS as containing an unknown value and
769 return true if they were not previously marked as such. */
771 static inline bool
772 set_agg_lats_contain_variable (struct ipcp_param_lattices *plats)
774 bool ret = !plats->aggs_contain_variable;
775 plats->aggs_contain_variable = true;
776 return ret;
779 /* Return true if alignment information in PLATS is known to be unusable. */
781 static inline bool
782 alignment_bottom_p (ipcp_param_lattices *plats)
784 return plats->alignment.known && (plats->alignment.align == 0);
787 /* Set alignment information in PLATS to unusable. Return true if it
788 previously was usable or unknown. */
790 static inline bool
791 set_alignment_to_bottom (ipcp_param_lattices *plats)
793 if (alignment_bottom_p (plats))
794 return false;
795 plats->alignment.known = true;
796 plats->alignment.align = 0;
797 return true;
800 /* Mark both aggregate and scalar lattices as containing an unknown variable,
801 return true if any of them has not been marked as such so far. */
803 static inline bool
804 set_all_contains_variable (struct ipcp_param_lattices *plats)
806 bool ret;
807 ret = plats->itself.set_contains_variable ();
808 ret |= plats->ctxlat.set_contains_variable ();
809 ret |= set_agg_lats_contain_variable (plats);
810 ret |= set_alignment_to_bottom (plats);
811 return ret;
814 /* Initialize ipcp_lattices. */
816 static void
817 initialize_node_lattices (struct cgraph_node *node)
819 struct ipa_node_params *info = IPA_NODE_REF (node);
820 struct cgraph_edge *ie;
821 bool disable = false, variable = false;
822 int i;
824 gcc_checking_assert (node->has_gimple_body_p ());
825 if (!cgraph_local_p (node))
827 /* When cloning is allowed, we can assume that externally visible
828 functions are not called. We will compensate this by cloning
829 later. */
830 if (ipcp_versionable_function_p (node)
831 && ipcp_cloning_candidate_p (node))
832 variable = true;
833 else
834 disable = true;
837 if (disable || variable)
839 for (i = 0; i < ipa_get_param_count (info) ; i++)
841 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
842 if (disable)
844 plats->itself.set_to_bottom ();
845 plats->ctxlat.set_to_bottom ();
846 set_agg_lats_to_bottom (plats);
847 set_alignment_to_bottom (plats);
849 else
850 set_all_contains_variable (plats);
852 if (dump_file && (dump_flags & TDF_DETAILS)
853 && !node->alias && !node->thunk.thunk_p)
854 fprintf (dump_file, "Marking all lattices of %s/%i as %s\n",
855 node->name (), node->order,
856 disable ? "BOTTOM" : "VARIABLE");
859 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
860 if (ie->indirect_info->polymorphic
861 && ie->indirect_info->param_index >= 0)
863 gcc_checking_assert (ie->indirect_info->param_index >= 0);
864 ipa_get_parm_lattices (info,
865 ie->indirect_info->param_index)->virt_call = 1;
869 /* Return the result of a (possibly arithmetic) pass-through jump function
870 JFUNC on the constant value INPUT. Return NULL_TREE if the result cannot be
871 determined or cannot be considered an interprocedural invariant. */
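/* A worked example (illustrative only): for a pass-through jump function
   whose recorded operation is PLUS_EXPR with operand 3, an INPUT of the
   integer constant 4 folds to the constant 7; for a comparison operation
   such as LT_EXPR the result type is boolean_type_node instead of the
   type of INPUT.  */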
873 static tree
874 ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
876 tree restype, res;
878 gcc_checking_assert (is_gimple_ip_invariant (input));
879 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
880 return input;
882 if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc))
883 == tcc_comparison)
884 restype = boolean_type_node;
885 else
886 restype = TREE_TYPE (input);
887 res = fold_binary (ipa_get_jf_pass_through_operation (jfunc), restype,
888 input, ipa_get_jf_pass_through_operand (jfunc));
890 if (res && !is_gimple_ip_invariant (res))
891 return NULL_TREE;
893 return res;
896 /* Return the result of an ancestor jump function JFUNC on the constant value
897 INPUT. Return NULL_TREE if that cannot be determined. */
899 static tree
900 ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
902 gcc_checking_assert (TREE_CODE (input) != TREE_BINFO);
903 if (TREE_CODE (input) == ADDR_EXPR)
905 tree t = TREE_OPERAND (input, 0);
906 t = build_ref_for_offset (EXPR_LOCATION (t), t,
907 ipa_get_jf_ancestor_offset (jfunc),
908 ptr_type_node, NULL, false);
909 return build_fold_addr_expr (t);
911 else
912 return NULL_TREE;
915 /* Determine whether JFUNC evaluates to a single known constant value and if
916 so, return it. Otherwise return NULL. INFO describes the caller node or
917 the one it is inlined to, so that pass-through jump functions can be
918 evaluated. */
920 tree
921 ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
923 if (jfunc->type == IPA_JF_CONST)
924 return ipa_get_jf_constant (jfunc);
925 else if (jfunc->type == IPA_JF_PASS_THROUGH
926 || jfunc->type == IPA_JF_ANCESTOR)
928 tree input;
929 int idx;
931 if (jfunc->type == IPA_JF_PASS_THROUGH)
932 idx = ipa_get_jf_pass_through_formal_id (jfunc);
933 else
934 idx = ipa_get_jf_ancestor_formal_id (jfunc);
936 if (info->ipcp_orig_node)
937 input = info->known_csts[idx];
938 else
940 ipcp_lattice<tree> *lat;
942 if (!info->lattices
943 || idx >= ipa_get_param_count (info))
944 return NULL_TREE;
945 lat = ipa_get_scalar_lat (info, idx);
946 if (!lat->is_single_const ())
947 return NULL_TREE;
948 input = lat->values->value;
951 if (!input)
952 return NULL_TREE;
954 if (jfunc->type == IPA_JF_PASS_THROUGH)
955 return ipa_get_jf_pass_through_result (jfunc, input);
956 else
957 return ipa_get_jf_ancestor_result (jfunc, input);
959 else
960 return NULL_TREE;
963 /* Determine whether JFUNC evaluates to a single known polymorphic context,
964 given that INFO describes the caller node or the one it is inlined to, CS is
965 the call graph edge corresponding to JFUNC and CSIDX is the index of the
966 described parameter. */
968 ipa_polymorphic_call_context
969 ipa_context_from_jfunc (ipa_node_params *info, cgraph_edge *cs, int csidx,
970 ipa_jump_func *jfunc)
972 ipa_edge_args *args = IPA_EDGE_REF (cs);
973 ipa_polymorphic_call_context ctx;
974 ipa_polymorphic_call_context *edge_ctx
975 = cs ? ipa_get_ith_polymorhic_call_context (args, csidx) : NULL;
977 if (edge_ctx && !edge_ctx->useless_p ())
978 ctx = *edge_ctx;
980 if (jfunc->type == IPA_JF_PASS_THROUGH
981 || jfunc->type == IPA_JF_ANCESTOR)
983 ipa_polymorphic_call_context srcctx;
984 int srcidx;
985 bool type_preserved = true;
986 if (jfunc->type == IPA_JF_PASS_THROUGH)
988 if (ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
989 return ctx;
990 type_preserved = ipa_get_jf_pass_through_type_preserved (jfunc);
991 srcidx = ipa_get_jf_pass_through_formal_id (jfunc);
993 else
995 type_preserved = ipa_get_jf_ancestor_type_preserved (jfunc);
996 srcidx = ipa_get_jf_ancestor_formal_id (jfunc);
998 if (info->ipcp_orig_node)
1000 if (info->known_contexts.exists ())
1001 srcctx = info->known_contexts[srcidx];
1003 else
1005 if (!info->lattices
1006 || srcidx >= ipa_get_param_count (info))
1007 return ctx;
1008 ipcp_lattice<ipa_polymorphic_call_context> *lat;
1009 lat = ipa_get_poly_ctx_lat (info, srcidx);
1010 if (!lat->is_single_const ())
1011 return ctx;
1012 srcctx = lat->values->value;
1014 if (srcctx.useless_p ())
1015 return ctx;
1016 if (jfunc->type == IPA_JF_ANCESTOR)
1017 srcctx.offset_by (ipa_get_jf_ancestor_offset (jfunc));
1018 if (!type_preserved)
1019 srcctx.possible_dynamic_type_change (cs->in_polymorphic_cdtor);
1020 srcctx.combine_with (ctx);
1021 return srcctx;
1024 return ctx;
1027 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
1028 bottom, not containing a variable component and without any known value at
1029 the same time. */
1031 DEBUG_FUNCTION void
1032 ipcp_verify_propagated_values (void)
1034 struct cgraph_node *node;
1036 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
1038 struct ipa_node_params *info = IPA_NODE_REF (node);
1039 int i, count = ipa_get_param_count (info);
1041 for (i = 0; i < count; i++)
1043 ipcp_lattice<tree> *lat = ipa_get_scalar_lat (info, i);
1045 if (!lat->bottom
1046 && !lat->contains_variable
1047 && lat->values_count == 0)
1049 if (dump_file)
1051 symtab_node::dump_table (dump_file);
1052 fprintf (dump_file, "\nIPA lattices after constant "
1053 "propagation, before gcc_unreachable:\n");
1054 print_all_lattices (dump_file, true, false);
1057 gcc_unreachable ();
1063 /* Return true iff X and Y should be considered equal values by IPA-CP. */
1065 static bool
1066 values_equal_for_ipcp_p (tree x, tree y)
1068 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
1070 if (x == y)
1071 return true;
1073 if (TREE_CODE (x) == ADDR_EXPR
1074 && TREE_CODE (y) == ADDR_EXPR
1075 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
1076 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
1077 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
1078 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
1079 else
1080 return operand_equal_p (x, y, 0);
1083 /* Return true iff X and Y should be considered equal contexts by IPA-CP. */
1085 static bool
1086 values_equal_for_ipcp_p (ipa_polymorphic_call_context x,
1087 ipa_polymorphic_call_context y)
1089 return x.equal_to (y);
1093 /* Add a new value source to the value represented by THIS, marking that a
1094 value comes from edge CS and (if the underlying jump function is a
1095 pass-through or an ancestor one) from a caller value SRC_VAL of a caller
1096 parameter described by SRC_IDX. OFFSET is negative if the source was the
1097 scalar value of the parameter itself, otherwise it is the offset within an aggregate. */
1099 template <typename valtype>
1100 void
1101 ipcp_value<valtype>::add_source (cgraph_edge *cs, ipcp_value *src_val,
1102 int src_idx, HOST_WIDE_INT offset)
1104 ipcp_value_source<valtype> *src;
1106 src = new (pool_alloc (ipcp_sources_pool)) ipcp_value_source<valtype>;
1107 src->offset = offset;
1108 src->cs = cs;
1109 src->val = src_val;
1110 src->index = src_idx;
1112 src->next = sources;
1113 sources = src;
1116 /* Allocate a new ipcp_value holding a tree constant, initialize its value to
1117 SOURCE and clear all other fields. */
1119 static ipcp_value<tree> *
1120 allocate_and_init_ipcp_value (tree source)
1122 ipcp_value<tree> *val;
1124 val = new (pool_alloc (ipcp_cst_values_pool)) ipcp_value<tree>;
1125 memset (val, 0, sizeof (*val));
1126 val->value = source;
1127 return val;
1130 /* Allocate a new ipcp_value holding a polymorphic context, initialize its
1131 value to SOURCE and clear all other fields. */
1133 static ipcp_value<ipa_polymorphic_call_context> *
1134 allocate_and_init_ipcp_value (ipa_polymorphic_call_context source)
1136 ipcp_value<ipa_polymorphic_call_context> *val;
1138 val = new (pool_alloc (ipcp_poly_ctx_values_pool))
1139 ipcp_value<ipa_polymorphic_call_context>;
1140 memset (val, 0, sizeof (*val));
1141 val->value = source;
1142 return val;
1145 /* Try to add NEWVAL to LAT, potentially creating a new ipcp_value for it. CS,
1146 SRC_VAL, SRC_IDX and OFFSET are meant for add_source and have the same
1147 meaning. OFFSET -1 means the source is scalar and not a part of an
1148 aggregate. */
1150 template <typename valtype>
1151 bool
1152 ipcp_lattice<valtype>::add_value (valtype newval, cgraph_edge *cs,
1153 ipcp_value<valtype> *src_val,
1154 int src_idx, HOST_WIDE_INT offset)
1156 ipcp_value<valtype> *val;
1158 if (bottom)
1159 return false;
1161 for (val = values; val; val = val->next)
1162 if (values_equal_for_ipcp_p (val->value, newval))
1164 if (ipa_edge_within_scc (cs))
1166 ipcp_value_source<valtype> *s;
1167 for (s = val->sources; s ; s = s->next)
1168 if (s->cs == cs)
1169 break;
1170 if (s)
1171 return false;
1174 val->add_source (cs, src_val, src_idx, offset);
1175 return false;
1178 if (values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
1180 /* We can only free sources, not the values themselves, because sources
1181 of other values in this SCC might point to them. */
1182 for (val = values; val; val = val->next)
1184 while (val->sources)
1186 ipcp_value_source<valtype> *src = val->sources;
1187 val->sources = src->next;
1188 pool_free (ipcp_sources_pool, src);
1192 values = NULL;
1193 return set_to_bottom ();
1196 values_count++;
1197 val = allocate_and_init_ipcp_value (newval);
1198 val->add_source (cs, src_val, src_idx, offset);
1199 val->next = values;
1200 values = val;
1201 return true;
1204 /* Propagate values through a pass-through jump function JFUNC associated with
1205 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1206 is the index of the source parameter. */
1208 static bool
1209 propagate_vals_accross_pass_through (cgraph_edge *cs,
1210 ipa_jump_func *jfunc,
1211 ipcp_lattice<tree> *src_lat,
1212 ipcp_lattice<tree> *dest_lat,
1213 int src_idx)
1215 ipcp_value<tree> *src_val;
1216 bool ret = false;
1218 /* Do not create new values when propagating within an SCC because if there
1219 are arithmetic functions with circular dependencies, there is an infinite
1220 number of them and we would just make the lattices bottom. */
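  /* An illustrative case (hypothetical code): a self-recursive

       int f (int i) { ... f (i + 1); ... }

     called from outside with the constant 5 would otherwise generate the
     values 6, 7, 8, ... for its own parameter without bound.  */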
1221 if ((ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1222 && ipa_edge_within_scc (cs))
1223 ret = dest_lat->set_contains_variable ();
1224 else
1225 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1227 tree cstval = ipa_get_jf_pass_through_result (jfunc, src_val->value);
1229 if (cstval)
1230 ret |= dest_lat->add_value (cstval, cs, src_val, src_idx);
1231 else
1232 ret |= dest_lat->set_contains_variable ();
1235 return ret;
1238 /* Propagate values through an ancestor jump function JFUNC associated with
1239 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1240 is the index of the source parameter. */
1242 static bool
1243 propagate_vals_accross_ancestor (struct cgraph_edge *cs,
1244 struct ipa_jump_func *jfunc,
1245 ipcp_lattice<tree> *src_lat,
1246 ipcp_lattice<tree> *dest_lat,
1247 int src_idx)
1249 ipcp_value<tree> *src_val;
1250 bool ret = false;
1252 if (ipa_edge_within_scc (cs))
1253 return dest_lat->set_contains_variable ();
1255 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1257 tree t = ipa_get_jf_ancestor_result (jfunc, src_val->value);
1259 if (t)
1260 ret |= dest_lat->add_value (t, cs, src_val, src_idx);
1261 else
1262 ret |= dest_lat->set_contains_variable ();
1265 return ret;
1268 /* Propagate scalar values across jump function JFUNC that is associated with
1269 edge CS and put the values into DEST_LAT. */
1271 static bool
1272 propagate_scalar_accross_jump_function (struct cgraph_edge *cs,
1273 struct ipa_jump_func *jfunc,
1274 ipcp_lattice<tree> *dest_lat)
1276 if (dest_lat->bottom)
1277 return false;
1279 if (jfunc->type == IPA_JF_CONST)
1281 tree val = ipa_get_jf_constant (jfunc);
1282 return dest_lat->add_value (val, cs, NULL, 0);
1284 else if (jfunc->type == IPA_JF_PASS_THROUGH
1285 || jfunc->type == IPA_JF_ANCESTOR)
1287 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1288 ipcp_lattice<tree> *src_lat;
1289 int src_idx;
1290 bool ret;
1292 if (jfunc->type == IPA_JF_PASS_THROUGH)
1293 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1294 else
1295 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1297 src_lat = ipa_get_scalar_lat (caller_info, src_idx);
1298 if (src_lat->bottom)
1299 return dest_lat->set_contains_variable ();
1301 /* If we would need to clone the caller and cannot, do not propagate. */
1302 if (!ipcp_versionable_function_p (cs->caller)
1303 && (src_lat->contains_variable
1304 || (src_lat->values_count > 1)))
1305 return dest_lat->set_contains_variable ();
1307 if (jfunc->type == IPA_JF_PASS_THROUGH)
1308 ret = propagate_vals_accross_pass_through (cs, jfunc, src_lat,
1309 dest_lat, src_idx);
1310 else
1311 ret = propagate_vals_accross_ancestor (cs, jfunc, src_lat, dest_lat,
1312 src_idx);
1314 if (src_lat->contains_variable)
1315 ret |= dest_lat->set_contains_variable ();
1317 return ret;
1320 /* TODO: We currently do not handle member method pointers in IPA-CP (we only
1321 use it for indirect inlining); we should propagate them too. */
1322 return dest_lat->set_contains_variable ();
1325 /* Propagate polymorphic contexts across jump function JFUNC that is
1326 associated with edge CS and describes argument IDX and put them into DEST_LAT. */
1328 static bool
1329 propagate_context_accross_jump_function (cgraph_edge *cs,
1330 ipa_jump_func *jfunc, int idx,
1331 ipcp_lattice<ipa_polymorphic_call_context> *dest_lat)
1333 ipa_edge_args *args = IPA_EDGE_REF (cs);
1334 if (dest_lat->bottom)
1335 return false;
1336 bool ret = false;
1337 bool added_sth = false;
1338 bool type_preserved = true;
1340 ipa_polymorphic_call_context edge_ctx, *edge_ctx_ptr
1341 = ipa_get_ith_polymorhic_call_context (args, idx);
1343 if (edge_ctx_ptr)
1344 edge_ctx = *edge_ctx_ptr;
1346 if (jfunc->type == IPA_JF_PASS_THROUGH
1347 || jfunc->type == IPA_JF_ANCESTOR)
1349 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1350 int src_idx;
1351 ipcp_lattice<ipa_polymorphic_call_context> *src_lat;
1353 /* TODO: Once we figure out how to propagate speculations, it will
1354 probably be a good idea to switch to speculation if type_preserved is
1355 not set instead of punting. */
1356 if (jfunc->type == IPA_JF_PASS_THROUGH)
1358 if (ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1359 goto prop_fail;
1360 type_preserved = ipa_get_jf_pass_through_type_preserved (jfunc);
1361 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1363 else
1365 type_preserved = ipa_get_jf_ancestor_type_preserved (jfunc);
1366 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1369 src_lat = ipa_get_poly_ctx_lat (caller_info, src_idx);
1370 /* If we would need to clone the caller and cannot, do not propagate. */
1371 if (!ipcp_versionable_function_p (cs->caller)
1372 && (src_lat->contains_variable
1373 || (src_lat->values_count > 1)))
1374 goto prop_fail;
1376 ipcp_value<ipa_polymorphic_call_context> *src_val;
1377 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1379 ipa_polymorphic_call_context cur = src_val->value;
1381 if (!type_preserved)
1382 cur.possible_dynamic_type_change (cs->in_polymorphic_cdtor);
1383 if (jfunc->type == IPA_JF_ANCESTOR)
1384 cur.offset_by (ipa_get_jf_ancestor_offset (jfunc));
1385 /* TODO: In cases we know how the context is going to be used,
1386 we can improve the result by passing proper OTR_TYPE. */
1387 cur.combine_with (edge_ctx);
1388 if (!cur.useless_p ())
1390 if (src_lat->contains_variable
1391 && !edge_ctx.equal_to (cur))
1392 ret |= dest_lat->set_contains_variable ();
1393 ret |= dest_lat->add_value (cur, cs, src_val, src_idx);
1394 added_sth = true;
1400 prop_fail:
1401 if (!added_sth)
1403 if (!edge_ctx.useless_p ())
1404 ret |= dest_lat->add_value (edge_ctx, cs);
1405 else
1406 ret |= dest_lat->set_contains_variable ();
1409 return ret;
1412 /* Propagate alignments across jump function JFUNC that is associated with
1413 edge CS and update DEST_LAT accordingly. */
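/* A worked example (illustrative numbers only): if the source parameter is
   known to have alignment 16 with misalignment 4 and the pass-through adds
   a constant offset of 20 bytes, the propagated value is alignment 16 with
   misalignment (4 + 20) % 16 == 8.  */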
1415 static bool
1416 propagate_alignment_accross_jump_function (struct cgraph_edge *cs,
1417 struct ipa_jump_func *jfunc,
1418 struct ipcp_param_lattices *dest_lat)
1420 if (alignment_bottom_p (dest_lat))
1421 return false;
1423 ipa_alignment cur;
1424 cur.known = false;
1425 if (jfunc->alignment.known)
1426 cur = jfunc->alignment;
1427 else if (jfunc->type == IPA_JF_PASS_THROUGH
1428 || jfunc->type == IPA_JF_ANCESTOR)
1430 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1431 struct ipcp_param_lattices *src_lats;
1432 HOST_WIDE_INT offset = 0;
1433 int src_idx;
1435 if (jfunc->type == IPA_JF_PASS_THROUGH)
1437 enum tree_code op = ipa_get_jf_pass_through_operation (jfunc);
1438 if (op != NOP_EXPR)
1440 if (op != POINTER_PLUS_EXPR
1441 && op != PLUS_EXPR)
1442 goto prop_fail;
1443 tree operand = ipa_get_jf_pass_through_operand (jfunc);
1444 if (!tree_fits_shwi_p (operand))
1445 goto prop_fail;
1446 offset = tree_to_shwi (operand);
1448 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1450 else
1452 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1453 offset = ipa_get_jf_ancestor_offset (jfunc) / BITS_PER_UNIT;
1456 src_lats = ipa_get_parm_lattices (caller_info, src_idx);
1457 if (!src_lats->alignment.known
1458 || alignment_bottom_p (src_lats))
1459 goto prop_fail;
1461 cur = src_lats->alignment;
1462 cur.misalign = (cur.misalign + offset) % cur.align;
1465 if (cur.known)
1467 if (!dest_lat->alignment.known)
1469 dest_lat->alignment = cur;
1470 return true;
1472 else if (dest_lat->alignment.align == cur.align
1473 && dest_lat->alignment.misalign == cur.misalign)
1474 return false;
1477 prop_fail:
1478 set_alignment_to_bottom (dest_lat);
1479 return true;
1482 /* If DEST_PLATS already has aggregate items, check that aggs_by_ref matches
1483 NEW_AGGS_BY_REF and if not, mark all aggs as bottoms and return true (in all
1484 other cases, return false). If there are no aggregate items, set
1485 aggs_by_ref to NEW_AGGS_BY_REF. */
1487 static bool
1488 set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
1489 bool new_aggs_by_ref)
1491 if (dest_plats->aggs)
1493 if (dest_plats->aggs_by_ref != new_aggs_by_ref)
1495 set_agg_lats_to_bottom (dest_plats);
1496 return true;
1499 else
1500 dest_plats->aggs_by_ref = new_aggs_by_ref;
1501 return false;
1504 /* Walk aggregate lattices in DEST_PLATS from ***AGLAT on, until ***aglat is an
1505 already existing lattice for the given OFFSET and SIZE, marking all skipped
1506 lattices as containing variable and checking for overlaps. If there is no
1507 already existing lattice for the OFFSET and VAL_SIZE, create one, initialize
1508 it with offset, size and contains_variable to PRE_EXISTING, and return true,
1509 unless there are too many already. If there are too many, return false. If
1510 there are overlaps turn whole DEST_PLATS to bottom and return false. If any
1511 skipped lattices were newly marked as containing variable, set *CHANGE to
1512 true. */
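/* An illustrative sketch: if DEST_PLATS already holds aggregate lattices at
   bit offsets 0 (size 32) and 64 (size 32), a request for offset 64 and
   size 32 skips the lattice at 0 (marking it as containing variable) and
   succeeds at the existing lattice at 64, whereas a request for offset 48
   and size 32 overlaps the lattice at 64, so the whole DEST_PLATS is turned
   to bottom and false is returned.  */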
1514 static bool
1515 merge_agg_lats_step (struct ipcp_param_lattices *dest_plats,
1516 HOST_WIDE_INT offset, HOST_WIDE_INT val_size,
1517 struct ipcp_agg_lattice ***aglat,
1518 bool pre_existing, bool *change)
1520 gcc_checking_assert (offset >= 0);
1522 while (**aglat && (**aglat)->offset < offset)
1524 if ((**aglat)->offset + (**aglat)->size > offset)
1526 set_agg_lats_to_bottom (dest_plats);
1527 return false;
1529 *change |= (**aglat)->set_contains_variable ();
1530 *aglat = &(**aglat)->next;
1533 if (**aglat && (**aglat)->offset == offset)
1535 if ((**aglat)->size != val_size
1536 || ((**aglat)->next
1537 && (**aglat)->next->offset < offset + val_size))
1539 set_agg_lats_to_bottom (dest_plats);
1540 return false;
1542 gcc_checking_assert (!(**aglat)->next
1543 || (**aglat)->next->offset >= offset + val_size);
1544 return true;
1546 else
1548 struct ipcp_agg_lattice *new_al;
1550 if (**aglat && (**aglat)->offset < offset + val_size)
1552 set_agg_lats_to_bottom (dest_plats);
1553 return false;
1555 if (dest_plats->aggs_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1556 return false;
1557 dest_plats->aggs_count++;
1558 new_al = (struct ipcp_agg_lattice *) pool_alloc (ipcp_agg_lattice_pool);
1559 memset (new_al, 0, sizeof (*new_al));
1561 new_al->offset = offset;
1562 new_al->size = val_size;
1563 new_al->contains_variable = pre_existing;
1565 new_al->next = **aglat;
1566 **aglat = new_al;
1567 return true;
1571 /* Set all AGLAT and all other aggregate lattices reachable by next pointers as
1572 containing an unknown value. */
1574 static bool
1575 set_chain_of_aglats_contains_variable (struct ipcp_agg_lattice *aglat)
1577 bool ret = false;
1578 while (aglat)
1580 ret |= aglat->set_contains_variable ();
1581 aglat = aglat->next;
1583 return ret;
1586 /* Merge existing aggregate lattices in SRC_PLATS to DEST_PLATS, subtracting
1587 OFFSET_DELTA. CS is the call graph edge and SRC_IDX the index of the source
1588 parameter used for lattice value sources. Return true if DEST_PLATS changed
1589 in any way. */
1591 static bool
1592 merge_aggregate_lattices (struct cgraph_edge *cs,
1593 struct ipcp_param_lattices *dest_plats,
1594 struct ipcp_param_lattices *src_plats,
1595 int src_idx, HOST_WIDE_INT offset_delta)
1597 bool pre_existing = dest_plats->aggs != NULL;
1598 struct ipcp_agg_lattice **dst_aglat;
1599 bool ret = false;
1601 if (set_check_aggs_by_ref (dest_plats, src_plats->aggs_by_ref))
1602 return true;
1603 if (src_plats->aggs_bottom)
1604 return set_agg_lats_contain_variable (dest_plats);
1605 if (src_plats->aggs_contain_variable)
1606 ret |= set_agg_lats_contain_variable (dest_plats);
1607 dst_aglat = &dest_plats->aggs;
1609 for (struct ipcp_agg_lattice *src_aglat = src_plats->aggs;
1610 src_aglat;
1611 src_aglat = src_aglat->next)
1613 HOST_WIDE_INT new_offset = src_aglat->offset - offset_delta;
1615 if (new_offset < 0)
1616 continue;
1617 if (merge_agg_lats_step (dest_plats, new_offset, src_aglat->size,
1618 &dst_aglat, pre_existing, &ret))
1620 struct ipcp_agg_lattice *new_al = *dst_aglat;
1622 dst_aglat = &(*dst_aglat)->next;
1623 if (src_aglat->bottom)
1625 ret |= new_al->set_contains_variable ();
1626 continue;
1628 if (src_aglat->contains_variable)
1629 ret |= new_al->set_contains_variable ();
1630 for (ipcp_value<tree> *val = src_aglat->values;
1631 val;
1632 val = val->next)
1633 ret |= new_al->add_value (val->value, cs, val, src_idx,
1634 src_aglat->offset);
1636 else if (dest_plats->aggs_bottom)
1637 return true;
1639 ret |= set_chain_of_aglats_contains_variable (*dst_aglat);
1640 return ret;
1643 /* Determine whether there is anything to propagate from SRC_PLATS through a
1644 pass-through JFUNC and if so, whether it conforms to the rules about
1645 propagating values passed by reference. */
1647 static bool
1648 agg_pass_through_permissible_p (struct ipcp_param_lattices *src_plats,
1649 struct ipa_jump_func *jfunc)
1651 return src_plats->aggs
1652 && (!src_plats->aggs_by_ref
1653 || ipa_get_jf_pass_through_agg_preserved (jfunc));
1656 /* Propagate aggregate values across jump function JFUNC that is associated
1657 with edge CS and put the values into DEST_PLATS. */
1659 static bool
1660 propagate_aggs_accross_jump_function (struct cgraph_edge *cs,
1661 struct ipa_jump_func *jfunc,
1662 struct ipcp_param_lattices *dest_plats)
1664 bool ret = false;
1666 if (dest_plats->aggs_bottom)
1667 return false;
1669 if (jfunc->type == IPA_JF_PASS_THROUGH
1670 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
1672 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1673 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1674 struct ipcp_param_lattices *src_plats;
1676 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1677 if (agg_pass_through_permissible_p (src_plats, jfunc))
1679 /* Currently we do not produce clobber aggregate jump
1680 functions, replace with merging when we do. */
1681 gcc_assert (!jfunc->agg.items);
1682 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats,
1683 src_idx, 0);
1685 else
1686 ret |= set_agg_lats_contain_variable (dest_plats);
1688 else if (jfunc->type == IPA_JF_ANCESTOR
1689 && ipa_get_jf_ancestor_agg_preserved (jfunc))
1691 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1692 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1693 struct ipcp_param_lattices *src_plats;
1695 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1696 if (src_plats->aggs && src_plats->aggs_by_ref)
1698 /* Currently we do not produce clobber aggregate jump
1699 functions, replace with merging when we do. */
1700 gcc_assert (!jfunc->agg.items);
1701 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats, src_idx,
1702 ipa_get_jf_ancestor_offset (jfunc));
1704 else if (!src_plats->aggs_by_ref)
1705 ret |= set_agg_lats_to_bottom (dest_plats);
1706 else
1707 ret |= set_agg_lats_contain_variable (dest_plats);
1709 else if (jfunc->agg.items)
1711 bool pre_existing = dest_plats->aggs != NULL;
1712 struct ipcp_agg_lattice **aglat = &dest_plats->aggs;
1713 struct ipa_agg_jf_item *item;
1714 int i;
1716 if (set_check_aggs_by_ref (dest_plats, jfunc->agg.by_ref))
1717 return true;
1719 FOR_EACH_VEC_ELT (*jfunc->agg.items, i, item)
1721 HOST_WIDE_INT val_size;
1723 if (item->offset < 0)
1724 continue;
1725 gcc_checking_assert (is_gimple_ip_invariant (item->value));
1726 val_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (item->value)));
1728 if (merge_agg_lats_step (dest_plats, item->offset, val_size,
1729 &aglat, pre_existing, &ret))
1731 ret |= (*aglat)->add_value (item->value, cs, NULL, 0, 0);
1732 aglat = &(*aglat)->next;
1734 else if (dest_plats->aggs_bottom)
1735 return true;
1738 ret |= set_chain_of_aglats_contains_variable (*aglat);
1740 else
1741 ret |= set_agg_lats_contain_variable (dest_plats);
1743 return ret;
1746 /* Propagate constants from the caller to the callee of CS. */
1749 static bool
1750 propagate_constants_accross_call (struct cgraph_edge *cs)
1752 struct ipa_node_params *callee_info;
1753 enum availability availability;
1754 struct cgraph_node *callee, *alias_or_thunk;
1755 struct ipa_edge_args *args;
1756 bool ret = false;
1757 int i, args_count, parms_count;
1759 callee = cs->callee->function_symbol (&availability);
1760 if (!callee->definition)
1761 return false;
1762 gcc_checking_assert (callee->has_gimple_body_p ());
1763 callee_info = IPA_NODE_REF (callee);
1765 args = IPA_EDGE_REF (cs);
1766 args_count = ipa_get_cs_argument_count (args);
1767 parms_count = ipa_get_param_count (callee_info);
1768 if (parms_count == 0)
1769 return false;
1771 /* No propagation through instrumentation thunks is available yet.
1772 It should be possible with proper mapping of call args and
1773 instrumented callee params in the propagation loop below. But
1774 this case mostly occurs when legacy code calls instrumented code
1775 and it is not a primary target for optimizations.
1776 We detect instrumentation thunks in aliases and thunks chain by
1777 checking instrumentation_clone flag for chain source and target.
1778 Going through instrumentation thunks we always have it changed
1779 from 0 to 1 and all other nodes do not change it. */
1780 if (!cs->callee->instrumentation_clone
1781 && callee->instrumentation_clone)
1783 for (i = 0; i < parms_count; i++)
1784 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
1785 i));
1786 return ret;
1789 /* If this call goes through a thunk we must not propagate to the first (0th)
1790 parameter. However, we might need to uncover a thunk from below a series
1791 of aliases first. */
1792 alias_or_thunk = cs->callee;
1793 while (alias_or_thunk->alias)
1794 alias_or_thunk = alias_or_thunk->get_alias_target ();
1795 if (alias_or_thunk->thunk.thunk_p)
1797 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
1798 0));
1799 i = 1;
1801 else
1802 i = 0;
1804 for (; (i < args_count) && (i < parms_count); i++)
1806 struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
1807 struct ipcp_param_lattices *dest_plats;
1809 dest_plats = ipa_get_parm_lattices (callee_info, i);
1810 if (availability == AVAIL_INTERPOSABLE)
1811 ret |= set_all_contains_variable (dest_plats);
1812 else
1814 ret |= propagate_scalar_accross_jump_function (cs, jump_func,
1815 &dest_plats->itself);
1816 ret |= propagate_context_accross_jump_function (cs, jump_func, i,
1817 &dest_plats->ctxlat);
1818 ret |= propagate_alignment_accross_jump_function (cs, jump_func,
1819 dest_plats);
1820 ret |= propagate_aggs_accross_jump_function (cs, jump_func,
1821 dest_plats);
1824 for (; i < parms_count; i++)
1825 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info, i));
1827 return ret;
1830 /* If an indirect edge IE can be turned into a direct one based on KNOWN_CSTS,
1831 KNOWN_CONTEXTS, KNOWN_AGGS or AGG_REPS, return the destination. The latter
1832 three can be NULL. If AGG_REPS is not NULL, KNOWN_AGGS is ignored. */
1834 static tree
1835 ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
1836 vec<tree> known_csts,
1837 vec<ipa_polymorphic_call_context> known_contexts,
1838 vec<ipa_agg_jump_function_p> known_aggs,
1839 struct ipa_agg_replacement_value *agg_reps,
1840 bool *speculative)
1842 int param_index = ie->indirect_info->param_index;
1843 HOST_WIDE_INT anc_offset;
1844 tree t;
1845 tree target = NULL;
1847 *speculative = false;
1849 if (param_index == -1
1850 || known_csts.length () <= (unsigned int) param_index)
1851 return NULL_TREE;
1853 if (!ie->indirect_info->polymorphic)
1855 tree t;
1857 if (ie->indirect_info->agg_contents)
1859 if (agg_reps)
1861 t = NULL;
1862 while (agg_reps)
1864 if (agg_reps->index == param_index
1865 && agg_reps->offset == ie->indirect_info->offset
1866 && agg_reps->by_ref == ie->indirect_info->by_ref)
1868 t = agg_reps->value;
1869 break;
1871 agg_reps = agg_reps->next;
1874 else if (known_aggs.length () > (unsigned int) param_index)
1876 struct ipa_agg_jump_function *agg;
1877 agg = known_aggs[param_index];
1878 t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
1879 ie->indirect_info->by_ref);
1881 else
1882 t = NULL;
1884 else
1885 t = known_csts[param_index];
1887 if (t
1888 && TREE_CODE (t) == ADDR_EXPR
1889 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
1890 return TREE_OPERAND (t, 0);
1891 else
1892 return NULL_TREE;
1895 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
1896 return NULL_TREE;
1898 gcc_assert (!ie->indirect_info->agg_contents);
1899 anc_offset = ie->indirect_info->offset;
1901 t = NULL;
1903 /* Try to work out the value of the virtual table pointer from replacements. */
1904 if (!t && agg_reps && !ie->indirect_info->by_ref)
1906 while (agg_reps)
1908 if (agg_reps->index == param_index
1909 && agg_reps->offset == ie->indirect_info->offset
1910 && agg_reps->by_ref)
1912 t = agg_reps->value;
1913 break;
1915 agg_reps = agg_reps->next;
1919 /* Try to work out the value of the virtual table pointer from known
1920 aggregate values. */
1921 if (!t && known_aggs.length () > (unsigned int) param_index
1922 && !ie->indirect_info->by_ref)
1924 struct ipa_agg_jump_function *agg;
1925 agg = known_aggs[param_index];
1926 t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
1927 true);
1930 /* If we found the virtual table pointer, look up the target. */
1931 if (t)
1933 tree vtable;
1934 unsigned HOST_WIDE_INT offset;
1935 if (vtable_pointer_value_to_vtable (t, &vtable, &offset))
1937 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
1938 vtable, offset);
1939 if (target)
1941 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
1942 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
1943 || !possible_polymorphic_call_target_p
1944 (ie, cgraph_node::get (target)))
1945 target = ipa_impossible_devirt_target (ie, target);
1946 *speculative = ie->indirect_info->vptr_changed;
1947 if (!*speculative)
1948 return target;
1953 /* Do we know the constant value of the pointer? */
1954 if (!t)
1955 t = known_csts[param_index];
1957 gcc_checking_assert (!t || TREE_CODE (t) != TREE_BINFO);
1959 ipa_polymorphic_call_context context;
1960 if (known_contexts.length () > (unsigned int) param_index)
1962 context = known_contexts[param_index];
1963 context.offset_by (anc_offset);
1964 if (ie->indirect_info->vptr_changed)
1965 context.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
1966 ie->indirect_info->otr_type);
1967 if (t)
1969 ipa_polymorphic_call_context ctx2 = ipa_polymorphic_call_context
1970 (t, ie->indirect_info->otr_type, anc_offset);
1971 if (!ctx2.useless_p ())
1972 context.combine_with (ctx2, ie->indirect_info->otr_type);
1975 else if (t)
1977 context = ipa_polymorphic_call_context (t, ie->indirect_info->otr_type,
1978 anc_offset);
1979 if (ie->indirect_info->vptr_changed)
1980 context.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
1981 ie->indirect_info->otr_type);
1983 else
1984 return NULL_TREE;
1986 vec <cgraph_node *>targets;
1987 bool final;
1989 targets = possible_polymorphic_call_targets
1990 (ie->indirect_info->otr_type,
1991 ie->indirect_info->otr_token,
1992 context, &final);
1993 if (!final || targets.length () > 1)
1995 struct cgraph_node *node;
1996 if (*speculative)
1997 return target;
1998 if (!opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
1999 || ie->speculative || !ie->maybe_hot_p ())
2000 return NULL;
2001 node = try_speculative_devirtualization (ie->indirect_info->otr_type,
2002 ie->indirect_info->otr_token,
2003 context);
2004 if (node)
2006 *speculative = true;
2007 target = node->decl;
2009 else
2010 return NULL;
2012 else
2014 *speculative = false;
2015 if (targets.length () == 1)
2016 target = targets[0]->decl;
2017 else
2018 target = ipa_impossible_devirt_target (ie, NULL_TREE);
2021 if (target && !possible_polymorphic_call_target_p (ie,
2022 cgraph_node::get (target)))
2023 target = ipa_impossible_devirt_target (ie, target);
2025 return target;
2029 /* If an indirect edge IE can be turned into a direct one based on KNOWN_CSTS,
2030 KNOWN_CONTEXTS (which can be vNULL) or KNOWN_AGGS (which also can be vNULL)
2031 return the destination. */
2033 tree
2034 ipa_get_indirect_edge_target (struct cgraph_edge *ie,
2035 vec<tree> known_csts,
2036 vec<ipa_polymorphic_call_context> known_contexts,
2037 vec<ipa_agg_jump_function_p> known_aggs,
2038 bool *speculative)
2040 return ipa_get_indirect_edge_target_1 (ie, known_csts, known_contexts,
2041 known_aggs, NULL, speculative);
2044 /* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS,
2045 KNOWN_CONTEXTS and KNOWN_AGGS. */
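/* Each devirtualizable indirect call gets at least a token bonus; bigger
bonuses are given when the would-be callee looks small enough to be inlined
(judged against fractions of MAX_INLINE_INSNS_AUTO below), and those bigger
bonuses are halved for merely speculative targets.  */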
2047 static int
2048 devirtualization_time_bonus (struct cgraph_node *node,
2049 vec<tree> known_csts,
2050 vec<ipa_polymorphic_call_context> known_contexts,
2051 vec<ipa_agg_jump_function_p> known_aggs)
2053 struct cgraph_edge *ie;
2054 int res = 0;
2056 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
2058 struct cgraph_node *callee;
2059 struct inline_summary *isummary;
2060 enum availability avail;
2061 tree target;
2062 bool speculative;
2064 target = ipa_get_indirect_edge_target (ie, known_csts, known_contexts,
2065 known_aggs, &speculative);
2066 if (!target)
2067 continue;
2069 /* Only bare minimum benefit for clearly un-inlineable targets. */
2070 res += 1;
2071 callee = cgraph_node::get (target);
2072 if (!callee || !callee->definition)
2073 continue;
2074 callee = callee->function_symbol (&avail);
2075 if (avail < AVAIL_AVAILABLE)
2076 continue;
2077 isummary = inline_summaries->get (callee);
2078 if (!isummary->inlinable)
2079 continue;
2081 /* FIXME: The values below need re-considering and perhaps also
2082 integrating into the cost metrics, at least in some very basic way. */
2083 if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
2084 res += 31 / ((int)speculative + 1);
2085 else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
2086 res += 15 / ((int)speculative + 1);
2087 else if (isummary->size <= MAX_INLINE_INSNS_AUTO
2088 || DECL_DECLARED_INLINE_P (callee->decl))
2089 res += 7 / ((int)speculative + 1);
2092 return res;
2095 /* Return time bonus incurred because of HINTS. */
2097 static int
2098 hint_time_bonus (inline_hints hints)
2100 int result = 0;
2101 if (hints & (INLINE_HINT_loop_iterations | INLINE_HINT_loop_stride))
2102 result += PARAM_VALUE (PARAM_IPA_CP_LOOP_HINT_BONUS);
2103 if (hints & INLINE_HINT_array_index)
2104 result += PARAM_VALUE (PARAM_IPA_CP_ARRAY_INDEX_HINT_BONUS);
2105 return result;
2108 /* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
2109 and SIZE_COST, the sum of frequencies of incoming edges to the potential
2110 new clone in FREQ_SUM and the sum of their profile counts in COUNT_SUM. */
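/* In short, the decision is based on

     evaluation = time_benefit * factor / size_cost

where factor is count_sum scaled to max_count (in permille) when profile
counts are available and freq_sum otherwise; cloning is considered good when
evaluation reaches PARAM_IPA_CP_EVAL_THRESHOLD.  */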
2112 static bool
2113 good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
2114 int freq_sum, gcov_type count_sum, int size_cost)
2116 if (time_benefit == 0
2117 || !opt_for_fn (node->decl, flag_ipa_cp_clone)
2118 || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
2119 return false;
2121 gcc_assert (size_cost > 0);
2123 if (max_count)
2125 int factor = (count_sum * 1000) / max_count;
2126 int64_t evaluation = (((int64_t) time_benefit * factor)
2127 / size_cost);
2129 if (dump_file && (dump_flags & TDF_DETAILS))
2130 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
2131 "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
2132 ") -> evaluation: " "%"PRId64
2133 ", threshold: %i\n",
2134 time_benefit, size_cost, (HOST_WIDE_INT) count_sum,
2135 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
2137 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
2139 else
2141 int64_t evaluation = (((int64_t) time_benefit * freq_sum)
2142 / size_cost);
2144 if (dump_file && (dump_flags & TDF_DETAILS))
2145 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
2146 "size: %i, freq_sum: %i) -> evaluation: "
2147 "%"PRId64 ", threshold: %i\n",
2148 time_benefit, size_cost, freq_sum, evaluation,
2149 PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
2151 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
2155 /* Return all context independent values from aggregate lattices in PLATS in a
2156 vector. Return NULL if there are none. */
2158 static vec<ipa_agg_jf_item, va_gc> *
2159 context_independent_aggregate_values (struct ipcp_param_lattices *plats)
2161 vec<ipa_agg_jf_item, va_gc> *res = NULL;
2163 if (plats->aggs_bottom
2164 || plats->aggs_contain_variable
2165 || plats->aggs_count == 0)
2166 return NULL;
2168 for (struct ipcp_agg_lattice *aglat = plats->aggs;
2169 aglat;
2170 aglat = aglat->next)
2171 if (aglat->is_single_const ())
2173 struct ipa_agg_jf_item item;
2174 item.offset = aglat->offset;
2175 item.value = aglat->values->value;
2176 vec_safe_push (res, item);
2178 return res;
2181 /* Allocate KNOWN_CSTS, KNOWN_CONTEXTS and, if non-NULL, KNOWN_AGGS and
2182 populate them with values of parameters that are known independent of the
2183 context. INFO describes the function. If REMOVABLE_PARAMS_COST is
2184 non-NULL, the movement cost of all removable parameters will be stored in
2185 it. */
2187 static bool
2188 gather_context_independent_values (struct ipa_node_params *info,
2189 vec<tree> *known_csts,
2190 vec<ipa_polymorphic_call_context>
2191 *known_contexts,
2192 vec<ipa_agg_jump_function> *known_aggs,
2193 int *removable_params_cost)
2195 int i, count = ipa_get_param_count (info);
2196 bool ret = false;
2198 known_csts->create (0);
2199 known_contexts->create (0);
2200 known_csts->safe_grow_cleared (count);
2201 known_contexts->safe_grow_cleared (count);
2202 if (known_aggs)
2204 known_aggs->create (0);
2205 known_aggs->safe_grow_cleared (count);
2208 if (removable_params_cost)
2209 *removable_params_cost = 0;
2211 for (i = 0; i < count ; i++)
2213 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2214 ipcp_lattice<tree> *lat = &plats->itself;
2216 if (lat->is_single_const ())
2218 ipcp_value<tree> *val = lat->values;
2219 gcc_checking_assert (TREE_CODE (val->value) != TREE_BINFO);
2220 (*known_csts)[i] = val->value;
2221 if (removable_params_cost)
2222 *removable_params_cost
2223 += estimate_move_cost (TREE_TYPE (val->value), false);
2224 ret = true;
2226 else if (removable_params_cost
2227 && !ipa_is_param_used (info, i))
2228 *removable_params_cost
2229 += ipa_get_param_move_cost (info, i);
2231 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
2232 if (ctxlat->is_single_const ())
2234 (*known_contexts)[i] = ctxlat->values->value;
2235 ret = true;
2238 if (known_aggs)
2240 vec<ipa_agg_jf_item, va_gc> *agg_items;
2241 struct ipa_agg_jump_function *ajf;
2243 agg_items = context_independent_aggregate_values (plats);
2244 ajf = &(*known_aggs)[i];
2245 ajf->items = agg_items;
2246 ajf->by_ref = plats->aggs_by_ref;
2247 ret |= agg_items != NULL;
2251 return ret;
2254 /* The current interface in ipa-inline-analysis requires a pointer vector.
2255 Create it.
2257 FIXME: That interface should be re-worked, this is slightly silly. Still,
2258 I'd like to discuss how to change it first and this demonstrates the
2259 issue. */
2261 static vec<ipa_agg_jump_function_p>
2262 agg_jmp_p_vec_for_t_vec (vec<ipa_agg_jump_function> known_aggs)
2264 vec<ipa_agg_jump_function_p> ret;
2265 struct ipa_agg_jump_function *ajf;
2266 int i;
2268 ret.create (known_aggs.length ());
2269 FOR_EACH_VEC_ELT (known_aggs, i, ajf)
2270 ret.quick_push (ajf);
2271 return ret;
2274 /* Perform time and size measurement of NODE with the context given in
2275 KNOWN_CSTS, KNOWN_CONTEXTS and KNOWN_AGGS, calculate the benefit and cost
2276 given BASE_TIME of the node without specialization, REMOVABLE_PARAMS_COST of
2277 all context-independent removable parameters and EST_MOVE_COST of estimated
2278 movement of the considered parameter, and store the result into VAL. */
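/* That is, roughly

     time_benefit = base_time - specialized time
                    + devirtualization and hint bonuses
                    + removable_params_cost + est_move_cost

and the size estimate is clamped to at least one unit so that it can later be
used as a divisor.  */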
2280 static void
2281 perform_estimation_of_a_value (cgraph_node *node, vec<tree> known_csts,
2282 vec<ipa_polymorphic_call_context> known_contexts,
2283 vec<ipa_agg_jump_function_p> known_aggs_ptrs,
2284 int base_time, int removable_params_cost,
2285 int est_move_cost, ipcp_value_base *val)
2287 int time, size, time_benefit;
2288 inline_hints hints;
2290 estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
2291 known_aggs_ptrs, &size, &time,
2292 &hints);
2293 time_benefit = base_time - time
2294 + devirtualization_time_bonus (node, known_csts, known_contexts,
2295 known_aggs_ptrs)
2296 + hint_time_bonus (hints)
2297 + removable_params_cost + est_move_cost;
2299 gcc_checking_assert (size >= 0);
2300 /* The inliner-heuristics based estimates may think that in certain
2301 contexts some functions do not have any size at all but we want
2302 all specializations to have at least a tiny cost, if nothing else
2303 to avoid dividing by zero. */
2304 if (size == 0)
2305 size = 1;
2307 val->local_time_benefit = time_benefit;
2308 val->local_size_cost = size;
2311 /* Iterate over known values of parameters of NODE and estimate the local
2312 effects in terms of time and size they have. */
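/* The fully context-independent values are evaluated once for the whole
node; afterwards each candidate scalar constant, polymorphic context and
aggregate value is temporarily plugged into the known vectors, measured with
perform_estimation_of_a_value, and taken out again.  */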
2314 static void
2315 estimate_local_effects (struct cgraph_node *node)
2317 struct ipa_node_params *info = IPA_NODE_REF (node);
2318 int i, count = ipa_get_param_count (info);
2319 vec<tree> known_csts;
2320 vec<ipa_polymorphic_call_context> known_contexts;
2321 vec<ipa_agg_jump_function> known_aggs;
2322 vec<ipa_agg_jump_function_p> known_aggs_ptrs;
2323 bool always_const;
2324 int base_time = inline_summaries->get (node)->time;
2325 int removable_params_cost;
2327 if (!count || !ipcp_versionable_function_p (node))
2328 return;
2330 if (dump_file && (dump_flags & TDF_DETAILS))
2331 fprintf (dump_file, "\nEstimating effects for %s/%i, base_time: %i.\n",
2332 node->name (), node->order, base_time);
2334 always_const = gather_context_independent_values (info, &known_csts,
2335 &known_contexts, &known_aggs,
2336 &removable_params_cost);
2337 known_aggs_ptrs = agg_jmp_p_vec_for_t_vec (known_aggs);
2338 if (always_const)
2340 struct caller_statistics stats;
2341 inline_hints hints;
2342 int time, size;
2344 init_caller_stats (&stats);
2345 node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
2346 false);
2347 estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
2348 known_aggs_ptrs, &size, &time, &hints);
2349 time -= devirtualization_time_bonus (node, known_csts, known_contexts,
2350 known_aggs_ptrs);
2351 time -= hint_time_bonus (hints);
2352 time -= removable_params_cost;
2353 size -= stats.n_calls * removable_params_cost;
2355 if (dump_file)
2356 fprintf (dump_file, " - context independent values, size: %i, "
2357 "time_benefit: %i\n", size, base_time - time);
2359 if (size <= 0
2360 || node->will_be_removed_from_program_if_no_direct_calls_p ())
2362 info->do_clone_for_all_contexts = true;
2363 base_time = time;
2365 if (dump_file)
2366 fprintf (dump_file, " Decided to specialize for all "
2367 "known contexts, code not going to grow.\n");
2369 else if (good_cloning_opportunity_p (node, base_time - time,
2370 stats.freq_sum, stats.count_sum,
2371 size))
2373 if (size + overall_size <= max_new_size)
2375 info->do_clone_for_all_contexts = true;
2376 base_time = time;
2377 overall_size += size;
2379 if (dump_file)
2380 fprintf (dump_file, " Decided to specialize for all "
2381 "known contexts, growth deemed beneficial.\n");
2383 else if (dump_file && (dump_flags & TDF_DETAILS))
2384 fprintf (dump_file, " Not cloning for all contexts because "
2385 "max_new_size would be reached with %li.\n",
2386 size + overall_size);
2390 for (i = 0; i < count ; i++)
2392 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2393 ipcp_lattice<tree> *lat = &plats->itself;
2394 ipcp_value<tree> *val;
2396 if (lat->bottom
2397 || !lat->values
2398 || known_csts[i])
2399 continue;
2401 for (val = lat->values; val; val = val->next)
2403 gcc_checking_assert (TREE_CODE (val->value) != TREE_BINFO);
2404 known_csts[i] = val->value;
2406 int emc = estimate_move_cost (TREE_TYPE (val->value), true);
2407 perform_estimation_of_a_value (node, known_csts, known_contexts,
2408 known_aggs_ptrs, base_time,
2409 removable_params_cost, emc, val);
2411 if (dump_file && (dump_flags & TDF_DETAILS))
2413 fprintf (dump_file, " - estimates for value ");
2414 print_ipcp_constant_value (dump_file, val->value);
2415 fprintf (dump_file, " for ");
2416 ipa_dump_param (dump_file, info, i);
2417 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
2418 val->local_time_benefit, val->local_size_cost);
2421 known_csts[i] = NULL_TREE;
2424 for (i = 0; i < count; i++)
2426 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2428 if (!plats->virt_call)
2429 continue;
2431 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
2432 ipcp_value<ipa_polymorphic_call_context> *val;
2434 if (ctxlat->bottom
2435 || !ctxlat->values
2436 || !known_contexts[i].useless_p ())
2437 continue;
2439 for (val = ctxlat->values; val; val = val->next)
2441 known_contexts[i] = val->value;
2442 perform_estimation_of_a_value (node, known_csts, known_contexts,
2443 known_aggs_ptrs, base_time,
2444 removable_params_cost, 0, val);
2446 if (dump_file && (dump_flags & TDF_DETAILS))
2448 fprintf (dump_file, " - estimates for polymorphic context ");
2449 print_ipcp_constant_value (dump_file, val->value);
2450 fprintf (dump_file, " for ");
2451 ipa_dump_param (dump_file, info, i);
2452 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
2453 val->local_time_benefit, val->local_size_cost);
2456 known_contexts[i] = ipa_polymorphic_call_context ();
2459 for (i = 0; i < count ; i++)
2461 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2462 struct ipa_agg_jump_function *ajf;
2463 struct ipcp_agg_lattice *aglat;
2465 if (plats->aggs_bottom || !plats->aggs)
2466 continue;
2468 ajf = &known_aggs[i];
2469 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2471 ipcp_value<tree> *val;
2472 if (aglat->bottom || !aglat->values
2473 /* If the following is true, the one value is in known_aggs. */
2474 || (!plats->aggs_contain_variable
2475 && aglat->is_single_const ()))
2476 continue;
2478 for (val = aglat->values; val; val = val->next)
2480 struct ipa_agg_jf_item item;
2482 item.offset = aglat->offset;
2483 item.value = val->value;
2484 vec_safe_push (ajf->items, item);
2486 perform_estimation_of_a_value (node, known_csts, known_contexts,
2487 known_aggs_ptrs, base_time,
2488 removable_params_cost, 0, val);
2490 if (dump_file && (dump_flags & TDF_DETAILS))
2492 fprintf (dump_file, " - estimates for value ");
2493 print_ipcp_constant_value (dump_file, val->value);
2494 fprintf (dump_file, " for ");
2495 ipa_dump_param (dump_file, info, i);
2496 fprintf (dump_file, "[%soffset: " HOST_WIDE_INT_PRINT_DEC
2497 "]: time_benefit: %i, size: %i\n",
2498 plats->aggs_by_ref ? "ref " : "",
2499 aglat->offset,
2500 val->local_time_benefit, val->local_size_cost);
2503 ajf->items->pop ();
2508 for (i = 0; i < count ; i++)
2509 vec_free (known_aggs[i].items);
2511 known_csts.release ();
2512 known_contexts.release ();
2513 known_aggs.release ();
2514 known_aggs_ptrs.release ();
2518 /* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
2519 topological sort of values. */
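/* This is the recursive step of a Tarjan-style SCC search over the value
dependency graph: values get dfs and low_link numbers, their sources are
visited depth-first, and when a value turns out to be the root of a strongly
connected component, the whole component is popped off the stack and
prepended to values_topo.  */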
2521 template <typename valtype>
2522 void
2523 value_topo_info<valtype>::add_val (ipcp_value<valtype> *cur_val)
2525 ipcp_value_source<valtype> *src;
2527 if (cur_val->dfs)
2528 return;
2530 dfs_counter++;
2531 cur_val->dfs = dfs_counter;
2532 cur_val->low_link = dfs_counter;
2534 cur_val->topo_next = stack;
2535 stack = cur_val;
2536 cur_val->on_stack = true;
2538 for (src = cur_val->sources; src; src = src->next)
2539 if (src->val)
2541 if (src->val->dfs == 0)
2543 add_val (src->val);
2544 if (src->val->low_link < cur_val->low_link)
2545 cur_val->low_link = src->val->low_link;
2547 else if (src->val->on_stack
2548 && src->val->dfs < cur_val->low_link)
2549 cur_val->low_link = src->val->dfs;
2552 if (cur_val->dfs == cur_val->low_link)
2554 ipcp_value<valtype> *v, *scc_list = NULL;
2558 v = stack;
2559 stack = v->topo_next;
2560 v->on_stack = false;
2562 v->scc_next = scc_list;
2563 scc_list = v;
2565 while (v != cur_val);
2567 cur_val->topo_next = values_topo;
2568 values_topo = cur_val;
2572 /* Add all values in lattices associated with NODE to the topological sort if
2573 they are not there yet. */
2575 static void
2576 add_all_node_vals_to_toposort (cgraph_node *node, ipa_topo_info *topo)
2578 struct ipa_node_params *info = IPA_NODE_REF (node);
2579 int i, count = ipa_get_param_count (info);
2581 for (i = 0; i < count ; i++)
2583 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2584 ipcp_lattice<tree> *lat = &plats->itself;
2585 struct ipcp_agg_lattice *aglat;
2587 if (!lat->bottom)
2589 ipcp_value<tree> *val;
2590 for (val = lat->values; val; val = val->next)
2591 topo->constants.add_val (val);
2594 if (!plats->aggs_bottom)
2595 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2596 if (!aglat->bottom)
2598 ipcp_value<tree> *val;
2599 for (val = aglat->values; val; val = val->next)
2600 topo->constants.add_val (val);
2603 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
2604 if (!ctxlat->bottom)
2606 ipcp_value<ipa_polymorphic_call_context> *ctxval;
2607 for (ctxval = ctxlat->values; ctxval; ctxval = ctxval->next)
2608 topo->contexts.add_val (ctxval);
2613 /* One pass of constant propagation along the call graph edges, from callers
2614 to callees (requires topological ordering in TOPO), iterating over strongly
2615 connected components. */
2617 static void
2618 propagate_constants_topo (struct ipa_topo_info *topo)
2620 int i;
2622 for (i = topo->nnodes - 1; i >= 0; i--)
2624 unsigned j;
2625 struct cgraph_node *v, *node = topo->order[i];
2626 vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
2628 /* First, iteratively propagate within the strongly connected component
2629 until all lattices stabilize. */
2630 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
2631 if (v->has_gimple_body_p ())
2632 push_node_to_stack (topo, v);
2634 v = pop_node_from_stack (topo);
2635 while (v)
2637 struct cgraph_edge *cs;
2639 for (cs = v->callees; cs; cs = cs->next_callee)
2640 if (ipa_edge_within_scc (cs)
2641 && propagate_constants_accross_call (cs))
2642 push_node_to_stack (topo, cs->callee->function_symbol ());
2643 v = pop_node_from_stack (topo);
2646 /* Afterwards, propagate along edges leading out of the SCC, calculate
2647 the local effects of the discovered constants and add all valid values
2648 to the topological sort. */
2649 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
2650 if (v->has_gimple_body_p ())
2652 struct cgraph_edge *cs;
2654 estimate_local_effects (v);
2655 add_all_node_vals_to_toposort (v, topo);
2656 for (cs = v->callees; cs; cs = cs->next_callee)
2657 if (!ipa_edge_within_scc (cs))
2658 propagate_constants_accross_call (cs);
2660 cycle_nodes.release ();
2665 /* Return the sum of A and B if neither of them is bigger than INT_MAX/2,
2666 otherwise return the bigger one. */
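/* For example, safe_add (3, 4) is 7, whereas safe_add (INT_MAX, 100) simply
yields INT_MAX instead of overflowing.  */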
2668 static int
2669 safe_add (int a, int b)
2671 if (a > INT_MAX/2 || b > INT_MAX/2)
2672 return a > b ? a : b;
2673 else
2674 return a + b;
2678 /* Propagate the estimated effects of individual values along the topological
2679 order from the dependent values to those they depend on. */
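/* For every strongly connected component of mutually dependent values, the
local and already propagated time benefits and size costs are summed up and
then added, via safe_add, to each source value feeding the component over a
maybe-hot edge.  */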
2681 template <typename valtype>
2682 void
2683 value_topo_info<valtype>::propagate_effects ()
2685 ipcp_value<valtype> *base;
2687 for (base = values_topo; base; base = base->topo_next)
2689 ipcp_value_source<valtype> *src;
2690 ipcp_value<valtype> *val;
2691 int time = 0, size = 0;
2693 for (val = base; val; val = val->scc_next)
2695 time = safe_add (time,
2696 val->local_time_benefit + val->prop_time_benefit);
2697 size = safe_add (size, val->local_size_cost + val->prop_size_cost);
2700 for (val = base; val; val = val->scc_next)
2701 for (src = val->sources; src; src = src->next)
2702 if (src->val
2703 && src->cs->maybe_hot_p ())
2705 src->val->prop_time_benefit = safe_add (time,
2706 src->val->prop_time_benefit);
2707 src->val->prop_size_cost = safe_add (size,
2708 src->val->prop_size_cost);
2714 /* Propagate constants, polymorphic contexts and their effects from the
2715 summaries interprocedurally. */
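/* Apart from kicking off the propagation itself, this also sets up the unit
growth budget: max_new_size starts at the current overall size (but at least
PARAM_LARGE_UNIT_INSNS) and is then allowed to grow by PARAM_IPCP_UNIT_GROWTH
percent.  */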
2717 static void
2718 ipcp_propagate_stage (struct ipa_topo_info *topo)
2720 struct cgraph_node *node;
2722 if (dump_file)
2723 fprintf (dump_file, "\n Propagating constants:\n\n");
2725 if (in_lto_p)
2726 ipa_update_after_lto_read ();
2729 FOR_EACH_DEFINED_FUNCTION (node)
2731 struct ipa_node_params *info = IPA_NODE_REF (node);
2733 determine_versionability (node);
2734 if (node->has_gimple_body_p ())
2736 info->lattices = XCNEWVEC (struct ipcp_param_lattices,
2737 ipa_get_param_count (info));
2738 initialize_node_lattices (node);
2740 if (node->definition && !node->alias)
2741 overall_size += inline_summaries->get (node)->self_size;
2742 if (node->count > max_count)
2743 max_count = node->count;
2746 max_new_size = overall_size;
2747 if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
2748 max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
2749 max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;
2751 if (dump_file)
2752 fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
2753 overall_size, max_new_size);
2755 propagate_constants_topo (topo);
2756 #ifdef ENABLE_CHECKING
2757 ipcp_verify_propagated_values ();
2758 #endif
2759 topo->constants.propagate_effects ();
2760 topo->contexts.propagate_effects ();
2762 if (dump_file)
2764 fprintf (dump_file, "\nIPA lattices after all propagation:\n");
2765 print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
2769 /* Discover indirect outgoing edges of NODE, a new clone with known KNOWN_CSTS,
2770 KNOWN_CONTEXTS and AGGVALS, which can now be made direct, and make them so. */
2772 static void
2773 ipcp_discover_new_direct_edges (struct cgraph_node *node,
2774 vec<tree> known_csts,
2775 vec<ipa_polymorphic_call_context>
2776 known_contexts,
2777 struct ipa_agg_replacement_value *aggvals)
2779 struct cgraph_edge *ie, *next_ie;
2780 bool found = false;
2782 for (ie = node->indirect_calls; ie; ie = next_ie)
2784 tree target;
2785 bool speculative;
2787 next_ie = ie->next_callee;
2788 target = ipa_get_indirect_edge_target_1 (ie, known_csts, known_contexts,
2789 vNULL, aggvals, &speculative);
2790 if (target)
2792 bool agg_contents = ie->indirect_info->agg_contents;
2793 bool polymorphic = ie->indirect_info->polymorphic;
2794 int param_index = ie->indirect_info->param_index;
2795 struct cgraph_edge *cs = ipa_make_edge_direct_to_target (ie, target,
2796 speculative);
2797 found = true;
2799 if (cs && !agg_contents && !polymorphic)
2801 struct ipa_node_params *info = IPA_NODE_REF (node);
2802 int c = ipa_get_controlled_uses (info, param_index);
2803 if (c != IPA_UNDESCRIBED_USE)
2805 struct ipa_ref *to_del;
2807 c--;
2808 ipa_set_controlled_uses (info, param_index, c);
2809 if (dump_file && (dump_flags & TDF_DETAILS))
2810 fprintf (dump_file, " controlled uses count of param "
2811 "%i bumped down to %i\n", param_index, c);
2812 if (c == 0
2813 && (to_del = node->find_reference (cs->callee, NULL, 0)))
2815 if (dump_file && (dump_flags & TDF_DETAILS))
2816 fprintf (dump_file, " and even removing its "
2817 "cloning-created reference\n");
2818 to_del->remove_reference ();
2824 /* Turning calls to direct calls will improve overall summary. */
2825 if (found)
2826 inline_update_overall_summary (node);
2829 /* Vectors of pointers which form linked lists of clones of an original cgraph
2830 edge. */
2832 static vec<cgraph_edge *> next_edge_clone;
2833 static vec<cgraph_edge *> prev_edge_clone;
2835 static inline void
2836 grow_edge_clone_vectors (void)
2838 if (next_edge_clone.length ()
2839 <= (unsigned) symtab->edges_max_uid)
2840 next_edge_clone.safe_grow_cleared (symtab->edges_max_uid + 1);
2841 if (prev_edge_clone.length ()
2842 <= (unsigned) symtab->edges_max_uid)
2843 prev_edge_clone.safe_grow_cleared (symtab->edges_max_uid + 1);
2846 /* Edge duplication hook to grow the appropriate linked list in
2847 next_edge_clone. */
2849 static void
2850 ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2851 void *)
2853 grow_edge_clone_vectors ();
2855 struct cgraph_edge *old_next = next_edge_clone[src->uid];
2856 if (old_next)
2857 prev_edge_clone[old_next->uid] = dst;
2858 prev_edge_clone[dst->uid] = src;
2860 next_edge_clone[dst->uid] = old_next;
2861 next_edge_clone[src->uid] = dst;
2864 /* Hook that is called by cgraph.c when an edge is removed. */
2866 static void
2867 ipcp_edge_removal_hook (struct cgraph_edge *cs, void *)
2869 grow_edge_clone_vectors ();
2871 struct cgraph_edge *prev = prev_edge_clone[cs->uid];
2872 struct cgraph_edge *next = next_edge_clone[cs->uid];
2873 if (prev)
2874 next_edge_clone[prev->uid] = next;
2875 if (next)
2876 prev_edge_clone[next->uid] = prev;
2879 /* See if NODE is a clone with a known aggregate value at a given OFFSET of a
2880 parameter with the given INDEX. */
2882 static tree
2883 get_clone_agg_value (struct cgraph_node *node, HOST_WIDE_INT offset,
2884 int index)
2886 struct ipa_agg_replacement_value *aggval;
2888 aggval = ipa_get_agg_replacements_for_node (node);
2889 while (aggval)
2891 if (aggval->offset == offset
2892 && aggval->index == index)
2893 return aggval->value;
2894 aggval = aggval->next;
2896 return NULL_TREE;
2899 /* Return true if NODE is DEST or its clone for all contexts. */
2901 static bool
2902 same_node_or_its_all_contexts_clone_p (cgraph_node *node, cgraph_node *dest)
2904 if (node == dest)
2905 return true;
2907 struct ipa_node_params *info = IPA_NODE_REF (node);
2908 return info->is_all_contexts_clone && info->ipcp_orig_node == dest;
2911 /* Return true if edge CS does bring about the value described by SRC to node
2912 DEST or its clone for all contexts. */
2914 static bool
2915 cgraph_edge_brings_value_p (cgraph_edge *cs, ipcp_value_source<tree> *src,
2916 cgraph_node *dest)
2918 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2919 enum availability availability;
2920 cgraph_node *real_dest = cs->callee->function_symbol (&availability);
2922 if (!same_node_or_its_all_contexts_clone_p (real_dest, dest)
2923 || availability <= AVAIL_INTERPOSABLE
2924 || caller_info->node_dead)
2925 return false;
2926 if (!src->val)
2927 return true;
2929 if (caller_info->ipcp_orig_node)
2931 tree t;
2932 if (src->offset == -1)
2933 t = caller_info->known_csts[src->index];
2934 else
2935 t = get_clone_agg_value (cs->caller, src->offset, src->index);
2936 return (t != NULL_TREE
2937 && values_equal_for_ipcp_p (src->val->value, t));
2939 else
2941 struct ipcp_agg_lattice *aglat;
2942 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
2943 src->index);
2944 if (src->offset == -1)
2945 return (plats->itself.is_single_const ()
2946 && values_equal_for_ipcp_p (src->val->value,
2947 plats->itself.values->value));
2948 else
2950 if (plats->aggs_bottom || plats->aggs_contain_variable)
2951 return false;
2952 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2953 if (aglat->offset == src->offset)
2954 return (aglat->is_single_const ()
2955 && values_equal_for_ipcp_p (src->val->value,
2956 aglat->values->value));
2958 return false;
2962 /* Return true if edge CS does bring about the value described by SRC to node
2963 DEST or its clone for all contexts. */
2965 static bool
2966 cgraph_edge_brings_value_p (cgraph_edge *cs,
2967 ipcp_value_source<ipa_polymorphic_call_context> *src,
2968 cgraph_node *dest)
2970 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2971 cgraph_node *real_dest = cs->callee->function_symbol ();
2973 if (!same_node_or_its_all_contexts_clone_p (real_dest, dest)
2974 || caller_info->node_dead)
2975 return false;
2976 if (!src->val)
2977 return true;
2979 if (caller_info->ipcp_orig_node)
2980 return (caller_info->known_contexts.length () > (unsigned) src->index)
2981 && values_equal_for_ipcp_p (src->val->value,
2982 caller_info->known_contexts[src->index]);
2984 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
2985 src->index);
2986 return plats->ctxlat.is_single_const ()
2987 && values_equal_for_ipcp_p (src->val->value,
2988 plats->ctxlat.values->value);
2991 /* Get the next clone in the linked list of clones of an edge. */
2993 static inline struct cgraph_edge *
2994 get_next_cgraph_edge_clone (struct cgraph_edge *cs)
2996 return next_edge_clone[cs->uid];
2999 /* Given VAL that is intended for DEST, iterate over all its sources and if they
3000 still hold, add their edge frequencies and counts into *FREQ_SUM and
3001 *COUNT_SUM, store their number into *CALLER_COUNT; return true if any is hot. */
3003 template <typename valtype>
3004 static bool
3005 get_info_about_necessary_edges (ipcp_value<valtype> *val, cgraph_node *dest,
3006 int *freq_sum,
3007 gcov_type *count_sum, int *caller_count)
3009 ipcp_value_source<valtype> *src;
3010 int freq = 0, count = 0;
3011 gcov_type cnt = 0;
3012 bool hot = false;
3014 for (src = val->sources; src; src = src->next)
3016 struct cgraph_edge *cs = src->cs;
3017 while (cs)
3019 if (cgraph_edge_brings_value_p (cs, src, dest))
3021 count++;
3022 freq += cs->frequency;
3023 cnt += cs->count;
3024 hot |= cs->maybe_hot_p ();
3026 cs = get_next_cgraph_edge_clone (cs);
3030 *freq_sum = freq;
3031 *count_sum = cnt;
3032 *caller_count = count;
3033 return hot;
3036 /* Return a vector of incoming edges that do bring value VAL to node DEST. It
3037 is assumed their number is known and equal to CALLER_COUNT. */
3039 template <typename valtype>
3040 static vec<cgraph_edge *>
3041 gather_edges_for_value (ipcp_value<valtype> *val, cgraph_node *dest,
3042 int caller_count)
3044 ipcp_value_source<valtype> *src;
3045 vec<cgraph_edge *> ret;
3047 ret.create (caller_count);
3048 for (src = val->sources; src; src = src->next)
3050 struct cgraph_edge *cs = src->cs;
3051 while (cs)
3053 if (cgraph_edge_brings_value_p (cs, src, dest))
3054 ret.quick_push (cs);
3055 cs = get_next_cgraph_edge_clone (cs);
3059 return ret;
3062 /* Construct a replacement map for a known VALUE of a formal parameter
3063 PARM_NUM. Return it or NULL if for some reason it cannot be created. */
3065 static struct ipa_replace_map *
3066 get_replacement_map (struct ipa_node_params *info, tree value, int parm_num)
3068 struct ipa_replace_map *replace_map;
3071 replace_map = ggc_alloc<ipa_replace_map> ();
3072 if (dump_file)
3074 fprintf (dump_file, " replacing ");
3075 ipa_dump_param (dump_file, info, parm_num);
3077 fprintf (dump_file, " with const ");
3078 print_generic_expr (dump_file, value, 0);
3079 fprintf (dump_file, "\n");
3081 replace_map->old_tree = NULL;
3082 replace_map->parm_num = parm_num;
3083 replace_map->new_tree = value;
3084 replace_map->replace_p = true;
3085 replace_map->ref_p = false;
3087 return replace_map;
3090 /* Dump new profiling counts. */
3092 static void
3093 dump_profile_updates (struct cgraph_node *orig_node,
3094 struct cgraph_node *new_node)
3096 struct cgraph_edge *cs;
3098 fprintf (dump_file, " setting count of the specialized node to "
3099 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) new_node->count);
3100 for (cs = new_node->callees; cs ; cs = cs->next_callee)
3101 fprintf (dump_file, " edge to %s has count "
3102 HOST_WIDE_INT_PRINT_DEC "\n",
3103 cs->callee->name (), (HOST_WIDE_INT) cs->count);
3105 fprintf (dump_file, " setting count of the original node to "
3106 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) orig_node->count);
3107 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
3108 fprintf (dump_file, " edge to %s is left with "
3109 HOST_WIDE_INT_PRINT_DEC "\n",
3110 cs->callee->name (), (HOST_WIDE_INT) cs->count);
3113 /* After a specialized NEW_NODE version of ORIG_NODE has been created, update
3114 their profile information to reflect this. */
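/* The new node is given the sum of the counts of its incoming edges, the
original node keeps the remainder, and the counts on the outgoing edges of
both nodes are scaled accordingly with apply_probability.  */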
3116 static void
3117 update_profiling_info (struct cgraph_node *orig_node,
3118 struct cgraph_node *new_node)
3120 struct cgraph_edge *cs;
3121 struct caller_statistics stats;
3122 gcov_type new_sum, orig_sum;
3123 gcov_type remainder, orig_node_count = orig_node->count;
3125 if (orig_node_count == 0)
3126 return;
3128 init_caller_stats (&stats);
3129 orig_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
3130 false);
3131 orig_sum = stats.count_sum;
3132 init_caller_stats (&stats);
3133 new_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
3134 false);
3135 new_sum = stats.count_sum;
3137 if (orig_node_count < orig_sum + new_sum)
3139 if (dump_file)
3140 fprintf (dump_file, " Problem: node %s/%i has too low count "
3141 HOST_WIDE_INT_PRINT_DEC " while the sum of incoming "
3142 "counts is " HOST_WIDE_INT_PRINT_DEC "\n",
3143 orig_node->name (), orig_node->order,
3144 (HOST_WIDE_INT) orig_node_count,
3145 (HOST_WIDE_INT) (orig_sum + new_sum));
3147 orig_node_count = (orig_sum + new_sum) * 12 / 10;
3148 if (dump_file)
3149 fprintf (dump_file, " proceeding by pretending it was "
3150 HOST_WIDE_INT_PRINT_DEC "\n",
3151 (HOST_WIDE_INT) orig_node_count);
3154 new_node->count = new_sum;
3155 remainder = orig_node_count - new_sum;
3156 orig_node->count = remainder;
3158 for (cs = new_node->callees; cs ; cs = cs->next_callee)
3159 if (cs->frequency)
3160 cs->count = apply_probability (cs->count,
3161 GCOV_COMPUTE_SCALE (new_sum,
3162 orig_node_count));
3163 else
3164 cs->count = 0;
3166 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
3167 cs->count = apply_probability (cs->count,
3168 GCOV_COMPUTE_SCALE (remainder,
3169 orig_node_count));
3171 if (dump_file)
3172 dump_profile_updates (orig_node, new_node);
3175 /* Update the respective profile of specialized NEW_NODE and the original
3176 ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
3177 have been redirected to the specialized version. */
3179 static void
3180 update_specialized_profile (struct cgraph_node *new_node,
3181 struct cgraph_node *orig_node,
3182 gcov_type redirected_sum)
3184 struct cgraph_edge *cs;
3185 gcov_type new_node_count, orig_node_count = orig_node->count;
3187 if (dump_file)
3188 fprintf (dump_file, " the sum of counts of redirected edges is "
3189 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) redirected_sum);
3190 if (orig_node_count == 0)
3191 return;
3193 gcc_assert (orig_node_count >= redirected_sum);
3195 new_node_count = new_node->count;
3196 new_node->count += redirected_sum;
3197 orig_node->count -= redirected_sum;
3199 for (cs = new_node->callees; cs ; cs = cs->next_callee)
3200 if (cs->frequency)
3201 cs->count += apply_probability (cs->count,
3202 GCOV_COMPUTE_SCALE (redirected_sum,
3203 new_node_count));
3204 else
3205 cs->count = 0;
3207 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
3209 gcov_type dec = apply_probability (cs->count,
3210 GCOV_COMPUTE_SCALE (redirected_sum,
3211 orig_node_count));
3212 if (dec < cs->count)
3213 cs->count -= dec;
3214 else
3215 cs->count = 0;
3218 if (dump_file)
3219 dump_profile_updates (orig_node, new_node);
3222 /* Create a specialized version of NODE with known constants in KNOWN_CSTS,
3223 known contexts in KNOWN_CONTEXTS and known aggregate values in AGGVALS and
3224 redirect all edges in CALLERS to it. */
3226 static struct cgraph_node *
3227 create_specialized_node (struct cgraph_node *node,
3228 vec<tree> known_csts,
3229 vec<ipa_polymorphic_call_context> known_contexts,
3230 struct ipa_agg_replacement_value *aggvals,
3231 vec<cgraph_edge *> callers)
3233 struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
3234 vec<ipa_replace_map *, va_gc> *replace_trees = NULL;
3235 struct ipa_agg_replacement_value *av;
3236 struct cgraph_node *new_node;
3237 int i, count = ipa_get_param_count (info);
3238 bitmap args_to_skip;
3240 gcc_assert (!info->ipcp_orig_node);
3242 if (node->local.can_change_signature)
3244 args_to_skip = BITMAP_GGC_ALLOC ();
3245 for (i = 0; i < count; i++)
3247 tree t = known_csts[i];
3249 if (t || !ipa_is_param_used (info, i))
3250 bitmap_set_bit (args_to_skip, i);
3253 else
3255 args_to_skip = NULL;
3256 if (dump_file && (dump_flags & TDF_DETAILS))
3257 fprintf (dump_file, " cannot change function signature\n");
3260 for (i = 0; i < count ; i++)
3262 tree t = known_csts[i];
3263 if (t)
3265 struct ipa_replace_map *replace_map;
3267 gcc_checking_assert (TREE_CODE (t) != TREE_BINFO);
3268 replace_map = get_replacement_map (info, t, i);
3269 if (replace_map)
3270 vec_safe_push (replace_trees, replace_map);
3274 new_node = node->create_virtual_clone (callers, replace_trees,
3275 args_to_skip, "constprop");
3276 ipa_set_node_agg_value_chain (new_node, aggvals);
3277 for (av = aggvals; av; av = av->next)
3278 new_node->maybe_create_reference (av->value, IPA_REF_ADDR, NULL);
3280 if (dump_file && (dump_flags & TDF_DETAILS))
3282 fprintf (dump_file, " the new node is %s/%i.\n",
3283 new_node->name (), new_node->order);
3284 if (known_contexts.exists ())
3286 for (i = 0; i < count ; i++)
3287 if (!known_contexts[i].useless_p ())
3289 fprintf (dump_file, " known ctx %i is ", i);
3290 known_contexts[i].dump (dump_file);
3293 if (aggvals)
3294 ipa_dump_agg_replacement_values (dump_file, aggvals);
3296 ipa_check_create_node_params ();
3297 update_profiling_info (node, new_node);
3298 new_info = IPA_NODE_REF (new_node);
3299 new_info->ipcp_orig_node = node;
3300 new_info->known_csts = known_csts;
3301 new_info->known_contexts = known_contexts;
3303 ipcp_discover_new_direct_edges (new_node, known_csts, known_contexts, aggvals);
3305 callers.release ();
3306 return new_node;
3309 /* Given a NODE and a subset of its CALLERS, try to populate blank slots in
3310 KNOWN_CSTS with constants that are also known for all of the CALLERS. */
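/* A constant is added for a parameter only when every caller passes, through
its jump function, a value that agrees with all of the others; a single
unknown or disagreeing argument discards the candidate.  */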
3312 static void
3313 find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
3314 vec<tree> known_csts,
3315 vec<cgraph_edge *> callers)
3317 struct ipa_node_params *info = IPA_NODE_REF (node);
3318 int i, count = ipa_get_param_count (info);
3320 for (i = 0; i < count ; i++)
3322 struct cgraph_edge *cs;
3323 tree newval = NULL_TREE;
3324 int j;
3325 bool first = true;
3327 if (ipa_get_scalar_lat (info, i)->bottom || known_csts[i])
3328 continue;
3330 FOR_EACH_VEC_ELT (callers, j, cs)
3332 struct ipa_jump_func *jump_func;
3333 tree t;
3335 if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
3337 newval = NULL_TREE;
3338 break;
3340 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
3341 t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func);
3342 if (!t
3343 || (newval
3344 && !values_equal_for_ipcp_p (t, newval))
3345 || (!first && !newval))
3347 newval = NULL_TREE;
3348 break;
3350 else
3351 newval = t;
3352 first = false;
3355 if (newval)
3357 if (dump_file && (dump_flags & TDF_DETAILS))
3359 fprintf (dump_file, " adding an extra known scalar value ");
3360 print_ipcp_constant_value (dump_file, newval);
3361 fprintf (dump_file, " for ");
3362 ipa_dump_param (dump_file, info, i);
3363 fprintf (dump_file, "\n");
3366 known_csts[i] = newval;
3371 /* Given a NODE and a subset of its CALLERS, try to populate blank slots in
3372 KNOWN_CONTEXTS with polymorphic contexts that are also known for all of the
3373 CALLERS. */
3375 static void
3376 find_more_contexts_for_caller_subset (cgraph_node *node,
3377 vec<ipa_polymorphic_call_context>
3378 *known_contexts,
3379 vec<cgraph_edge *> callers)
3381 ipa_node_params *info = IPA_NODE_REF (node);
3382 int i, count = ipa_get_param_count (info);
3384 for (i = 0; i < count ; i++)
3386 cgraph_edge *cs;
3388 if (ipa_get_poly_ctx_lat (info, i)->bottom
3389 || (known_contexts->exists ()
3390 && !(*known_contexts)[i].useless_p ()))
3391 continue;
3393 ipa_polymorphic_call_context newval;
3394 bool first = true;
3395 int j;
3397 FOR_EACH_VEC_ELT (callers, j, cs)
3399 if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
3400 return;
3401 ipa_jump_func *jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs),
3403 ipa_polymorphic_call_context ctx;
3404 ctx = ipa_context_from_jfunc (IPA_NODE_REF (cs->caller), cs, i,
3405 jfunc);
3406 if (first)
3408 newval = ctx;
3409 first = false;
3411 else
3412 newval.meet_with (ctx);
3413 if (newval.useless_p ())
3414 break;
3417 if (!newval.useless_p ())
3419 if (dump_file && (dump_flags & TDF_DETAILS))
3421 fprintf (dump_file, " adding an extra known polymorphic "
3422 "context ");
3423 print_ipcp_constant_value (dump_file, newval);
3424 fprintf (dump_file, " for ");
3425 ipa_dump_param (dump_file, info, i);
3426 fprintf (dump_file, "\n");
3429 if (!known_contexts->exists ())
3430 known_contexts->safe_grow_cleared (ipa_get_param_count (info));
3431 (*known_contexts)[i] = newval;
3437 /* Go through PLATS and create a vector of items consisting of the values and
3438 offsets (minus OFFSET) of the lattices that contain only a single value. */
3440 static vec<ipa_agg_jf_item>
3441 copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
3443 vec<ipa_agg_jf_item> res = vNULL;
3445 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
3446 return vNULL;
3448 for (struct ipcp_agg_lattice *aglat = plats->aggs; aglat; aglat = aglat->next)
3449 if (aglat->is_single_const ())
3451 struct ipa_agg_jf_item ti;
3452 ti.offset = aglat->offset - offset;
3453 ti.value = aglat->values->value;
3454 res.safe_push (ti);
3456 return res;
3459 /* Intersect all values in INTER with single value lattices in PLATS (while
3460 subtracting OFFSET). */
3462 static void
3463 intersect_with_plats (struct ipcp_param_lattices *plats,
3464 vec<ipa_agg_jf_item> *inter,
3465 HOST_WIDE_INT offset)
3467 struct ipcp_agg_lattice *aglat;
3468 struct ipa_agg_jf_item *item;
3469 int k;
3471 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
3473 inter->release ();
3474 return;
3477 aglat = plats->aggs;
3478 FOR_EACH_VEC_ELT (*inter, k, item)
3480 bool found = false;
3481 if (!item->value)
3482 continue;
3483 while (aglat)
3485 if (aglat->offset - offset > item->offset)
3486 break;
3487 if (aglat->offset - offset == item->offset)
3489 gcc_checking_assert (item->value);
3490 if (values_equal_for_ipcp_p (item->value, aglat->values->value))
3491 found = true;
3492 break;
3494 aglat = aglat->next;
3496 if (!found)
3497 item->value = NULL_TREE;
3501 /* Copy aggregate replacement values of NODE (which is an IPA-CP clone) to the
3502 vector result while subtracting OFFSET from the individual value offsets. */
3504 static vec<ipa_agg_jf_item>
3505 agg_replacements_to_vector (struct cgraph_node *node, int index,
3506 HOST_WIDE_INT offset)
3508 struct ipa_agg_replacement_value *av;
3509 vec<ipa_agg_jf_item> res = vNULL;
3511 for (av = ipa_get_agg_replacements_for_node (node); av; av = av->next)
3512 if (av->index == index
3513 && (av->offset - offset) >= 0)
3515 struct ipa_agg_jf_item item;
3516 gcc_checking_assert (av->value);
3517 item.offset = av->offset - offset;
3518 item.value = av->value;
3519 res.safe_push (item);
3522 return res;
3525 /* Intersect all values in INTER with those that we have already scheduled to
3526 be replaced in parameter number INDEX of NODE, which is an IPA-CP clone
3527 (while subtracting OFFSET). */
3529 static void
3530 intersect_with_agg_replacements (struct cgraph_node *node, int index,
3531 vec<ipa_agg_jf_item> *inter,
3532 HOST_WIDE_INT offset)
3534 struct ipa_agg_replacement_value *srcvals;
3535 struct ipa_agg_jf_item *item;
3536 int i;
3538 srcvals = ipa_get_agg_replacements_for_node (node);
3539 if (!srcvals)
3541 inter->release ();
3542 return;
3545 FOR_EACH_VEC_ELT (*inter, i, item)
3547 struct ipa_agg_replacement_value *av;
3548 bool found = false;
3549 if (!item->value)
3550 continue;
3551 for (av = srcvals; av; av = av->next)
3553 gcc_checking_assert (av->value);
3554 if (av->index == index
3555 && av->offset - offset == item->offset)
3557 if (values_equal_for_ipcp_p (item->value, av->value))
3558 found = true;
3559 break;
3562 if (!found)
3563 item->value = NULL_TREE;
3567 /* Intersect values in INTER with aggregate values that come along edge CS to
3568 parameter number INDEX and return it. If INTER does not actually exist yet,
3569 copy all incoming values to it. If we determine we ended up with no values
3570 whatsoever, return a released vector. */
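/* Three kinds of jump functions are handled below: a simple pass-through and
an aggregate-preserving ancestor jump function intersect INTER with the
aggregate lattices (or, for IPA-CP clones, the already scheduled aggregate
replacements) of the corresponding caller parameter, while a jump function
carrying explicit aggregate items is intersected with them item by item.
Anything else produces an empty result.  */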
3572 static vec<ipa_agg_jf_item>
3573 intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
3574 vec<ipa_agg_jf_item> inter)
3576 struct ipa_jump_func *jfunc;
3577 jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), index);
3578 if (jfunc->type == IPA_JF_PASS_THROUGH
3579 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3581 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
3582 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
3584 if (caller_info->ipcp_orig_node)
3586 struct cgraph_node *orig_node = caller_info->ipcp_orig_node;
3587 struct ipcp_param_lattices *orig_plats;
3588 orig_plats = ipa_get_parm_lattices (IPA_NODE_REF (orig_node),
3589 src_idx);
3590 if (agg_pass_through_permissible_p (orig_plats, jfunc))
3592 if (!inter.exists ())
3593 inter = agg_replacements_to_vector (cs->caller, src_idx, 0);
3594 else
3595 intersect_with_agg_replacements (cs->caller, src_idx,
3596 &inter, 0);
3598 else
3600 inter.release ();
3601 return vNULL;
3604 else
3606 struct ipcp_param_lattices *src_plats;
3607 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
3608 if (agg_pass_through_permissible_p (src_plats, jfunc))
3610 /* Currently we do not produce clobber aggregate jump
3611 functions, adjust when we do. */
3612 gcc_checking_assert (!jfunc->agg.items);
3613 if (!inter.exists ())
3614 inter = copy_plats_to_inter (src_plats, 0);
3615 else
3616 intersect_with_plats (src_plats, &inter, 0);
3618 else
3620 inter.release ();
3621 return vNULL;
3625 else if (jfunc->type == IPA_JF_ANCESTOR
3626 && ipa_get_jf_ancestor_agg_preserved (jfunc))
3628 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
3629 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
3630 struct ipcp_param_lattices *src_plats;
3631 HOST_WIDE_INT delta = ipa_get_jf_ancestor_offset (jfunc);
3633 if (caller_info->ipcp_orig_node)
3635 if (!inter.exists ())
3636 inter = agg_replacements_to_vector (cs->caller, src_idx, delta);
3637 else
3638 intersect_with_agg_replacements (cs->caller, src_idx, &inter,
3639 delta);
3641 else
3643 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
3644 /* Currently we do not produce clobber aggregate jump
3645 functions, adjust when we do. */
3646 gcc_checking_assert (!src_plats->aggs || !jfunc->agg.items);
3647 if (!inter.exists ())
3648 inter = copy_plats_to_inter (src_plats, delta);
3649 else
3650 intersect_with_plats (src_plats, &inter, delta);
3653 else if (jfunc->agg.items)
3655 struct ipa_agg_jf_item *item;
3656 int k;
3658 if (!inter.exists ())
3659 for (unsigned i = 0; i < jfunc->agg.items->length (); i++)
3660 inter.safe_push ((*jfunc->agg.items)[i]);
3661 else
3662 FOR_EACH_VEC_ELT (inter, k, item)
3664 int l = 0;
3665 bool found = false;
3667 if (!item->value)
3668 continue;
3670 while ((unsigned) l < jfunc->agg.items->length ())
3672 struct ipa_agg_jf_item *ti;
3673 ti = &(*jfunc->agg.items)[l];
3674 if (ti->offset > item->offset)
3675 break;
3676 if (ti->offset == item->offset)
3678 gcc_checking_assert (ti->value);
3679 if (values_equal_for_ipcp_p (item->value,
3680 ti->value))
3681 found = true;
3682 break;
3684 l++;
3686 if (!found)
3687 item->value = NULL;
3690 else
3692 inter.release ();
3693 return vec<ipa_agg_jf_item>();
3695 return inter;
3698 /* Look at edges in CALLERS and collect all known aggregate values that arrive
3699 from all of them. */
3701 static struct ipa_agg_replacement_value *
3702 find_aggregate_values_for_callers_subset (struct cgraph_node *node,
3703 vec<cgraph_edge *> callers)
3705 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
3706 struct ipa_agg_replacement_value *res;
3707 struct ipa_agg_replacement_value **tail = &res;
3708 struct cgraph_edge *cs;
3709 int i, j, count = ipa_get_param_count (dest_info);
3711 FOR_EACH_VEC_ELT (callers, j, cs)
3713 int c = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
3714 if (c < count)
3715 count = c;
3718 for (i = 0; i < count ; i++)
3720 struct cgraph_edge *cs;
3721 vec<ipa_agg_jf_item> inter = vNULL;
3722 struct ipa_agg_jf_item *item;
3723 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (dest_info, i);
3724 int j;
3726 /* Among other things, the following check should deal with all by_ref
3727 mismatches. */
3728 if (plats->aggs_bottom)
3729 continue;
3731 FOR_EACH_VEC_ELT (callers, j, cs)
3733 inter = intersect_aggregates_with_edge (cs, i, inter);
3735 if (!inter.exists ())
3736 goto next_param;
3739 FOR_EACH_VEC_ELT (inter, j, item)
3741 struct ipa_agg_replacement_value *v;
3743 if (!item->value)
3744 continue;
3746 v = ggc_alloc<ipa_agg_replacement_value> ();
3747 v->index = i;
3748 v->offset = item->offset;
3749 v->value = item->value;
3750 v->by_ref = plats->aggs_by_ref;
3751 *tail = v;
3752 tail = &v->next;
3755 next_param:
3756 if (inter.exists ())
3757 inter.release ();
3759 *tail = NULL;
3760 return res;
3763 /* Turn KNOWN_AGGS into a list of aggregate replacement values. */
3765 static struct ipa_agg_replacement_value *
3766 known_aggs_to_agg_replacement_list (vec<ipa_agg_jump_function> known_aggs)
3768 struct ipa_agg_replacement_value *res;
3769 struct ipa_agg_replacement_value **tail = &res;
3770 struct ipa_agg_jump_function *aggjf;
3771 struct ipa_agg_jf_item *item;
3772 int i, j;
3774 FOR_EACH_VEC_ELT (known_aggs, i, aggjf)
3775 FOR_EACH_VEC_SAFE_ELT (aggjf->items, j, item)
3777 struct ipa_agg_replacement_value *v;
3778 v = ggc_alloc<ipa_agg_replacement_value> ();
3779 v->index = i;
3780 v->offset = item->offset;
3781 v->value = item->value;
3782 v->by_ref = aggjf->by_ref;
3783 *tail = v;
3784 tail = &v->next;
3786 *tail = NULL;
3787 return res;
3790 /* Determine whether CS also brings all scalar values that the NODE is
3791 specialized for. */
3793 static bool
3794 cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
3795 struct cgraph_node *node)
3797 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
3798 int count = ipa_get_param_count (dest_info);
3799 struct ipa_node_params *caller_info;
3800 struct ipa_edge_args *args;
3801 int i;
3803 caller_info = IPA_NODE_REF (cs->caller);
3804 args = IPA_EDGE_REF (cs);
3805 for (i = 0; i < count; i++)
3807 struct ipa_jump_func *jump_func;
3808 tree val, t;
3810 val = dest_info->known_csts[i];
3811 if (!val)
3812 continue;
3814 if (i >= ipa_get_cs_argument_count (args))
3815 return false;
3816 jump_func = ipa_get_ith_jump_func (args, i);
3817 t = ipa_value_from_jfunc (caller_info, jump_func);
3818 if (!t || !values_equal_for_ipcp_p (val, t))
3819 return false;
3821 return true;
3824 /* Determine whether CS also brings all aggregate values that NODE is
3825 specialized for. */
3826 static bool
3827 cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
3828 struct cgraph_node *node)
3830 struct ipa_node_params *orig_caller_info = IPA_NODE_REF (cs->caller);
3831 struct ipa_node_params *orig_node_info;
3832 struct ipa_agg_replacement_value *aggval;
3833 int i, ec, count;
3835 aggval = ipa_get_agg_replacements_for_node (node);
3836 if (!aggval)
3837 return true;
3839 count = ipa_get_param_count (IPA_NODE_REF (node));
3840 ec = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
3841 if (ec < count)
3842 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3843 if (av->index >= ec)
3844 return false;
3846 orig_node_info = IPA_NODE_REF (IPA_NODE_REF (node)->ipcp_orig_node);
3847 if (orig_caller_info->ipcp_orig_node)
3848 orig_caller_info = IPA_NODE_REF (orig_caller_info->ipcp_orig_node);
3850 for (i = 0; i < count; i++)
3852 static vec<ipa_agg_jf_item> values = vec<ipa_agg_jf_item>();
3853 struct ipcp_param_lattices *plats;
3854 bool interesting = false;
3855 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3856 if (av->index == i)
3858 interesting = true;
3859 break;
3861 if (!interesting)
3862 continue;
3864 plats = ipa_get_parm_lattices (orig_node_info, i);
3865 if (plats->aggs_bottom)
3866 return false;
3868 values = intersect_aggregates_with_edge (cs, i, values);
3869 if (!values.exists ())
3870 return false;
3872 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3873 if (av->index == i)
3875 struct ipa_agg_jf_item *item;
3876 int j;
3877 bool found = false;
3878 FOR_EACH_VEC_ELT (values, j, item)
3879 if (item->value
3880 && item->offset == av->offset
3881 && values_equal_for_ipcp_p (item->value, av->value))
3883 found = true;
3884 break;
3886 if (!found)
3888 values.release ();
3889 return false;
3893 return true;
3896 /* Given an original NODE and a VAL for which we have already created a
3897 specialized clone, look whether there are incoming edges that still lead
3898 into the old node but now also bring the requested value and also conform to
3899 all other criteria such that they can be redirected to the specialized node.
3900 This function can therefore redirect the final edge in an SCC. */
3902 template <typename valtype>
3903 static void
3904 perhaps_add_new_callers (cgraph_node *node, ipcp_value<valtype> *val)
3906 ipcp_value_source<valtype> *src;
3907 gcov_type redirected_sum = 0;
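  /* Walk all recorded sources of VAL; for each original edge and all its
     clones, redirect those that satisfy every criterion to the existing
     specialized node and accumulate their execution counts so the profile
     can be updated afterwards.  */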
3909 for (src = val->sources; src; src = src->next)
3911 struct cgraph_edge *cs = src->cs;
3912 while (cs)
3914 if (cgraph_edge_brings_value_p (cs, src, node)
3915 && cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
3916 && cgraph_edge_brings_all_agg_vals_for_node (cs, val->spec_node))
3918 if (dump_file)
3919 fprintf (dump_file, " - adding an extra caller %s/%i"
3920 " of %s/%i\n",
3921 xstrdup_for_dump (cs->caller->name ()),
3922 cs->caller->order,
3923 xstrdup_for_dump (val->spec_node->name ()),
3924 val->spec_node->order);
3926 cs->redirect_callee_duplicating_thunks (val->spec_node);
3927 val->spec_node->expand_all_artificial_thunks ();
3928 redirected_sum += cs->count;
3930 cs = get_next_cgraph_edge_clone (cs);
3934 if (redirected_sum)
3935 update_specialized_profile (val->spec_node, node, redirected_sum);
3938 /* Return true if KNOWN_CONTEXTS contain at least one useful context. */
3940 static bool
3941 known_contexts_useful_p (vec<ipa_polymorphic_call_context> known_contexts)
3943 ipa_polymorphic_call_context *ctx;
3944 int i;
3946 FOR_EACH_VEC_ELT (known_contexts, i, ctx)
3947 if (!ctx->useless_p ())
3948 return true;
3949 return false;
3952 /* Return a copy of KNOWN_CONTEXTS if it contains at least one useful context, otherwise return vNULL. */
3954 static vec<ipa_polymorphic_call_context>
3955 copy_useful_known_contexts (vec<ipa_polymorphic_call_context> known_contexts)
3957 if (known_contexts_useful_p (known_contexts))
3958 return known_contexts.copy ();
3959 else
3960 return vNULL;
3963 /* Copy KNOWN_CSTS and modify the copy according to VAL and INDEX. If
3964 KNOWN_CONTEXTS contains at least one useful context, replace it with its copy too. */
3966 static void
3967 modify_known_vectors_with_val (vec<tree> *known_csts,
3968 vec<ipa_polymorphic_call_context> *known_contexts,
3969 ipcp_value<tree> *val,
3970 int index)
3972 *known_csts = known_csts->copy ();
3973 *known_contexts = copy_useful_known_contexts (*known_contexts);
3974 (*known_csts)[index] = val->value;
3977 /* Replace KNOWN_CSTS with its copy. Also copy KNOWN_CONTEXTS and modify the
3978 copy according to VAL and INDEX. */
3980 static void
3981 modify_known_vectors_with_val (vec<tree> *known_csts,
3982 vec<ipa_polymorphic_call_context> *known_contexts,
3983 ipcp_value<ipa_polymorphic_call_context> *val,
3984 int index)
3986 *known_csts = known_csts->copy ();
3987 *known_contexts = known_contexts->copy ();
3988 (*known_contexts)[index] = val->value;
3991 /* Return true if OFFSET indicates this was not an aggregate value or there is
3992 a replacement equivalent to VALUE, INDEX and OFFSET among those in the
3993 AGGVALS list. */
3995 DEBUG_FUNCTION bool
3996 ipcp_val_agg_replacement_ok_p (ipa_agg_replacement_value *aggvals,
3997 int index, HOST_WIDE_INT offset, tree value)
3999 if (offset == -1)
4000 return true;
4002 while (aggvals)
4004 if (aggvals->index == index
4005 && aggvals->offset == offset
4006 && values_equal_for_ipcp_p (aggvals->value, value))
4007 return true;
4008 aggvals = aggvals->next;
4010 return false;
4013 /* Return true if OFFSET is minus one, because the source of a polymorphic
4014 context cannot be an aggregate value. */
4016 DEBUG_FUNCTION bool
4017 ipcp_val_agg_replacement_ok_p (ipa_agg_replacement_value *,
4018 int, HOST_WIDE_INT offset,
4019 ipa_polymorphic_call_context)
4021 return offset == -1;
4024 /* Decide whether to create a special version of NODE for value VAL of parameter
4025 at the given INDEX. If OFFSET is -1, the value is for the parameter itself,
4026 otherwise it is stored at the given OFFSET of the parameter. KNOWN_CSTS,
4027 KNOWN_CONTEXTS and KNOWN_AGGS describe the other already known values. */
4029 template <typename valtype>
4030 static bool
4031 decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
4032 ipcp_value<valtype> *val, vec<tree> known_csts,
4033 vec<ipa_polymorphic_call_context> known_contexts)
4035 struct ipa_agg_replacement_value *aggvals;
4036 int freq_sum, caller_count;
4037 gcov_type count_sum;
4038 vec<cgraph_edge *> callers;
4040 if (val->spec_node)
4042 perhaps_add_new_callers (node, val);
4043 return false;
4045 else if (val->local_size_cost + overall_size > max_new_size)
4047 if (dump_file && (dump_flags & TDF_DETAILS))
4048 fprintf (dump_file, " Ignoring candidate value because "
4049 "max_new_size would be reached with %li.\n",
4050 val->local_size_cost + overall_size);
4051 return false;
4053 else if (!get_info_about_necessary_edges (val, node, &freq_sum, &count_sum,
4054 &caller_count))
4055 return false;
4057 if (dump_file && (dump_flags & TDF_DETAILS))
4059 fprintf (dump_file, " - considering value ");
4060 print_ipcp_constant_value (dump_file, val->value);
4061 fprintf (dump_file, " for ");
4062 ipa_dump_param (dump_file, IPA_NODE_REF (node), index);
4063 if (offset != -1)
4064 fprintf (dump_file, ", offset: " HOST_WIDE_INT_PRINT_DEC, offset);
4065 fprintf (dump_file, " (caller_count: %i)\n", caller_count);
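  /* Cloning is worthwhile if it pays off either from the local effects alone
     or once the time benefit and size cost of values that would be further
     propagated to callees are added in.  */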
4068 if (!good_cloning_opportunity_p (node, val->local_time_benefit,
4069 freq_sum, count_sum,
4070 val->local_size_cost)
4071 && !good_cloning_opportunity_p (node,
4072 val->local_time_benefit
4073 + val->prop_time_benefit,
4074 freq_sum, count_sum,
4075 val->local_size_cost
4076 + val->prop_size_cost))
4077 return false;
4079 if (dump_file)
4080 fprintf (dump_file, " Creating a specialized node of %s/%i.\n",
4081 node->name (), node->order);
4083 callers = gather_edges_for_value (val, node, caller_count);
4084 if (offset == -1)
4085 modify_known_vectors_with_val (&known_csts, &known_contexts, val, index);
4086 else
4088 known_csts = known_csts.copy ();
4089 known_contexts = copy_useful_known_contexts (known_contexts);
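  /* Beyond the value being decided upon, collect any further scalars,
     contexts and aggregate values that all the selected callers agree on,
     so the clone can be specialized for them as well.  */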
4091 find_more_scalar_values_for_callers_subset (node, known_csts, callers);
4092 find_more_contexts_for_caller_subset (node, &known_contexts, callers);
4093 aggvals = find_aggregate_values_for_callers_subset (node, callers);
4094 gcc_checking_assert (ipcp_val_agg_replacement_ok_p (aggvals, index,
4095 offset, val->value));
4096 val->spec_node = create_specialized_node (node, known_csts, known_contexts,
4097 aggvals, callers);
4098 overall_size += val->local_size_cost;
4100 /* TODO: If for some lattice there is only one other known value
4101 left, make a special node for it too. */
4103 return true;
4106 /* Decide whether and what specialized clones of NODE should be created. */
4108 static bool
4109 decide_whether_version_node (struct cgraph_node *node)
4111 struct ipa_node_params *info = IPA_NODE_REF (node);
4112 int i, count = ipa_get_param_count (info);
4113 vec<tree> known_csts;
4114 vec<ipa_polymorphic_call_context> known_contexts;
4115 vec<ipa_agg_jump_function> known_aggs = vNULL;
4116 bool ret = false;
4118 if (count == 0)
4119 return false;
4121 if (dump_file && (dump_flags & TDF_DETAILS))
4122 fprintf (dump_file, "\nEvaluating opportunities for %s/%i.\n",
4123 node->name (), node->order);
4125 gather_context_independent_values (info, &known_csts, &known_contexts,
4126 info->do_clone_for_all_contexts ? &known_aggs
4127 : NULL, NULL);
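  /* For every parameter, consider specializing on each scalar constant, each
     aggregate value at a known offset and each polymorphic context that is
     not already known context-independently.  */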
4129 for (i = 0; i < count; i++)
4131 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
4132 ipcp_lattice<tree> *lat = &plats->itself;
4133 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
4135 if (!lat->bottom
4136 && !known_csts[i])
4138 ipcp_value<tree> *val;
4139 for (val = lat->values; val; val = val->next)
4140 ret |= decide_about_value (node, i, -1, val, known_csts,
4141 known_contexts);
4144 if (!plats->aggs_bottom)
4146 struct ipcp_agg_lattice *aglat;
4147 ipcp_value<tree> *val;
4148 for (aglat = plats->aggs; aglat; aglat = aglat->next)
4149 if (!aglat->bottom && aglat->values
4150 /* If the following is false, the one value is in
4151 known_aggs. */
4152 && (plats->aggs_contain_variable
4153 || !aglat->is_single_const ()))
4154 for (val = aglat->values; val; val = val->next)
4155 ret |= decide_about_value (node, i, aglat->offset, val,
4156 known_csts, known_contexts);
4159 if (!ctxlat->bottom
4160 && known_contexts[i].useless_p ())
4162 ipcp_value<ipa_polymorphic_call_context> *val;
4163 for (val = ctxlat->values; val; val = val->next)
4164 ret |= decide_about_value (node, i, -1, val, known_csts,
4165 known_contexts);
4168 info = IPA_NODE_REF (node);
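  /* If the propagation stage found that a clone specialized for everything
     that is known context-independently would pay off, create it now and
     redirect all callers to it.  */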
4171 if (info->do_clone_for_all_contexts)
4173 struct cgraph_node *clone;
4174 vec<cgraph_edge *> callers;
4176 if (dump_file)
4177 fprintf (dump_file, " - Creating a specialized node of %s/%i "
4178 "for all known contexts.\n", node->name (),
4179 node->order);
4181 callers = node->collect_callers ();
4183 if (!known_contexts_useful_p (known_contexts))
4185 known_contexts.release ();
4186 known_contexts = vNULL;
4188 clone = create_specialized_node (node, known_csts, known_contexts,
4189 known_aggs_to_agg_replacement_list (known_aggs),
4190 callers);
4191 info = IPA_NODE_REF (node);
4192 info->do_clone_for_all_contexts = false;
4193 IPA_NODE_REF (clone)->is_all_contexts_clone = true;
4194 for (i = 0; i < count ; i++)
4195 vec_free (known_aggs[i].items);
4196 known_aggs.release ();
4197 ret = true;
4199 else
4201 known_csts.release ();
4202 known_contexts.release ();
4205 return ret;
4208 /* Transitively mark all callees of NODE within the same SCC as not dead. */
4210 static void
4211 spread_undeadness (struct cgraph_node *node)
4213 struct cgraph_edge *cs;
4215 for (cs = node->callees; cs; cs = cs->next_callee)
4216 if (ipa_edge_within_scc (cs))
4218 struct cgraph_node *callee;
4219 struct ipa_node_params *info;
4221 callee = cs->callee->function_symbol (NULL);
4222 info = IPA_NODE_REF (callee);
4224 if (info->node_dead)
4226 info->node_dead = 0;
4227 spread_undeadness (callee);
4232 /* Return true if NODE has a caller from outside of its SCC that is not
4233 dead. Worker callback for cgraph_node::call_for_symbol_thunks_and_aliases. */
4235 static bool
4236 has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
4237 void *data ATTRIBUTE_UNUSED)
4239 struct cgraph_edge *cs;
4241 for (cs = node->callers; cs; cs = cs->next_caller)
4242 if (cs->caller->thunk.thunk_p
4243 && cs->caller->call_for_symbol_thunks_and_aliases
4244 (has_undead_caller_from_outside_scc_p, NULL, true))
4245 return true;
4246 else if (!ipa_edge_within_scc (cs)
4247 && !IPA_NODE_REF (cs->caller)->node_dead)
4248 return true;
4249 return false;
4253 /* Identify nodes within the same SCC as NODE which are no longer needed
4254 because of new clones and will be removed as unreachable. */
4256 static void
4257 identify_dead_nodes (struct cgraph_node *node)
4259 struct cgraph_node *v;
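  /* First mark every node in the SCC that has no undead caller outside the
     SCC as dead, then let spread_undeadness resurrect everything that is
     still reachable from a live member.  */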
4260 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4261 if (v->will_be_removed_from_program_if_no_direct_calls_p ()
4262 && !v->call_for_symbol_thunks_and_aliases
4263 (has_undead_caller_from_outside_scc_p, NULL, true))
4264 IPA_NODE_REF (v)->node_dead = 1;
4266 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4267 if (!IPA_NODE_REF (v)->node_dead)
4268 spread_undeadness (v);
4270 if (dump_file && (dump_flags & TDF_DETAILS))
4272 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4273 if (IPA_NODE_REF (v)->node_dead)
4274 fprintf (dump_file, " Marking node as dead: %s/%i.\n",
4275 v->name (), v->order);
4279 /* The decision stage. Iterate over the topological order of call graph nodes
4280 TOPO and make specialized clones if deemed beneficial. */
4282 static void
4283 ipcp_decision_stage (struct ipa_topo_info *topo)
4285 int i;
4287 if (dump_file)
4288 fprintf (dump_file, "\nIPA decision stage:\n\n");
4290 for (i = topo->nnodes - 1; i >= 0; i--)
4292 struct cgraph_node *node = topo->order[i];
4293 bool change = false, iterate = true;
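      /* Iterate over the whole SCC because creating a clone for one member
	 can expose profitable opportunities in another; stop once a full
	 sweep produces no new clones.  */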
4295 while (iterate)
4297 struct cgraph_node *v;
4298 iterate = false;
4299 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4300 if (v->has_gimple_body_p ()
4301 && ipcp_versionable_function_p (v))
4302 iterate |= decide_whether_version_node (v);
4304 change |= iterate;
4306 if (change)
4307 identify_dead_nodes (node);
4311 /* Look up all alignment information that we have discovered and copy it over
4312 to the transformation summary. */
4314 static void
4315 ipcp_store_alignment_results (void)
4317 cgraph_node *node;
4319 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
4321 ipa_node_params *info = IPA_NODE_REF (node);
4322 bool dumped_sth = false;
4323 bool found_useful_result = false;
4325 if (!opt_for_fn (node->decl, flag_ipa_cp_alignment))
4327 if (dump_file)
4328 fprintf (dump_file, "Not considering %s for alignment discovery "
4329 "and propagate; -fipa-cp-alignment: disabled.\n",
4330 node->name ());
4331 continue;
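      /* For a node that is itself an IPA-CP clone, the propagation results
	 are stored on the original node, so read the lattices from there.  */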
4334 if (info->ipcp_orig_node)
4335 info = IPA_NODE_REF (info->ipcp_orig_node);
4337 unsigned count = ipa_get_param_count (info);
4338 for (unsigned i = 0; i < count ; i++)
4340 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
4341 if (plats->alignment.known
4342 && plats->alignment.align > 0)
4344 found_useful_result = true;
4345 break;
4348 if (!found_useful_result)
4349 continue;
4351 ipcp_grow_transformations_if_necessary ();
4352 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4353 vec_safe_reserve_exact (ts->alignments, count);
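      /* Push one entry per parameter; an alignment of zero is marked unknown
	 so that the transformation phase will ignore it.  */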
4355 for (unsigned i = 0; i < count ; i++)
4357 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
4359 if (plats->alignment.align == 0)
4360 plats->alignment.known = false;
4362 ts->alignments->quick_push (plats->alignment);
4363 if (!dump_file || !plats->alignment.known)
4364 continue;
4365 if (!dumped_sth)
4367 fprintf (dump_file, "Propagated alignment info for function %s/%i:\n",
4368 node->name (), node->order);
4369 dumped_sth = true;
4371 fprintf (dump_file, " param %i: align: %u, misalign: %u\n",
4372 i, plats->alignment.align, plats->alignment.misalign);
4377 /* The IPCP driver. */
4379 static unsigned int
4380 ipcp_driver (void)
4382 struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
4383 struct cgraph_edge_hook_list *edge_removal_hook_holder;
4384 struct ipa_topo_info topo;
4386 ipa_check_create_node_params ();
4387 ipa_check_create_edge_args ();
4388 grow_edge_clone_vectors ();
4389 edge_duplication_hook_holder =
4390 symtab->add_edge_duplication_hook (&ipcp_edge_duplication_hook, NULL);
4391 edge_removal_hook_holder =
4392 symtab->add_edge_removal_hook (&ipcp_edge_removal_hook, NULL);
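  /* The duplication and removal hooks keep the mapping from edges to their
     clones up to date while specialized nodes are created during the
     decision stage.  */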
4394 ipcp_cst_values_pool = create_alloc_pool ("IPA-CP constant values",
4395 sizeof (ipcp_value<tree>), 32);
4396 ipcp_poly_ctx_values_pool = create_alloc_pool
4397 ("IPA-CP polymorphic contexts",
4398 sizeof (ipcp_value<ipa_polymorphic_call_context>), 32);
4399 ipcp_sources_pool = create_alloc_pool ("IPA-CP value sources",
4400 sizeof (ipcp_value_source<tree>), 64);
4401 ipcp_agg_lattice_pool = create_alloc_pool ("IPA-CP aggregate lattices",
4402 sizeof (struct ipcp_agg_lattice),
4403 32);
4404 if (dump_file)
4406 fprintf (dump_file, "\nIPA structures before propagation:\n");
4407 if (dump_flags & TDF_DETAILS)
4408 ipa_print_all_params (dump_file);
4409 ipa_print_all_jump_functions (dump_file);
4412 /* Topological sort. */
4413 build_toporder_info (&topo);
4414 /* Do the interprocedural propagation. */
4415 ipcp_propagate_stage (&topo);
4416 /* Decide what constant propagation and cloning should be performed. */
4417 ipcp_decision_stage (&topo);
4418 /* Store results of alignment propagation. */
4419 ipcp_store_alignment_results ();
4421 /* Free all IPCP structures. */
4422 free_toporder_info (&topo);
4423 next_edge_clone.release ();
4424 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
4425 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
4426 ipa_free_all_structures_after_ipa_cp ();
4427 if (dump_file)
4428 fprintf (dump_file, "\nIPA constant propagation end\n");
4429 return 0;
4432 /* Initialization and computation of IPCP data structures. This is the initial
4433 intraprocedural analysis of functions, which gathers information to be
4434 propagated later on. */
4436 static void
4437 ipcp_generate_summary (void)
4439 struct cgraph_node *node;
4441 if (dump_file)
4442 fprintf (dump_file, "\nIPA constant propagation start:\n");
4443 ipa_register_cgraph_hooks ();
4445 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
4447 node->local.versionable
4448 = tree_versionable_function_p (node->decl);
4449 ipa_analyze_node (node);
4453 /* Write the ipcp summary for nodes that are being streamed out. */
4455 static void
4456 ipcp_write_summary (void)
4458 ipa_prop_write_jump_functions ();
4461 /* Read ipcp summary. */
4463 static void
4464 ipcp_read_summary (void)
4466 ipa_prop_read_jump_functions ();
4469 namespace {
4471 const pass_data pass_data_ipa_cp =
4473 IPA_PASS, /* type */
4474 "cp", /* name */
4475 OPTGROUP_NONE, /* optinfo_flags */
4476 TV_IPA_CONSTANT_PROP, /* tv_id */
4477 0, /* properties_required */
4478 0, /* properties_provided */
4479 0, /* properties_destroyed */
4480 0, /* todo_flags_start */
4481 ( TODO_dump_symtab | TODO_remove_functions ), /* todo_flags_finish */
4484 class pass_ipa_cp : public ipa_opt_pass_d
4486 public:
4487 pass_ipa_cp (gcc::context *ctxt)
4488 : ipa_opt_pass_d (pass_data_ipa_cp, ctxt,
4489 ipcp_generate_summary, /* generate_summary */
4490 ipcp_write_summary, /* write_summary */
4491 ipcp_read_summary, /* read_summary */
4492 ipcp_write_transformation_summaries, /*
4493 write_optimization_summary */
4494 ipcp_read_transformation_summaries, /*
4495 read_optimization_summary */
4496 NULL, /* stmt_fixup */
4497 0, /* function_transform_todo_flags_start */
4498 ipcp_transform_function, /* function_transform */
4499 NULL) /* variable_transform */
4502 /* opt_pass methods: */
4503 virtual bool gate (function *)
4505 /* FIXME: We should remove the optimize check after we ensure we never run
4506 IPA passes when not optimizing. */
4507 return (flag_ipa_cp && optimize) || in_lto_p;
4510 virtual unsigned int execute (function *) { return ipcp_driver (); }
4512 }; // class pass_ipa_cp
4514 } // anon namespace
4516 ipa_opt_pass_d *
4517 make_pass_ipa_cp (gcc::context *ctxt)
4519 return new pass_ipa_cp (ctxt);
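/* The pass manager instantiates this pass via gcc/passes.def, which lists it
   as NEXT_PASS (pass_ipa_cp) among the regular IPA passes.  */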
4522 /* Reset all state within ipa-cp.c so that we can rerun the compiler
4523 within the same process. For use by toplev::finalize. */
4525 void
4526 ipa_cp_c_finalize (void)
4528 max_count = 0;
4529 overall_size = 0;
4530 max_new_size = 0;