gcc/ipa-cp.c
1 /* Interprocedural constant propagation
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
4 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
5 <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Interprocedural constant propagation (IPA-CP).
25 The goal of this transformation is to
27 1) discover functions which are always invoked with some arguments with the
28 same known constant values and modify the functions so that the
29 subsequent optimizations can take advantage of the knowledge, and
31 2) partial specialization - create specialized versions of functions
32 transformed in this way if some parameters are known constants only in
 33 certain contexts but the estimated tradeoff between speedup and code size
 34 cost is deemed good.
36 The algorithm also propagates types and attempts to perform type based
37 devirtualization. Types are propagated much like constants.
39 The algorithm basically consists of three stages. In the first, functions
40 are analyzed one at a time and jump functions are constructed for all known
41 call-sites. In the second phase, the pass propagates information from the
42 jump functions across the call to reveal what values are available at what
43 call sites, performs estimations of effects of known values on functions and
44 their callees, and finally decides what specialized extra versions should be
45 created. In the third, the special versions materialize and appropriate
46 calls are redirected.
48 The algorithm used is to a certain extent based on "Interprocedural Constant
49 Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
50 Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
51 Cooper, Mary W. Hall, and Ken Kennedy.
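
   As a small, purely illustrative example (not taken from GCC or the papers
   above), consider:

     static int mult (int x, int factor)
     {
       return x * factor;
     }

     int sum_scaled (const int *a, int n)
     {
       int i, r = 0;
       for (i = 0; i < n; i++)
         r += mult (a[i], 4);
       return r;
     }

   If every call to mult passes the constant 4 as FACTOR, goal 1) allows the
   constant to be substituted into the body of mult, and goal 2) allows a
   specialized clone of mult with FACTOR replaced by 4 to be created and the
   qualifying calls redirected to it, even when other callers pass varying
   values.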
54 First stage - intraprocedural analysis
55 =======================================
57 This phase computes jump_function and modification flags.
 59 A jump function for a call-site represents the values passed as actual
60 arguments of a given call-site. In principle, there are three types of
61 values:
63 Pass through - the caller's formal parameter is passed as an actual
 64 argument, possibly with an operation performed on it.
65 Constant - a constant is passed as an actual argument.
66 Unknown - neither of the above.
68 All jump function types are described in detail in ipa-prop.h, together with
69 the data structures that represent them and methods of accessing them.
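
   For instance, in the illustrative caller below (not taken from real code)

     extern void bar (int, int, int);

     void foo (int a, int *p)
     {
       bar (a + 2, 7, *p);
     }

   the first argument of bar gets a pass-through jump function (parameter A
   with the operation "+ 2"), the second argument gets a constant jump
   function (7), and the third argument gets an unknown jump function because
   *p cannot be described in terms of foo's parameters or a constant.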
71 ipcp_generate_summary() is the main function of the first stage.
73 Second stage - interprocedural analysis
74 ========================================
76 This stage is itself divided into two phases. In the first, we propagate
77 known values over the call graph, in the second, we make cloning decisions.
 78 It uses a different algorithm than the one in Callahan's original paper.
80 First, we traverse the functions topologically from callers to callees and,
81 for each strongly connected component (SCC), we propagate constants
82 according to previously computed jump functions. We also record what known
83 values depend on other known values and estimate local effects. Finally, we
84 propagate cumulative information about these effects from dependent values
85 to those on which they depend.
87 Second, we again traverse the call graph in the same topological order and
88 make clones for functions which we know are called with the same values in
89 all contexts and decide about extra specialized clones of functions just for
90 some contexts - these decisions are based on both local estimates and
91 cumulative estimates propagated from callees.
93 ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
 94 second stage.
96 Third phase - materialization of clones, call statement updates.
 97 =================================================================
99 This stage is currently performed by call graph code (mainly in cgraphunit.c
100 and tree-inline.c) according to instructions inserted to the call graph by
101 the second stage. */
103 #include "config.h"
104 #include "system.h"
105 #include "coretypes.h"
106 #include "tree.h"
107 #include "gimple-fold.h"
108 #include "gimple-expr.h"
109 #include "target.h"
110 #include "predict.h"
111 #include "basic-block.h"
112 #include "vec.h"
113 #include "hash-map.h"
114 #include "is-a.h"
115 #include "plugin-api.h"
116 #include "hashtab.h"
117 #include "hash-set.h"
118 #include "machmode.h"
119 #include "tm.h"
120 #include "hard-reg-set.h"
121 #include "input.h"
122 #include "function.h"
123 #include "ipa-ref.h"
124 #include "cgraph.h"
125 #include "alloc-pool.h"
126 #include "ipa-prop.h"
127 #include "bitmap.h"
128 #include "tree-pass.h"
129 #include "flags.h"
130 #include "diagnostic.h"
131 #include "tree-pretty-print.h"
132 #include "tree-inline.h"
133 #include "params.h"
134 #include "ipa-inline.h"
135 #include "ipa-utils.h"
137 template <typename valtype> class ipcp_value;
139 /* Describes a particular source for an IPA-CP value. */
141 template <typename valtype>
142 class ipcp_value_source
144 public:
145 /* Aggregate offset of the source, negative if the source is scalar value of
146 the argument itself. */
147 HOST_WIDE_INT offset;
148 /* The incoming edge that brought the value. */
149 cgraph_edge *cs;
 150 /* If the jump function that resulted in this value was a pass-through or an
151 ancestor, this is the ipcp_value of the caller from which the described
152 value has been derived. Otherwise it is NULL. */
153 ipcp_value<valtype> *val;
154 /* Next pointer in a linked list of sources of a value. */
155 ipcp_value_source *next;
 156 /* If the jump function that resulted in this value was a pass-through or an
157 ancestor, this is the index of the parameter of the caller the jump
158 function references. */
159 int index;
162 /* Common ancestor for all ipcp_value instantiations. */
164 class ipcp_value_base
166 public:
167 /* Time benefit and size cost that specializing the function for this value
168 would bring about in this function alone. */
169 int local_time_benefit, local_size_cost;
170 /* Time benefit and size cost that specializing the function for this value
 171 can bring about in its callees (transitively). */
172 int prop_time_benefit, prop_size_cost;
175 /* Describes one particular value stored in struct ipcp_lattice. */
177 template <typename valtype>
178 class ipcp_value : public ipcp_value_base
180 public:
181 /* The actual value for the given parameter. */
182 valtype value;
183 /* The list of sources from which this value originates. */
184 ipcp_value_source <valtype> *sources;
185 /* Next pointers in a linked list of all values in a lattice. */
186 ipcp_value *next;
187 /* Next pointers in a linked list of values in a strongly connected component
188 of values. */
189 ipcp_value *scc_next;
190 /* Next pointers in a linked list of SCCs of values sorted topologically
 191 according to their sources. */
192 ipcp_value *topo_next;
193 /* A specialized node created for this value, NULL if none has been (so far)
194 created. */
195 cgraph_node *spec_node;
196 /* Depth first search number and low link for topological sorting of
197 values. */
198 int dfs, low_link;
 199 /* True if this value is currently on the topo-sort stack. */
200 bool on_stack;
202 void add_source (cgraph_edge *cs, ipcp_value *src_val, int src_idx,
203 HOST_WIDE_INT offset);
206 /* Lattice describing potential values of a formal parameter of a function, or
 207 a part of an aggregate. TOP is represented by a lattice with zero values
208 and with contains_variable and bottom flags cleared. BOTTOM is represented
209 by a lattice with the bottom flag set. In that case, values and
210 contains_variable flag should be disregarded. */
212 template <typename valtype>
213 class ipcp_lattice
215 public:
216 /* The list of known values and types in this lattice. Note that values are
217 not deallocated if a lattice is set to bottom because there may be value
218 sources referencing them. */
219 ipcp_value<valtype> *values;
220 /* Number of known values and types in this lattice. */
221 int values_count;
222 /* The lattice contains a variable component (in addition to values). */
223 bool contains_variable;
224 /* The value of the lattice is bottom (i.e. variable and unusable for any
225 propagation). */
226 bool bottom;
228 inline bool is_single_const ();
229 inline bool set_to_bottom ();
230 inline bool set_contains_variable ();
231 bool add_value (valtype newval, cgraph_edge *cs,
232 ipcp_value<valtype> *src_val = NULL,
233 int src_idx = 0, HOST_WIDE_INT offset = -1);
234 void print (FILE * f, bool dump_sources, bool dump_benefits);
237 /* Lattice of tree values with an offset to describe a part of an
238 aggregate. */
240 class ipcp_agg_lattice : public ipcp_lattice<tree>
242 public:
243 /* Offset that is being described by this lattice. */
244 HOST_WIDE_INT offset;
245 /* Size so that we don't have to re-compute it every time we traverse the
246 list. Must correspond to TYPE_SIZE of all lat values. */
247 HOST_WIDE_INT size;
248 /* Next element of the linked list. */
249 struct ipcp_agg_lattice *next;
252 /* Structure containing lattices for a parameter itself and for pieces of
253 aggregates that are passed in the parameter or by a reference in a parameter
254 plus some other useful flags. */
256 class ipcp_param_lattices
258 public:
259 /* Lattice describing the value of the parameter itself. */
260 ipcp_lattice<tree> itself;
 261 /* Lattice describing the polymorphic contexts of a parameter. */
262 ipcp_lattice<ipa_polymorphic_call_context> ctxlat;
263 /* Lattices describing aggregate parts. */
264 ipcp_agg_lattice *aggs;
 265 /* Alignment information. Very basic one-value lattice where !known means
 266 TOP and a zero alignment means BOTTOM. */
267 ipa_alignment alignment;
268 /* Number of aggregate lattices */
269 int aggs_count;
270 /* True if aggregate data were passed by reference (as opposed to by
271 value). */
272 bool aggs_by_ref;
273 /* All aggregate lattices contain a variable component (in addition to
274 values). */
275 bool aggs_contain_variable;
276 /* The value of all aggregate lattices is bottom (i.e. variable and unusable
277 for any propagation). */
278 bool aggs_bottom;
280 /* There is a virtual call based on this parameter. */
281 bool virt_call;
284 /* Allocation pools for values and their sources in ipa-cp. */
286 alloc_pool ipcp_cst_values_pool;
287 alloc_pool ipcp_poly_ctx_values_pool;
288 alloc_pool ipcp_sources_pool;
289 alloc_pool ipcp_agg_lattice_pool;
291 /* Maximal count found in program. */
293 static gcov_type max_count;
295 /* Original overall size of the program. */
297 static long overall_size, max_new_size;
299 /* Return the param lattices structure corresponding to the Ith formal
300 parameter of the function described by INFO. */
301 static inline struct ipcp_param_lattices *
302 ipa_get_parm_lattices (struct ipa_node_params *info, int i)
304 gcc_assert (i >= 0 && i < ipa_get_param_count (info));
305 gcc_checking_assert (!info->ipcp_orig_node);
306 gcc_checking_assert (info->lattices);
307 return &(info->lattices[i]);
310 /* Return the lattice corresponding to the scalar value of the Ith formal
311 parameter of the function described by INFO. */
312 static inline ipcp_lattice<tree> *
313 ipa_get_scalar_lat (struct ipa_node_params *info, int i)
315 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
316 return &plats->itself;
 319 /* Return the lattice corresponding to the polymorphic contexts of the Ith
 320 formal parameter of the function described by INFO. */
321 static inline ipcp_lattice<ipa_polymorphic_call_context> *
322 ipa_get_poly_ctx_lat (struct ipa_node_params *info, int i)
324 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
325 return &plats->ctxlat;
328 /* Return whether LAT is a lattice with a single constant and without an
329 undefined value. */
331 template <typename valtype>
332 inline bool
333 ipcp_lattice<valtype>::is_single_const ()
335 if (bottom || contains_variable || values_count != 1)
336 return false;
337 else
338 return true;
341 /* Print V which is extracted from a value in a lattice to F. */
343 static void
344 print_ipcp_constant_value (FILE * f, tree v)
346 if (TREE_CODE (v) == ADDR_EXPR
347 && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
349 fprintf (f, "& ");
350 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)), 0);
352 else
353 print_generic_expr (f, v, 0);
356 /* Print V which is extracted from a value in a lattice to F. */
358 static void
359 print_ipcp_constant_value (FILE * f, ipa_polymorphic_call_context v)
361 v.dump(f, false);
364 /* Print a lattice LAT to F. */
366 template <typename valtype>
367 void
368 ipcp_lattice<valtype>::print (FILE * f, bool dump_sources, bool dump_benefits)
370 ipcp_value<valtype> *val;
371 bool prev = false;
373 if (bottom)
375 fprintf (f, "BOTTOM\n");
376 return;
379 if (!values_count && !contains_variable)
381 fprintf (f, "TOP\n");
382 return;
385 if (contains_variable)
387 fprintf (f, "VARIABLE");
388 prev = true;
389 if (dump_benefits)
390 fprintf (f, "\n");
393 for (val = values; val; val = val->next)
395 if (dump_benefits && prev)
396 fprintf (f, " ");
397 else if (!dump_benefits && prev)
398 fprintf (f, ", ");
399 else
400 prev = true;
402 print_ipcp_constant_value (f, val->value);
404 if (dump_sources)
406 ipcp_value_source<valtype> *s;
408 fprintf (f, " [from:");
409 for (s = val->sources; s; s = s->next)
410 fprintf (f, " %i(%i)", s->cs->caller->order,
411 s->cs->frequency);
412 fprintf (f, "]");
415 if (dump_benefits)
416 fprintf (f, " [loc_time: %i, loc_size: %i, "
417 "prop_time: %i, prop_size: %i]\n",
418 val->local_time_benefit, val->local_size_cost,
419 val->prop_time_benefit, val->prop_size_cost);
421 if (!dump_benefits)
422 fprintf (f, "\n");
425 /* Print all ipcp_lattices of all functions to F. */
427 static void
428 print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
430 struct cgraph_node *node;
431 int i, count;
433 fprintf (f, "\nLattices:\n");
434 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
436 struct ipa_node_params *info;
438 info = IPA_NODE_REF (node);
439 fprintf (f, " Node: %s/%i:\n", node->name (),
440 node->order);
441 count = ipa_get_param_count (info);
442 for (i = 0; i < count; i++)
444 struct ipcp_agg_lattice *aglat;
445 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
446 fprintf (f, " param [%d]: ", i);
447 plats->itself.print (f, dump_sources, dump_benefits);
448 fprintf (f, " ctxs: ");
449 plats->ctxlat.print (f, dump_sources, dump_benefits);
450 if (plats->alignment.known && plats->alignment.align > 0)
451 fprintf (f, " Alignment %u, misalignment %u\n",
452 plats->alignment.align, plats->alignment.misalign);
453 else if (plats->alignment.known)
454 fprintf (f, " Alignment unusable\n");
455 else
456 fprintf (f, " Alignment unknown\n");
457 if (plats->virt_call)
458 fprintf (f, " virt_call flag set\n");
460 if (plats->aggs_bottom)
462 fprintf (f, " AGGS BOTTOM\n");
463 continue;
465 if (plats->aggs_contain_variable)
466 fprintf (f, " AGGS VARIABLE\n");
467 for (aglat = plats->aggs; aglat; aglat = aglat->next)
469 fprintf (f, " %soffset " HOST_WIDE_INT_PRINT_DEC ": ",
470 plats->aggs_by_ref ? "ref " : "", aglat->offset);
471 aglat->print (f, dump_sources, dump_benefits);
477 /* Determine whether it is at all technically possible to create clones of NODE
478 and store this information in the ipa_node_params structure associated
479 with NODE. */
481 static void
482 determine_versionability (struct cgraph_node *node)
484 const char *reason = NULL;
486 /* There are a number of generic reasons functions cannot be versioned. We
487 also cannot remove parameters if there are type attributes such as fnspec
488 present. */
489 if (node->alias || node->thunk.thunk_p)
490 reason = "alias or thunk";
491 else if (!node->local.versionable)
492 reason = "not a tree_versionable_function";
493 else if (node->get_availability () <= AVAIL_INTERPOSABLE)
494 reason = "insufficient body availability";
495 else if (!opt_for_fn (node->decl, optimize)
496 || !opt_for_fn (node->decl, flag_ipa_cp))
497 reason = "non-optimized function";
498 else if (lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (node->decl)))
500 /* Ideally we should clone the SIMD clones themselves and create
501 vector copies of them, so IPA-cp and SIMD clones can happily
502 coexist, but that may not be worth the effort. */
503 reason = "function has SIMD clones";
 505 /* Don't clone decls local to a comdat group; it breaks things, and for C++
 506 decloned constructors, inlining is always better anyway. */
507 else if (node->comdat_local_p ())
508 reason = "comdat-local function";
510 if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
511 fprintf (dump_file, "Function %s/%i is not versionable, reason: %s.\n",
512 node->name (), node->order, reason);
514 node->local.versionable = (reason == NULL);
517 /* Return true if it is at all technically possible to create clones of a
518 NODE. */
520 static bool
521 ipcp_versionable_function_p (struct cgraph_node *node)
523 return node->local.versionable;
526 /* Structure holding accumulated information about callers of a node. */
528 struct caller_statistics
530 gcov_type count_sum;
531 int n_calls, n_hot_calls, freq_sum;
534 /* Initialize fields of STAT to zeroes. */
536 static inline void
537 init_caller_stats (struct caller_statistics *stats)
539 stats->count_sum = 0;
540 stats->n_calls = 0;
541 stats->n_hot_calls = 0;
542 stats->freq_sum = 0;
545 /* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
546 non-thunk incoming edges to NODE. */
548 static bool
549 gather_caller_stats (struct cgraph_node *node, void *data)
551 struct caller_statistics *stats = (struct caller_statistics *) data;
552 struct cgraph_edge *cs;
554 for (cs = node->callers; cs; cs = cs->next_caller)
555 if (cs->caller->thunk.thunk_p)
556 cs->caller->call_for_symbol_thunks_and_aliases (gather_caller_stats,
557 stats, false);
558 else
560 stats->count_sum += cs->count;
561 stats->freq_sum += cs->frequency;
562 stats->n_calls++;
563 if (cs->maybe_hot_p ())
564 stats->n_hot_calls ++;
566 return false;
 570 /* Return true if this NODE is a viable candidate for cloning. */
572 static bool
573 ipcp_cloning_candidate_p (struct cgraph_node *node)
575 struct caller_statistics stats;
577 gcc_checking_assert (node->has_gimple_body_p ());
579 if (!opt_for_fn (node->decl, flag_ipa_cp_clone))
581 if (dump_file)
582 fprintf (dump_file, "Not considering %s for cloning; "
583 "-fipa-cp-clone disabled.\n",
584 node->name ());
585 return false;
588 if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
590 if (dump_file)
591 fprintf (dump_file, "Not considering %s for cloning; "
592 "optimizing it for size.\n",
593 node->name ());
594 return false;
597 init_caller_stats (&stats);
598 node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats, false);
600 if (inline_summary (node)->self_size < stats.n_calls)
602 if (dump_file)
603 fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
604 node->name ());
605 return true;
 608 /* When a profile is available and the function is hot, propagate into it even
 609 if calls seem cold; constant propagation can improve the function's speed
 610 significantly. */
611 if (max_count)
613 if (stats.count_sum > node->count * 90 / 100)
615 if (dump_file)
616 fprintf (dump_file, "Considering %s for cloning; "
617 "usually called directly.\n",
618 node->name ());
619 return true;
622 if (!stats.n_hot_calls)
624 if (dump_file)
625 fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
626 node->name ());
627 return false;
629 if (dump_file)
630 fprintf (dump_file, "Considering %s for cloning.\n",
631 node->name ());
632 return true;
635 template <typename valtype>
636 class value_topo_info
638 public:
639 /* Head of the linked list of topologically sorted values. */
640 ipcp_value<valtype> *values_topo;
641 /* Stack for creating SCCs, represented by a linked list too. */
642 ipcp_value<valtype> *stack;
643 /* Counter driving the algorithm in add_val_to_toposort. */
644 int dfs_counter;
646 value_topo_info () : values_topo (NULL), stack (NULL), dfs_counter (0)
648 void add_val (ipcp_value<valtype> *cur_val);
649 void propagate_effects ();
652 /* Arrays representing a topological ordering of call graph nodes and a stack
653 of nodes used during constant propagation and also data required to perform
654 topological sort of values and propagation of benefits in the determined
655 order. */
657 class ipa_topo_info
659 public:
660 /* Array with obtained topological order of cgraph nodes. */
661 struct cgraph_node **order;
662 /* Stack of cgraph nodes used during propagation within SCC until all values
663 in the SCC stabilize. */
664 struct cgraph_node **stack;
665 int nnodes, stack_top;
667 value_topo_info<tree> constants;
668 value_topo_info<ipa_polymorphic_call_context> contexts;
670 ipa_topo_info () : order(NULL), stack(NULL), nnodes(0), stack_top(0),
671 constants ()
675 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
677 static void
678 build_toporder_info (struct ipa_topo_info *topo)
680 topo->order = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
681 topo->stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
683 gcc_checking_assert (topo->stack_top == 0);
684 topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
687 /* Free information about strongly connected components and the arrays in
688 TOPO. */
690 static void
691 free_toporder_info (struct ipa_topo_info *topo)
693 ipa_free_postorder_info ();
694 free (topo->order);
695 free (topo->stack);
698 /* Add NODE to the stack in TOPO, unless it is already there. */
700 static inline void
701 push_node_to_stack (struct ipa_topo_info *topo, struct cgraph_node *node)
703 struct ipa_node_params *info = IPA_NODE_REF (node);
704 if (info->node_enqueued)
705 return;
706 info->node_enqueued = 1;
707 topo->stack[topo->stack_top++] = node;
710 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
711 is empty. */
713 static struct cgraph_node *
714 pop_node_from_stack (struct ipa_topo_info *topo)
716 if (topo->stack_top)
718 struct cgraph_node *node;
719 topo->stack_top--;
720 node = topo->stack[topo->stack_top];
721 IPA_NODE_REF (node)->node_enqueued = 0;
722 return node;
724 else
725 return NULL;
728 /* Set lattice LAT to bottom and return true if it previously was not set as
729 such. */
731 template <typename valtype>
732 inline bool
733 ipcp_lattice<valtype>::set_to_bottom ()
735 bool ret = !bottom;
736 bottom = true;
737 return ret;
740 /* Mark lattice as containing an unknown value and return true if it previously
741 was not marked as such. */
743 template <typename valtype>
744 inline bool
745 ipcp_lattice<valtype>::set_contains_variable ()
747 bool ret = !contains_variable;
748 contains_variable = true;
749 return ret;
 752 /* Set all aggregate lattices in PLATS to bottom and return true if they were
753 not previously set as such. */
755 static inline bool
756 set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
758 bool ret = !plats->aggs_bottom;
759 plats->aggs_bottom = true;
760 return ret;
 763 /* Mark all aggregate lattices in PLATS as containing an unknown value and
764 return true if they were not previously marked as such. */
766 static inline bool
767 set_agg_lats_contain_variable (struct ipcp_param_lattices *plats)
769 bool ret = !plats->aggs_contain_variable;
770 plats->aggs_contain_variable = true;
771 return ret;
774 /* Return true if alignment information in PLATS is known to be unusable. */
776 static inline bool
777 alignment_bottom_p (ipcp_param_lattices *plats)
779 return plats->alignment.known && (plats->alignment.align == 0);
782 /* Set alignment information in PLATS to unusable. Return true if it
783 previously was usable or unknown. */
785 static inline bool
786 set_alignment_to_bottom (ipcp_param_lattices *plats)
788 if (alignment_bottom_p (plats))
789 return false;
790 plats->alignment.known = true;
791 plats->alignment.align = 0;
792 return true;
 795 /* Mark both aggregate and scalar lattices as containing an unknown value, and
 796 return true if any of them has not been marked as such so far. */
798 static inline bool
799 set_all_contains_variable (struct ipcp_param_lattices *plats)
801 bool ret;
802 ret = plats->itself.set_contains_variable ();
803 ret |= plats->ctxlat.set_contains_variable ();
804 ret |= set_agg_lats_contain_variable (plats);
805 ret |= set_alignment_to_bottom (plats);
806 return ret;
809 /* Initialize ipcp_lattices. */
811 static void
812 initialize_node_lattices (struct cgraph_node *node)
814 struct ipa_node_params *info = IPA_NODE_REF (node);
815 struct cgraph_edge *ie;
816 bool disable = false, variable = false;
817 int i;
819 gcc_checking_assert (node->has_gimple_body_p ());
820 if (!cgraph_local_p (node))
822 /* When cloning is allowed, we can assume that externally visible
 823 functions are not called. We will compensate for this by cloning
824 later. */
825 if (ipcp_versionable_function_p (node)
826 && ipcp_cloning_candidate_p (node))
827 variable = true;
828 else
829 disable = true;
832 if (disable || variable)
834 for (i = 0; i < ipa_get_param_count (info) ; i++)
836 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
837 if (disable)
839 plats->itself.set_to_bottom ();
840 plats->ctxlat.set_to_bottom ();
841 set_agg_lats_to_bottom (plats);
842 set_alignment_to_bottom (plats);
844 else
845 set_all_contains_variable (plats);
847 if (dump_file && (dump_flags & TDF_DETAILS)
848 && !node->alias && !node->thunk.thunk_p)
849 fprintf (dump_file, "Marking all lattices of %s/%i as %s\n",
850 node->name (), node->order,
851 disable ? "BOTTOM" : "VARIABLE");
854 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
855 if (ie->indirect_info->polymorphic
856 && ie->indirect_info->param_index >= 0)
858 gcc_checking_assert (ie->indirect_info->param_index >= 0);
859 ipa_get_parm_lattices (info,
860 ie->indirect_info->param_index)->virt_call = 1;
864 /* Return the result of a (possibly arithmetic) pass through jump function
865 JFUNC on the constant value INPUT. Return NULL_TREE if that cannot be
 866 determined or cannot be considered an interprocedural invariant. */
868 static tree
869 ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
871 tree restype, res;
873 gcc_checking_assert (is_gimple_ip_invariant (input));
874 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
875 return input;
877 if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc))
878 == tcc_comparison)
879 restype = boolean_type_node;
880 else
881 restype = TREE_TYPE (input);
882 res = fold_binary (ipa_get_jf_pass_through_operation (jfunc), restype,
883 input, ipa_get_jf_pass_through_operand (jfunc));
885 if (res && !is_gimple_ip_invariant (res))
886 return NULL_TREE;
888 return res;
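
/* A purely illustrative example for the function above: if JFUNC is a
   pass-through with operation PLUS_EXPR and operand 4 (i.e. it represents
   "param + 4") and INPUT is the integer constant 3, fold_binary produces the
   interprocedural invariant 7, which is returned.  */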
891 /* Return the result of an ancestor jump function JFUNC on the constant value
892 INPUT. Return NULL_TREE if that cannot be determined. */
894 static tree
895 ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
897 gcc_checking_assert (TREE_CODE (input) != TREE_BINFO);
898 if (TREE_CODE (input) == ADDR_EXPR)
900 tree t = TREE_OPERAND (input, 0);
901 t = build_ref_for_offset (EXPR_LOCATION (t), t,
902 ipa_get_jf_ancestor_offset (jfunc),
903 ptr_type_node, NULL, false);
904 return build_fold_addr_expr (t);
906 else
907 return NULL_TREE;
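
/* Illustratively (not an excerpt from real code), if INPUT is &obj and the
   ancestor jump function describes the offset of a base or member within obj,
   the result above is roughly the address of that component, e.g. &obj.field;
   for any other kind of constant the result is NULL_TREE.  */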
910 /* Determine whether JFUNC evaluates to a single known constant value and if
911 so, return it. Otherwise return NULL. INFO describes the caller node or
912 the one it is inlined to, so that pass-through jump functions can be
913 evaluated. */
915 tree
916 ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
918 if (jfunc->type == IPA_JF_CONST)
919 return ipa_get_jf_constant (jfunc);
920 else if (jfunc->type == IPA_JF_PASS_THROUGH
921 || jfunc->type == IPA_JF_ANCESTOR)
923 tree input;
924 int idx;
926 if (jfunc->type == IPA_JF_PASS_THROUGH)
927 idx = ipa_get_jf_pass_through_formal_id (jfunc);
928 else
929 idx = ipa_get_jf_ancestor_formal_id (jfunc);
931 if (info->ipcp_orig_node)
932 input = info->known_csts[idx];
933 else
935 ipcp_lattice<tree> *lat;
937 if (!info->lattices)
938 return NULL_TREE;
939 lat = ipa_get_scalar_lat (info, idx);
940 if (!lat->is_single_const ())
941 return NULL_TREE;
942 input = lat->values->value;
945 if (!input)
946 return NULL_TREE;
948 if (jfunc->type == IPA_JF_PASS_THROUGH)
949 return ipa_get_jf_pass_through_result (jfunc, input);
950 else
951 return ipa_get_jf_ancestor_result (jfunc, input);
953 else
954 return NULL_TREE;
 957 /* Determine whether JFUNC evaluates to a single known polymorphic context, given
958 that INFO describes the caller node or the one it is inlined to, CS is the
959 call graph edge corresponding to JFUNC and CSIDX index of the described
960 parameter. */
962 ipa_polymorphic_call_context
963 ipa_context_from_jfunc (ipa_node_params *info, cgraph_edge *cs, int csidx,
964 ipa_jump_func *jfunc)
966 ipa_edge_args *args = IPA_EDGE_REF (cs);
967 ipa_polymorphic_call_context ctx;
968 ipa_polymorphic_call_context *edge_ctx
969 = cs ? ipa_get_ith_polymorhic_call_context (args, csidx) : NULL;
971 if (edge_ctx && !edge_ctx->useless_p ())
972 ctx = *edge_ctx;
974 if (jfunc->type == IPA_JF_PASS_THROUGH
975 || jfunc->type == IPA_JF_ANCESTOR)
977 ipa_polymorphic_call_context srcctx;
978 int srcidx;
979 bool type_preserved = true;
980 if (jfunc->type == IPA_JF_PASS_THROUGH)
982 if (ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
983 return ctx;
984 type_preserved = ipa_get_jf_pass_through_type_preserved (jfunc);
985 srcidx = ipa_get_jf_pass_through_formal_id (jfunc);
987 else
989 type_preserved = ipa_get_jf_ancestor_type_preserved (jfunc);
990 srcidx = ipa_get_jf_ancestor_formal_id (jfunc);
992 if (info->ipcp_orig_node)
994 if (info->known_contexts.exists ())
995 srcctx = info->known_contexts[srcidx];
997 else
999 if (!info->lattices)
1000 return ctx;
1001 ipcp_lattice<ipa_polymorphic_call_context> *lat;
1002 lat = ipa_get_poly_ctx_lat (info, srcidx);
1003 if (!lat->is_single_const ())
1004 return ctx;
1005 srcctx = lat->values->value;
1007 if (srcctx.useless_p ())
1008 return ctx;
1009 if (jfunc->type == IPA_JF_ANCESTOR)
1010 srcctx.offset_by (ipa_get_jf_ancestor_offset (jfunc));
1011 if (!type_preserved)
1012 srcctx.possible_dynamic_type_change (cs->in_polymorphic_cdtor);
1013 srcctx.combine_with (ctx);
1014 return srcctx;
1017 return ctx;
1020 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
1021 bottom, not containing a variable component and without any known value at
1022 the same time. */
1024 DEBUG_FUNCTION void
1025 ipcp_verify_propagated_values (void)
1027 struct cgraph_node *node;
1029 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
1031 struct ipa_node_params *info = IPA_NODE_REF (node);
1032 int i, count = ipa_get_param_count (info);
1034 for (i = 0; i < count; i++)
1036 ipcp_lattice<tree> *lat = ipa_get_scalar_lat (info, i);
1038 if (!lat->bottom
1039 && !lat->contains_variable
1040 && lat->values_count == 0)
1042 if (dump_file)
1044 symtab_node::dump_table (dump_file);
1045 fprintf (dump_file, "\nIPA lattices after constant "
1046 "propagation, before gcc_unreachable:\n");
1047 print_all_lattices (dump_file, true, false);
1050 gcc_unreachable ();
1056 /* Return true iff X and Y should be considered equal values by IPA-CP. */
1058 static bool
1059 values_equal_for_ipcp_p (tree x, tree y)
1061 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
1063 if (x == y)
1064 return true;
1066 if (TREE_CODE (x) == ADDR_EXPR
1067 && TREE_CODE (y) == ADDR_EXPR
1068 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
1069 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
1070 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
1071 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
1072 else
1073 return operand_equal_p (x, y, 0);
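
/* For instance, addresses of two distinct CONST_DECLs whose DECL_INITIALs are
   structurally identical (e.g. two anonymous constants holding the same
   value) are considered equal by the function above, because only their
   initial values are compared.  */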
1076 /* Return true iff X and Y should be considered equal contexts by IPA-CP. */
1078 static bool
1079 values_equal_for_ipcp_p (ipa_polymorphic_call_context x,
1080 ipa_polymorphic_call_context y)
1082 return x.equal_to (y);
1086 /* Add a new value source to the value represented by THIS, marking that a
1087 value comes from edge CS and (if the underlying jump function is a
1088 pass-through or an ancestor one) from a caller value SRC_VAL of a caller
 1089 parameter described by SRC_IDX. OFFSET is negative if the source was the
 1090 scalar value of the parameter itself, otherwise it is the offset within an aggregate. */
1092 template <typename valtype>
1093 void
1094 ipcp_value<valtype>::add_source (cgraph_edge *cs, ipcp_value *src_val,
1095 int src_idx, HOST_WIDE_INT offset)
1097 ipcp_value_source<valtype> *src;
1099 src = new (pool_alloc (ipcp_sources_pool)) ipcp_value_source<valtype>;
1100 src->offset = offset;
1101 src->cs = cs;
1102 src->val = src_val;
1103 src->index = src_idx;
1105 src->next = sources;
1106 sources = src;
1109 /* Allocate a new ipcp_value holding a tree constant, initialize its value to
1110 SOURCE and clear all other fields. */
1112 static ipcp_value<tree> *
1113 allocate_and_init_ipcp_value (tree source)
1115 ipcp_value<tree> *val;
1117 val = new (pool_alloc (ipcp_cst_values_pool)) ipcp_value<tree>;
1118 memset (val, 0, sizeof (*val));
1119 val->value = source;
1120 return val;
1123 /* Allocate a new ipcp_value holding a polymorphic context, initialize its
1124 value to SOURCE and clear all other fields. */
1126 static ipcp_value<ipa_polymorphic_call_context> *
1127 allocate_and_init_ipcp_value (ipa_polymorphic_call_context source)
1129 ipcp_value<ipa_polymorphic_call_context> *val;
1131 val = new (pool_alloc (ipcp_poly_ctx_values_pool))
1132 ipcp_value<ipa_polymorphic_call_context>;
1133 memset (val, 0, sizeof (*val));
1134 val->value = source;
1135 return val;
1138 /* Try to add NEWVAL to LAT, potentially creating a new ipcp_value for it. CS,
 1139 SRC_VAL, SRC_IDX and OFFSET are meant for add_source and have the same
1140 meaning. OFFSET -1 means the source is scalar and not a part of an
1141 aggregate. */
1143 template <typename valtype>
1144 bool
1145 ipcp_lattice<valtype>::add_value (valtype newval, cgraph_edge *cs,
1146 ipcp_value<valtype> *src_val,
1147 int src_idx, HOST_WIDE_INT offset)
1149 ipcp_value<valtype> *val;
1151 if (bottom)
1152 return false;
1154 for (val = values; val; val = val->next)
1155 if (values_equal_for_ipcp_p (val->value, newval))
1157 if (ipa_edge_within_scc (cs))
1159 ipcp_value_source<valtype> *s;
1160 for (s = val->sources; s ; s = s->next)
1161 if (s->cs == cs)
1162 break;
1163 if (s)
1164 return false;
1167 val->add_source (cs, src_val, src_idx, offset);
1168 return false;
1171 if (values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
1173 /* We can only free sources, not the values themselves, because sources
 1174 of other values in this SCC might point to them. */
1175 for (val = values; val; val = val->next)
1177 while (val->sources)
1179 ipcp_value_source<valtype> *src = val->sources;
1180 val->sources = src->next;
1181 pool_free (ipcp_sources_pool, src);
1185 values = NULL;
1186 return set_to_bottom ();
1189 values_count++;
1190 val = allocate_and_init_ipcp_value (newval);
1191 val->add_source (cs, src_val, src_idx, offset);
1192 val->next = values;
1193 values = val;
1194 return true;
1197 /* Propagate values through a pass-through jump function JFUNC associated with
1198 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1199 is the index of the source parameter. */
1201 static bool
1202 propagate_vals_accross_pass_through (cgraph_edge *cs,
1203 ipa_jump_func *jfunc,
1204 ipcp_lattice<tree> *src_lat,
1205 ipcp_lattice<tree> *dest_lat,
1206 int src_idx)
1208 ipcp_value<tree> *src_val;
1209 bool ret = false;
1211 /* Do not create new values when propagating within an SCC because if there
 1212 are arithmetic functions with circular dependencies, there is an infinite
1213 number of them and we would just make lattices bottom. */
1214 if ((ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1215 && ipa_edge_within_scc (cs))
1216 ret = dest_lat->set_contains_variable ();
1217 else
1218 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1220 tree cstval = ipa_get_jf_pass_through_result (jfunc, src_val->value);
1222 if (cstval)
1223 ret |= dest_lat->add_value (cstval, cs, src_val, src_idx);
1224 else
1225 ret |= dest_lat->set_contains_variable ();
1228 return ret;
1231 /* Propagate values through an ancestor jump function JFUNC associated with
1232 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1233 is the index of the source parameter. */
1235 static bool
1236 propagate_vals_accross_ancestor (struct cgraph_edge *cs,
1237 struct ipa_jump_func *jfunc,
1238 ipcp_lattice<tree> *src_lat,
1239 ipcp_lattice<tree> *dest_lat,
1240 int src_idx)
1242 ipcp_value<tree> *src_val;
1243 bool ret = false;
1245 if (ipa_edge_within_scc (cs))
1246 return dest_lat->set_contains_variable ();
1248 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1250 tree t = ipa_get_jf_ancestor_result (jfunc, src_val->value);
1252 if (t)
1253 ret |= dest_lat->add_value (t, cs, src_val, src_idx);
1254 else
1255 ret |= dest_lat->set_contains_variable ();
1258 return ret;
1261 /* Propagate scalar values across jump function JFUNC that is associated with
1262 edge CS and put the values into DEST_LAT. */
1264 static bool
1265 propagate_scalar_accross_jump_function (struct cgraph_edge *cs,
1266 struct ipa_jump_func *jfunc,
1267 ipcp_lattice<tree> *dest_lat)
1269 if (dest_lat->bottom)
1270 return false;
1272 if (jfunc->type == IPA_JF_CONST)
1274 tree val = ipa_get_jf_constant (jfunc);
1275 return dest_lat->add_value (val, cs, NULL, 0);
1277 else if (jfunc->type == IPA_JF_PASS_THROUGH
1278 || jfunc->type == IPA_JF_ANCESTOR)
1280 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1281 ipcp_lattice<tree> *src_lat;
1282 int src_idx;
1283 bool ret;
1285 if (jfunc->type == IPA_JF_PASS_THROUGH)
1286 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1287 else
1288 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1290 src_lat = ipa_get_scalar_lat (caller_info, src_idx);
1291 if (src_lat->bottom)
1292 return dest_lat->set_contains_variable ();
1294 /* If we would need to clone the caller and cannot, do not propagate. */
1295 if (!ipcp_versionable_function_p (cs->caller)
1296 && (src_lat->contains_variable
1297 || (src_lat->values_count > 1)))
1298 return dest_lat->set_contains_variable ();
1300 if (jfunc->type == IPA_JF_PASS_THROUGH)
1301 ret = propagate_vals_accross_pass_through (cs, jfunc, src_lat,
1302 dest_lat, src_idx);
1303 else
1304 ret = propagate_vals_accross_ancestor (cs, jfunc, src_lat, dest_lat,
1305 src_idx);
1307 if (src_lat->contains_variable)
1308 ret |= dest_lat->set_contains_variable ();
1310 return ret;
1313 /* TODO: We currently do not handle member method pointers in IPA-CP (we only
1314 use it for indirect inlining), we should propagate them too. */
1315 return dest_lat->set_contains_variable ();
 1318 /* Propagate polymorphic contexts across jump function JFUNC that is associated
 1319 with edge CS and describes argument IDX and put the values into DEST_LAT. */
1321 static bool
1322 propagate_context_accross_jump_function (cgraph_edge *cs,
1323 ipa_jump_func *jfunc, int idx,
1324 ipcp_lattice<ipa_polymorphic_call_context> *dest_lat)
1326 ipa_edge_args *args = IPA_EDGE_REF (cs);
1327 if (dest_lat->bottom)
1328 return false;
1329 bool ret = false;
1330 bool added_sth = false;
1331 bool type_preserved = true;
1333 ipa_polymorphic_call_context edge_ctx, *edge_ctx_ptr
1334 = ipa_get_ith_polymorhic_call_context (args, idx);
1336 if (edge_ctx_ptr)
1337 edge_ctx = *edge_ctx_ptr;
1339 if (jfunc->type == IPA_JF_PASS_THROUGH
1340 || jfunc->type == IPA_JF_ANCESTOR)
1342 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1343 int src_idx;
1344 ipcp_lattice<ipa_polymorphic_call_context> *src_lat;
1346 /* TODO: Once we figure out how to propagate speculations, it will
1347 probably be a good idea to switch to speculation if type_preserved is
1348 not set instead of punting. */
1349 if (jfunc->type == IPA_JF_PASS_THROUGH)
1351 if (ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1352 goto prop_fail;
1353 type_preserved = ipa_get_jf_pass_through_type_preserved (jfunc);
1354 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1356 else
1358 type_preserved = ipa_get_jf_ancestor_type_preserved (jfunc);
1359 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1362 src_lat = ipa_get_poly_ctx_lat (caller_info, src_idx);
1363 /* If we would need to clone the caller and cannot, do not propagate. */
1364 if (!ipcp_versionable_function_p (cs->caller)
1365 && (src_lat->contains_variable
1366 || (src_lat->values_count > 1)))
1367 goto prop_fail;
1369 ipcp_value<ipa_polymorphic_call_context> *src_val;
1370 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1372 ipa_polymorphic_call_context cur = src_val->value;
1374 if (!type_preserved)
1375 cur.possible_dynamic_type_change (cs->in_polymorphic_cdtor);
1376 if (jfunc->type == IPA_JF_ANCESTOR)
1377 cur.offset_by (ipa_get_jf_ancestor_offset (jfunc));
1378 /* TODO: In cases we know how the context is going to be used,
1379 we can improve the result by passing proper OTR_TYPE. */
1380 cur.combine_with (edge_ctx);
1381 if (!cur.useless_p ())
1383 if (src_lat->contains_variable
1384 && !edge_ctx.equal_to (cur))
1385 ret |= dest_lat->set_contains_variable ();
1386 ret |= dest_lat->add_value (cur, cs, src_val, src_idx);
1387 added_sth = true;
1393 prop_fail:
1394 if (!added_sth)
1396 if (!edge_ctx.useless_p ())
1397 ret |= dest_lat->add_value (edge_ctx, cs);
1398 else
1399 ret |= dest_lat->set_contains_variable ();
1402 return ret;
1405 /* Propagate alignments across jump function JFUNC that is associated with
1406 edge CS and update DEST_LAT accordingly. */
1408 static bool
1409 propagate_alignment_accross_jump_function (struct cgraph_edge *cs,
1410 struct ipa_jump_func *jfunc,
1411 struct ipcp_param_lattices *dest_lat)
1413 if (alignment_bottom_p (dest_lat))
1414 return false;
1416 ipa_alignment cur;
1417 cur.known = false;
1418 if (jfunc->alignment.known)
1419 cur = jfunc->alignment;
1420 else if (jfunc->type == IPA_JF_PASS_THROUGH
1421 || jfunc->type == IPA_JF_ANCESTOR)
1423 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1424 struct ipcp_param_lattices *src_lats;
1425 HOST_WIDE_INT offset = 0;
1426 int src_idx;
1428 if (jfunc->type == IPA_JF_PASS_THROUGH)
1430 enum tree_code op = ipa_get_jf_pass_through_operation (jfunc);
1431 if (op != NOP_EXPR)
1433 if (op != POINTER_PLUS_EXPR
1434 && op != PLUS_EXPR
1435 && op != MINUS_EXPR)
1436 goto prop_fail;
1437 tree operand = ipa_get_jf_pass_through_operand (jfunc);
1438 if (!tree_fits_shwi_p (operand))
1439 goto prop_fail;
1440 offset = tree_to_shwi (operand);
1442 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1444 else
1446 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1447 offset = ipa_get_jf_ancestor_offset (jfunc);
1450 src_lats = ipa_get_parm_lattices (caller_info, src_idx);
1451 if (!src_lats->alignment.known
1452 || alignment_bottom_p (src_lats))
1453 goto prop_fail;
1455 cur = src_lats->alignment;
1456 cur.misalign = (cur.misalign + offset) % cur.align;
1459 if (cur.known)
1461 if (!dest_lat->alignment.known)
1463 dest_lat->alignment = cur;
1464 return true;
1466 else if (dest_lat->alignment.align == cur.align
1467 && dest_lat->alignment.misalign == cur.misalign)
1468 return false;
1471 prop_fail:
1472 set_alignment_to_bottom (dest_lat);
1473 return true;
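
/* A small worked example for the misalignment arithmetic above (illustrative
   only): if the source lattice says the pointer is aligned to 8 with
   misalignment 2 and the pass-through adds a constant offset of 12, the
   propagated alignment keeps align 8 and gets misalign (2 + 12) % 8 == 6.  */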
1476 /* If DEST_PLATS already has aggregate items, check that aggs_by_ref matches
1477 NEW_AGGS_BY_REF and if not, mark all aggs as bottoms and return true (in all
1478 other cases, return false). If there are no aggregate items, set
1479 aggs_by_ref to NEW_AGGS_BY_REF. */
1481 static bool
1482 set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
1483 bool new_aggs_by_ref)
1485 if (dest_plats->aggs)
1487 if (dest_plats->aggs_by_ref != new_aggs_by_ref)
1489 set_agg_lats_to_bottom (dest_plats);
1490 return true;
1493 else
1494 dest_plats->aggs_by_ref = new_aggs_by_ref;
1495 return false;
1498 /* Walk aggregate lattices in DEST_PLATS from ***AGLAT on, until ***aglat is an
1499 already existing lattice for the given OFFSET and SIZE, marking all skipped
1500 lattices as containing variable and checking for overlaps. If there is no
1501 already existing lattice for the OFFSET and VAL_SIZE, create one, initialize
1502 it with offset, size and contains_variable to PRE_EXISTING, and return true,
 1503 unless there are too many already. If there are too many, return false. If
 1504 there are overlaps, turn the whole DEST_PLATS to bottom and return false. If any
1505 skipped lattices were newly marked as containing variable, set *CHANGE to
1506 true. */
1508 static bool
1509 merge_agg_lats_step (struct ipcp_param_lattices *dest_plats,
1510 HOST_WIDE_INT offset, HOST_WIDE_INT val_size,
1511 struct ipcp_agg_lattice ***aglat,
1512 bool pre_existing, bool *change)
1514 gcc_checking_assert (offset >= 0);
1516 while (**aglat && (**aglat)->offset < offset)
1518 if ((**aglat)->offset + (**aglat)->size > offset)
1520 set_agg_lats_to_bottom (dest_plats);
1521 return false;
1523 *change |= (**aglat)->set_contains_variable ();
1524 *aglat = &(**aglat)->next;
1527 if (**aglat && (**aglat)->offset == offset)
1529 if ((**aglat)->size != val_size
1530 || ((**aglat)->next
1531 && (**aglat)->next->offset < offset + val_size))
1533 set_agg_lats_to_bottom (dest_plats);
1534 return false;
1536 gcc_checking_assert (!(**aglat)->next
1537 || (**aglat)->next->offset >= offset + val_size);
1538 return true;
1540 else
1542 struct ipcp_agg_lattice *new_al;
1544 if (**aglat && (**aglat)->offset < offset + val_size)
1546 set_agg_lats_to_bottom (dest_plats);
1547 return false;
1549 if (dest_plats->aggs_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1550 return false;
1551 dest_plats->aggs_count++;
1552 new_al = (struct ipcp_agg_lattice *) pool_alloc (ipcp_agg_lattice_pool);
1553 memset (new_al, 0, sizeof (*new_al));
1555 new_al->offset = offset;
1556 new_al->size = val_size;
1557 new_al->contains_variable = pre_existing;
1559 new_al->next = **aglat;
1560 **aglat = new_al;
1561 return true;
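
/* Illustrative behavior of the function above: with existing lattices at
   offsets 0 and 64, each of size 32, a request for OFFSET 64 / VAL_SIZE 32
   finds the existing lattice, OFFSET 32 / VAL_SIZE 32 creates a new lattice
   between them, and OFFSET 16 / VAL_SIZE 32 overlaps the first lattice, so
   all aggregate lattices in DEST_PLATS are set to bottom.  */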
 1565 /* Mark AGLAT and all other aggregate lattices reachable through its next
 1566 pointers as containing an unknown value. */
1568 static bool
1569 set_chain_of_aglats_contains_variable (struct ipcp_agg_lattice *aglat)
1571 bool ret = false;
1572 while (aglat)
1574 ret |= aglat->set_contains_variable ();
1575 aglat = aglat->next;
1577 return ret;
1580 /* Merge existing aggregate lattices in SRC_PLATS to DEST_PLATS, subtracting
 1581 OFFSET_DELTA. CS is the call graph edge and SRC_IDX the index of the source
1582 parameter used for lattice value sources. Return true if DEST_PLATS changed
1583 in any way. */
1585 static bool
1586 merge_aggregate_lattices (struct cgraph_edge *cs,
1587 struct ipcp_param_lattices *dest_plats,
1588 struct ipcp_param_lattices *src_plats,
1589 int src_idx, HOST_WIDE_INT offset_delta)
1591 bool pre_existing = dest_plats->aggs != NULL;
1592 struct ipcp_agg_lattice **dst_aglat;
1593 bool ret = false;
1595 if (set_check_aggs_by_ref (dest_plats, src_plats->aggs_by_ref))
1596 return true;
1597 if (src_plats->aggs_bottom)
1598 return set_agg_lats_contain_variable (dest_plats);
1599 if (src_plats->aggs_contain_variable)
1600 ret |= set_agg_lats_contain_variable (dest_plats);
1601 dst_aglat = &dest_plats->aggs;
1603 for (struct ipcp_agg_lattice *src_aglat = src_plats->aggs;
1604 src_aglat;
1605 src_aglat = src_aglat->next)
1607 HOST_WIDE_INT new_offset = src_aglat->offset - offset_delta;
1609 if (new_offset < 0)
1610 continue;
1611 if (merge_agg_lats_step (dest_plats, new_offset, src_aglat->size,
1612 &dst_aglat, pre_existing, &ret))
1614 struct ipcp_agg_lattice *new_al = *dst_aglat;
1616 dst_aglat = &(*dst_aglat)->next;
1617 if (src_aglat->bottom)
1619 ret |= new_al->set_contains_variable ();
1620 continue;
1622 if (src_aglat->contains_variable)
1623 ret |= new_al->set_contains_variable ();
1624 for (ipcp_value<tree> *val = src_aglat->values;
1625 val;
1626 val = val->next)
1627 ret |= new_al->add_value (val->value, cs, val, src_idx,
1628 src_aglat->offset);
1630 else if (dest_plats->aggs_bottom)
1631 return true;
1633 ret |= set_chain_of_aglats_contains_variable (*dst_aglat);
1634 return ret;
 1637 /* Determine whether there is anything to propagate from SRC_PLATS through a
 1638 pass-through JFUNC and if so, whether it conforms to the rules about
 1639 propagating values passed by reference. */
1641 static bool
1642 agg_pass_through_permissible_p (struct ipcp_param_lattices *src_plats,
1643 struct ipa_jump_func *jfunc)
1645 return src_plats->aggs
1646 && (!src_plats->aggs_by_ref
1647 || ipa_get_jf_pass_through_agg_preserved (jfunc));
 1650 /* Propagate aggregate values across jump function JFUNC that is associated
 1651 with edge CS and put the values into DEST_PLATS. */
1653 static bool
1654 propagate_aggs_accross_jump_function (struct cgraph_edge *cs,
1655 struct ipa_jump_func *jfunc,
1656 struct ipcp_param_lattices *dest_plats)
1658 bool ret = false;
1660 if (dest_plats->aggs_bottom)
1661 return false;
1663 if (jfunc->type == IPA_JF_PASS_THROUGH
1664 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
1666 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1667 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1668 struct ipcp_param_lattices *src_plats;
1670 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1671 if (agg_pass_through_permissible_p (src_plats, jfunc))
1673 /* Currently we do not produce clobber aggregate jump
1674 functions, replace with merging when we do. */
1675 gcc_assert (!jfunc->agg.items);
1676 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats,
1677 src_idx, 0);
1679 else
1680 ret |= set_agg_lats_contain_variable (dest_plats);
1682 else if (jfunc->type == IPA_JF_ANCESTOR
1683 && ipa_get_jf_ancestor_agg_preserved (jfunc))
1685 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1686 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1687 struct ipcp_param_lattices *src_plats;
1689 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1690 if (src_plats->aggs && src_plats->aggs_by_ref)
1692 /* Currently we do not produce clobber aggregate jump
1693 functions, replace with merging when we do. */
1694 gcc_assert (!jfunc->agg.items);
1695 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats, src_idx,
1696 ipa_get_jf_ancestor_offset (jfunc));
1698 else if (!src_plats->aggs_by_ref)
1699 ret |= set_agg_lats_to_bottom (dest_plats);
1700 else
1701 ret |= set_agg_lats_contain_variable (dest_plats);
1703 else if (jfunc->agg.items)
1705 bool pre_existing = dest_plats->aggs != NULL;
1706 struct ipcp_agg_lattice **aglat = &dest_plats->aggs;
1707 struct ipa_agg_jf_item *item;
1708 int i;
1710 if (set_check_aggs_by_ref (dest_plats, jfunc->agg.by_ref))
1711 return true;
1713 FOR_EACH_VEC_ELT (*jfunc->agg.items, i, item)
1715 HOST_WIDE_INT val_size;
1717 if (item->offset < 0)
1718 continue;
1719 gcc_checking_assert (is_gimple_ip_invariant (item->value));
1720 val_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (item->value)));
1722 if (merge_agg_lats_step (dest_plats, item->offset, val_size,
1723 &aglat, pre_existing, &ret))
1725 ret |= (*aglat)->add_value (item->value, cs, NULL, 0, 0);
1726 aglat = &(*aglat)->next;
1728 else if (dest_plats->aggs_bottom)
1729 return true;
1732 ret |= set_chain_of_aglats_contains_variable (*aglat);
1734 else
1735 ret |= set_agg_lats_contain_variable (dest_plats);
1737 return ret;
 1740 /* Propagate constants from the caller to the callee of CS. */
1743 static bool
1744 propagate_constants_accross_call (struct cgraph_edge *cs)
1746 struct ipa_node_params *callee_info;
1747 enum availability availability;
1748 struct cgraph_node *callee, *alias_or_thunk;
1749 struct ipa_edge_args *args;
1750 bool ret = false;
1751 int i, args_count, parms_count;
1753 callee = cs->callee->function_symbol (&availability);
1754 if (!callee->definition)
1755 return false;
1756 gcc_checking_assert (callee->has_gimple_body_p ());
1757 callee_info = IPA_NODE_REF (callee);
1759 args = IPA_EDGE_REF (cs);
1760 args_count = ipa_get_cs_argument_count (args);
1761 parms_count = ipa_get_param_count (callee_info);
1762 if (parms_count == 0)
1763 return false;
1765 /* No propagation through instrumentation thunks is available yet.
1766 It should be possible with proper mapping of call args and
1767 instrumented callee params in the propagation loop below. But
1768 this case mostly occurs when legacy code calls instrumented code
1769 and it is not a primary target for optimizations.
1770 We detect instrumentation thunks in aliases and thunks chain by
1771 checking instrumentation_clone flag for chain source and target.
1772 Going through instrumentation thunks we always have it changed
1773 from 0 to 1 and all other nodes do not change it. */
1774 if (!cs->callee->instrumentation_clone
1775 && callee->instrumentation_clone)
1777 for (i = 0; i < parms_count; i++)
1778 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
1779 i));
1780 return ret;
1783 /* If this call goes through a thunk we must not propagate to the first (0th)
1784 parameter. However, we might need to uncover a thunk from below a series
1785 of aliases first. */
1786 alias_or_thunk = cs->callee;
1787 while (alias_or_thunk->alias)
1788 alias_or_thunk = alias_or_thunk->get_alias_target ();
1789 if (alias_or_thunk->thunk.thunk_p)
1791 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
1792 0));
1793 i = 1;
1795 else
1796 i = 0;
1798 for (; (i < args_count) && (i < parms_count); i++)
1800 struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
1801 struct ipcp_param_lattices *dest_plats;
1803 dest_plats = ipa_get_parm_lattices (callee_info, i);
1804 if (availability == AVAIL_INTERPOSABLE)
1805 ret |= set_all_contains_variable (dest_plats);
1806 else
1808 ret |= propagate_scalar_accross_jump_function (cs, jump_func,
1809 &dest_plats->itself);
1810 ret |= propagate_context_accross_jump_function (cs, jump_func, i,
1811 &dest_plats->ctxlat);
1812 ret |= propagate_alignment_accross_jump_function (cs, jump_func,
1813 dest_plats);
1814 ret |= propagate_aggs_accross_jump_function (cs, jump_func,
1815 dest_plats);
1818 for (; i < parms_count; i++)
1819 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info, i));
1821 return ret;
 1824 /* If an indirect edge IE can be turned into a direct one based on KNOWN_CSTS,
 1825 KNOWN_CONTEXTS, KNOWN_AGGS or AGG_REPS, return the destination. The latter
 1826 three can be NULL. If AGG_REPS is not NULL, KNOWN_AGGS is ignored. */
1828 static tree
1829 ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
1830 vec<tree> known_csts,
1831 vec<ipa_polymorphic_call_context> known_contexts,
1832 vec<ipa_agg_jump_function_p> known_aggs,
1833 struct ipa_agg_replacement_value *agg_reps,
1834 bool *speculative)
1836 int param_index = ie->indirect_info->param_index;
1837 HOST_WIDE_INT anc_offset;
1838 tree t;
1839 tree target = NULL;
1841 *speculative = false;
1843 if (param_index == -1
1844 || known_csts.length () <= (unsigned int) param_index)
1845 return NULL_TREE;
1847 if (!ie->indirect_info->polymorphic)
1849 tree t;
1851 if (ie->indirect_info->agg_contents)
1853 if (agg_reps)
1855 t = NULL;
1856 while (agg_reps)
1858 if (agg_reps->index == param_index
1859 && agg_reps->offset == ie->indirect_info->offset
1860 && agg_reps->by_ref == ie->indirect_info->by_ref)
1862 t = agg_reps->value;
1863 break;
1865 agg_reps = agg_reps->next;
1868 else if (known_aggs.length () > (unsigned int) param_index)
1870 struct ipa_agg_jump_function *agg;
1871 agg = known_aggs[param_index];
1872 t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
1873 ie->indirect_info->by_ref);
1875 else
1876 t = NULL;
1878 else
1879 t = known_csts[param_index];
1881 if (t
1882 && TREE_CODE (t) == ADDR_EXPR
1883 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
1884 return TREE_OPERAND (t, 0);
1885 else
1886 return NULL_TREE;
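/* A minimal illustration of the non-polymorphic case above, using a
   hypothetical function foo: if every known context passes &foo in the
   relevant parameter, known_csts[param_index] is an ADDR_EXPR of the
   FUNCTION_DECL of foo and the indirect call can be turned into a direct
   call to foo.  */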
1889 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
1890 return NULL_TREE;
1892 gcc_assert (!ie->indirect_info->agg_contents);
1893 anc_offset = ie->indirect_info->offset;
1895 t = NULL;
1897 /* Try to work out the value of the virtual table pointer in replacements. */
1898 if (!t && agg_reps && !ie->indirect_info->by_ref)
1900 while (agg_reps)
1902 if (agg_reps->index == param_index
1903 && agg_reps->offset == ie->indirect_info->offset
1904 && agg_reps->by_ref)
1906 t = agg_reps->value;
1907 break;
1909 agg_reps = agg_reps->next;
1913 /* Try to work out the value of the virtual table pointer in known
1914 aggregate values. */
1915 if (!t && known_aggs.length () > (unsigned int) param_index
1916 && !ie->indirect_info->by_ref)
1918 struct ipa_agg_jump_function *agg;
1919 agg = known_aggs[param_index];
1920 t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
1921 true);
1924 /* If we found the virtual table pointer, look up the target. */
1925 if (t)
1927 tree vtable;
1928 unsigned HOST_WIDE_INT offset;
1929 if (vtable_pointer_value_to_vtable (t, &vtable, &offset))
1931 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
1932 vtable, offset);
1933 if (target)
1935 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
1936 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
1937 || !possible_polymorphic_call_target_p
1938 (ie, cgraph_node::get (target)))
1939 target = ipa_impossible_devirt_target (ie, target);
1940 *speculative = ie->indirect_info->vptr_changed;
1941 if (!*speculative)
1942 return target;
1947 /* Do we know the constant value of the pointer? */
1948 if (!t)
1949 t = known_csts[param_index];
1951 gcc_checking_assert (!t || TREE_CODE (t) != TREE_BINFO);
1953 ipa_polymorphic_call_context context;
1954 if (known_contexts.length () > (unsigned int) param_index)
1956 context = known_contexts[param_index];
1957 context.offset_by (anc_offset);
1958 if (ie->indirect_info->vptr_changed)
1959 context.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
1960 ie->indirect_info->otr_type);
1961 if (t)
1963 ipa_polymorphic_call_context ctx2 = ipa_polymorphic_call_context
1964 (t, ie->indirect_info->otr_type, anc_offset);
1965 if (!ctx2.useless_p ())
1966 context.combine_with (ctx2, ie->indirect_info->otr_type);
1969 else if (t)
1970 context = ipa_polymorphic_call_context (t, ie->indirect_info->otr_type,
1971 anc_offset);
1972 else
1973 return NULL_TREE;
1975 vec <cgraph_node *>targets;
1976 bool final;
1978 targets = possible_polymorphic_call_targets
1979 (ie->indirect_info->otr_type,
1980 ie->indirect_info->otr_token,
1981 context, &final);
1982 if (!final || targets.length () > 1)
1984 struct cgraph_node *node;
1985 if (*speculative)
1986 return target;
1987 if (!opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
1988 || ie->speculative || !ie->maybe_hot_p ())
1989 return NULL;
1990 node = try_speculative_devirtualization (ie->indirect_info->otr_type,
1991 ie->indirect_info->otr_token,
1992 context);
1993 if (node)
1995 *speculative = true;
1996 target = node->decl;
1998 else
1999 return NULL;
2001 else
2003 *speculative = false;
2004 if (targets.length () == 1)
2005 target = targets[0]->decl;
2006 else
2007 target = ipa_impossible_devirt_target (ie, NULL_TREE);
2010 if (target && !possible_polymorphic_call_target_p (ie,
2011 cgraph_node::get (target)))
2012 target = ipa_impossible_devirt_target (ie, target);
2014 return target;
2018 /* If an indirect edge IE can be turned into a direct one based on KNOWN_CSTS,
2019 KNOWN_CONTEXTS (which can be vNULL) or KNOWN_AGGS (which also can be vNULL)
2020 return the destination. */
2022 tree
2023 ipa_get_indirect_edge_target (struct cgraph_edge *ie,
2024 vec<tree> known_csts,
2025 vec<ipa_polymorphic_call_context> known_contexts,
2026 vec<ipa_agg_jump_function_p> known_aggs,
2027 bool *speculative)
2029 return ipa_get_indirect_edge_target_1 (ie, known_csts, known_contexts,
2030 known_aggs, NULL, speculative);
2033 /* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS,
2034 KNOWN_CONTEXTS and KNOWN_AGGS. */
2036 static int
2037 devirtualization_time_bonus (struct cgraph_node *node,
2038 vec<tree> known_csts,
2039 vec<ipa_polymorphic_call_context> known_contexts,
2040 vec<ipa_agg_jump_function_p> known_aggs)
2042 struct cgraph_edge *ie;
2043 int res = 0;
2045 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
2047 struct cgraph_node *callee;
2048 struct inline_summary *isummary;
2049 enum availability avail;
2050 tree target;
2051 bool speculative;
2053 target = ipa_get_indirect_edge_target (ie, known_csts, known_contexts,
2054 known_aggs, &speculative);
2055 if (!target)
2056 continue;
2058 /* Only bare minimum benefit for clearly un-inlineable targets. */
2059 res += 1;
2060 callee = cgraph_node::get (target);
2061 if (!callee || !callee->definition)
2062 continue;
2063 callee = callee->function_symbol (&avail);
2064 if (avail < AVAIL_AVAILABLE)
2065 continue;
2066 isummary = inline_summary (callee);
2067 if (!isummary->inlinable)
2068 continue;
2070 /* FIXME: The values below need reconsidering and perhaps also
2071 integrating into the cost metrics, at least in some very basic way. */
2072 if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
2073 res += 31 / ((int)speculative + 1);
2074 else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
2075 res += 15 / ((int)speculative + 1);
2076 else if (isummary->size <= MAX_INLINE_INSNS_AUTO
2077 || DECL_DECLARED_INLINE_P (callee->decl))
2078 res += 7 / ((int)speculative + 1);
2081 return res;
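/* Worked example of the bonuses above (the constants are heuristic and may
   be retuned): a devirtualized call to an inlinable callee of size at most
   MAX_INLINE_INSNS_AUTO / 4 contributes 1 + 31 = 32 when the target is
   certain, but only 1 + 31 / 2 = 16 when it is merely speculative; a target
   that cannot be inlined contributes just the baseline 1.  */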
2084 /* Return time bonus incurred because of HINTS. */
2086 static int
2087 hint_time_bonus (inline_hints hints)
2089 int result = 0;
2090 if (hints & (INLINE_HINT_loop_iterations | INLINE_HINT_loop_stride))
2091 result += PARAM_VALUE (PARAM_IPA_CP_LOOP_HINT_BONUS);
2092 if (hints & INLINE_HINT_array_index)
2093 result += PARAM_VALUE (PARAM_IPA_CP_ARRAY_INDEX_HINT_BONUS);
2094 return result;
2097 /* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
2098 and SIZE_COST, the sum of frequencies of incoming edges to the potential new
2099 clone in FREQ_SUM and the sum of their profile counts in COUNT_SUM. */
2101 static bool
2102 good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
2103 int freq_sum, gcov_type count_sum, int size_cost)
2105 if (time_benefit == 0
2106 || !opt_for_fn (node->decl, flag_ipa_cp_clone)
2107 || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
2108 return false;
2110 gcc_assert (size_cost > 0);
2112 if (max_count)
2114 int factor = (count_sum * 1000) / max_count;
2115 int64_t evaluation = (((int64_t) time_benefit * factor)
2116 / size_cost);
2118 if (dump_file && (dump_flags & TDF_DETAILS))
2119 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
2120 "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
2121 ") -> evaluation: " "%"PRId64
2122 ", threshold: %i\n",
2123 time_benefit, size_cost, (HOST_WIDE_INT) count_sum,
2124 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
2126 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
2128 else
2130 int64_t evaluation = (((int64_t) time_benefit * freq_sum)
2131 / size_cost);
2133 if (dump_file && (dump_flags & TDF_DETAILS))
2134 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
2135 "size: %i, freq_sum: %i) -> evaluation: "
2136 "%"PRId64 ", threshold: %i\n",
2137 time_benefit, size_cost, freq_sum, evaluation,
2138 PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
2140 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
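/* Worked example, assuming the default of 500 for
   PARAM_IPA_CP_EVAL_THRESHOLD and no profile feedback: a value with
   time_benefit 20 whose callers have freq_sum 100 and whose clone would cost
   8 units of growth evaluates to 20 * 100 / 8 = 250, below the threshold, so
   no clone is made; with freq_sum 200 the evaluation would be 500 and the
   opportunity would be accepted.  */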
2144 /* Return all context independent values from aggregate lattices in PLATS in a
2145 vector. Return NULL if there are none. */
2147 static vec<ipa_agg_jf_item, va_gc> *
2148 context_independent_aggregate_values (struct ipcp_param_lattices *plats)
2150 vec<ipa_agg_jf_item, va_gc> *res = NULL;
2152 if (plats->aggs_bottom
2153 || plats->aggs_contain_variable
2154 || plats->aggs_count == 0)
2155 return NULL;
2157 for (struct ipcp_agg_lattice *aglat = plats->aggs;
2158 aglat;
2159 aglat = aglat->next)
2160 if (aglat->is_single_const ())
2162 struct ipa_agg_jf_item item;
2163 item.offset = aglat->offset;
2164 item.value = aglat->values->value;
2165 vec_safe_push (res, item);
2167 return res;
2170 /* Allocate KNOWN_CSTS, KNOWN_CONTEXTS and, if non-NULL, KNOWN_AGGS and
2171 populate them with values of parameters that are known independent of the
2172 context. INFO describes the function. If REMOVABLE_PARAMS_COST is
2173 non-NULL, the movement cost of all removable parameters will be stored in
2174 it. */
2176 static bool
2177 gather_context_independent_values (struct ipa_node_params *info,
2178 vec<tree> *known_csts,
2179 vec<ipa_polymorphic_call_context>
2180 *known_contexts,
2181 vec<ipa_agg_jump_function> *known_aggs,
2182 int *removable_params_cost)
2184 int i, count = ipa_get_param_count (info);
2185 bool ret = false;
2187 known_csts->create (0);
2188 known_contexts->create (0);
2189 known_csts->safe_grow_cleared (count);
2190 known_contexts->safe_grow_cleared (count);
2191 if (known_aggs)
2193 known_aggs->create (0);
2194 known_aggs->safe_grow_cleared (count);
2197 if (removable_params_cost)
2198 *removable_params_cost = 0;
2200 for (i = 0; i < count ; i++)
2202 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2203 ipcp_lattice<tree> *lat = &plats->itself;
2205 if (lat->is_single_const ())
2207 ipcp_value<tree> *val = lat->values;
2208 gcc_checking_assert (TREE_CODE (val->value) != TREE_BINFO);
2209 (*known_csts)[i] = val->value;
2210 if (removable_params_cost)
2211 *removable_params_cost
2212 += estimate_move_cost (TREE_TYPE (val->value), false);
2213 ret = true;
2215 else if (removable_params_cost
2216 && !ipa_is_param_used (info, i))
2217 *removable_params_cost
2218 += ipa_get_param_move_cost (info, i);
2220 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
2221 if (ctxlat->is_single_const ())
2223 (*known_contexts)[i] = ctxlat->values->value;
2224 ret = true;
2227 if (known_aggs)
2229 vec<ipa_agg_jf_item, va_gc> *agg_items;
2230 struct ipa_agg_jump_function *ajf;
2232 agg_items = context_independent_aggregate_values (plats);
2233 ajf = &(*known_aggs)[i];
2234 ajf->items = agg_items;
2235 ajf->by_ref = plats->aggs_by_ref;
2236 ret |= agg_items != NULL;
2240 return ret;
2243 /* The current interface in ipa-inline-analysis requires a pointer vector.
2244 Create it.
2246 FIXME: That interface should be reworked; this is slightly silly. Still,
2247 I'd like to discuss how to change it first and this demonstrates the
2248 issue. */
2250 static vec<ipa_agg_jump_function_p>
2251 agg_jmp_p_vec_for_t_vec (vec<ipa_agg_jump_function> known_aggs)
2253 vec<ipa_agg_jump_function_p> ret;
2254 struct ipa_agg_jump_function *ajf;
2255 int i;
2257 ret.create (known_aggs.length ());
2258 FOR_EACH_VEC_ELT (known_aggs, i, ajf)
2259 ret.quick_push (ajf);
2260 return ret;
2263 /* Perform time and size measurement of NODE with the context given in
2264 KNOWN_CSTS, KNOWN_CONTEXTS and KNOWN_AGGS, calculate the benefit and cost
2265 from BASE_TIME of the node without specialization, REMOVABLE_PARAMS_COST of
2266 all context-independent removable parameters and EST_MOVE_COST, the estimated
2267 movement cost of the considered parameter, and store the result into VAL. */
2269 static void
2270 perform_estimation_of_a_value (cgraph_node *node, vec<tree> known_csts,
2271 vec<ipa_polymorphic_call_context> known_contexts,
2272 vec<ipa_agg_jump_function_p> known_aggs_ptrs,
2273 int base_time, int removable_params_cost,
2274 int est_move_cost, ipcp_value_base *val)
2276 int time, size, time_benefit;
2277 inline_hints hints;
2279 estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
2280 known_aggs_ptrs, &size, &time,
2281 &hints);
2282 time_benefit = base_time - time
2283 + devirtualization_time_bonus (node, known_csts, known_contexts,
2284 known_aggs_ptrs)
2285 + hint_time_bonus (hints)
2286 + removable_params_cost + est_move_cost;
2288 gcc_checking_assert (size >= 0);
2289 /* The inliner-heuristics based estimates may think that in certain
2290 contexts some functions do not have any size at all but we want
2291 all specializations to have at least a tiny cost, if nothing else to
2292 avoid dividing by zero. */
2293 if (size == 0)
2294 size = 1;
2296 val->local_time_benefit = time_benefit;
2297 val->local_size_cost = size;
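/* Illustrative arithmetic for the computation above: if the unspecialized
   node has base_time 500 and the specialized estimate comes back as time 430
   and size 40, with a devirtualization bonus of 16, no hints,
   removable_params_cost 4 and est_move_cost 2, then
   time_benefit = 500 - 430 + 16 + 0 + 4 + 2 = 92 and local_size_cost = 40.  */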
2300 /* Iterate over known values of parameters of NODE and estimate the local
2301 effects in terms of time and size they have. */
2303 static void
2304 estimate_local_effects (struct cgraph_node *node)
2306 struct ipa_node_params *info = IPA_NODE_REF (node);
2307 int i, count = ipa_get_param_count (info);
2308 vec<tree> known_csts;
2309 vec<ipa_polymorphic_call_context> known_contexts;
2310 vec<ipa_agg_jump_function> known_aggs;
2311 vec<ipa_agg_jump_function_p> known_aggs_ptrs;
2312 bool always_const;
2313 int base_time = inline_summary (node)->time;
2314 int removable_params_cost;
2316 if (!count || !ipcp_versionable_function_p (node))
2317 return;
2319 if (dump_file && (dump_flags & TDF_DETAILS))
2320 fprintf (dump_file, "\nEstimating effects for %s/%i, base_time: %i.\n",
2321 node->name (), node->order, base_time);
2323 always_const = gather_context_independent_values (info, &known_csts,
2324 &known_contexts, &known_aggs,
2325 &removable_params_cost);
2326 known_aggs_ptrs = agg_jmp_p_vec_for_t_vec (known_aggs);
2327 if (always_const)
2329 struct caller_statistics stats;
2330 inline_hints hints;
2331 int time, size;
2333 init_caller_stats (&stats);
2334 node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
2335 false);
2336 estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
2337 known_aggs_ptrs, &size, &time, &hints);
2338 time -= devirtualization_time_bonus (node, known_csts, known_contexts,
2339 known_aggs_ptrs);
2340 time -= hint_time_bonus (hints);
2341 time -= removable_params_cost;
2342 size -= stats.n_calls * removable_params_cost;
2344 if (dump_file)
2345 fprintf (dump_file, " - context independent values, size: %i, "
2346 "time_benefit: %i\n", size, base_time - time);
2348 if (size <= 0
2349 || node->will_be_removed_from_program_if_no_direct_calls_p ())
2351 info->do_clone_for_all_contexts = true;
2352 base_time = time;
2354 if (dump_file)
2355 fprintf (dump_file, " Decided to specialize for all "
2356 "known contexts, code not going to grow.\n");
2358 else if (good_cloning_opportunity_p (node, base_time - time,
2359 stats.freq_sum, stats.count_sum,
2360 size))
2362 if (size + overall_size <= max_new_size)
2364 info->do_clone_for_all_contexts = true;
2365 base_time = time;
2366 overall_size += size;
2368 if (dump_file)
2369 fprintf (dump_file, " Decided to specialize for all "
2370 "known contexts, growth deemed beneficial.\n");
2372 else if (dump_file && (dump_flags & TDF_DETAILS))
2373 fprintf (dump_file, " Not cloning for all contexts because "
2374 "max_new_size would be reached with %li.\n",
2375 size + overall_size);
2379 for (i = 0; i < count ; i++)
2381 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2382 ipcp_lattice<tree> *lat = &plats->itself;
2383 ipcp_value<tree> *val;
2385 if (lat->bottom
2386 || !lat->values
2387 || known_csts[i])
2388 continue;
2390 for (val = lat->values; val; val = val->next)
2392 gcc_checking_assert (TREE_CODE (val->value) != TREE_BINFO);
2393 known_csts[i] = val->value;
2395 int emc = estimate_move_cost (TREE_TYPE (val->value), true);
2396 perform_estimation_of_a_value (node, known_csts, known_contexts,
2397 known_aggs_ptrs, base_time,
2398 removable_params_cost, emc, val);
2400 if (dump_file && (dump_flags & TDF_DETAILS))
2402 fprintf (dump_file, " - estimates for value ");
2403 print_ipcp_constant_value (dump_file, val->value);
2404 fprintf (dump_file, " for ");
2405 ipa_dump_param (dump_file, info, i);
2406 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
2407 val->local_time_benefit, val->local_size_cost);
2410 known_csts[i] = NULL_TREE;
2413 for (i = 0; i < count; i++)
2415 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2417 if (!plats->virt_call)
2418 continue;
2420 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
2421 ipcp_value<ipa_polymorphic_call_context> *val;
2423 if (ctxlat->bottom
2424 || !ctxlat->values
2425 || !known_contexts[i].useless_p ())
2426 continue;
2428 for (val = ctxlat->values; val; val = val->next)
2430 known_contexts[i] = val->value;
2431 perform_estimation_of_a_value (node, known_csts, known_contexts,
2432 known_aggs_ptrs, base_time,
2433 removable_params_cost, 0, val);
2435 if (dump_file && (dump_flags & TDF_DETAILS))
2437 fprintf (dump_file, " - estimates for polymorphic context ");
2438 print_ipcp_constant_value (dump_file, val->value);
2439 fprintf (dump_file, " for ");
2440 ipa_dump_param (dump_file, info, i);
2441 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
2442 val->local_time_benefit, val->local_size_cost);
2445 known_contexts[i] = ipa_polymorphic_call_context ();
2448 for (i = 0; i < count ; i++)
2450 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2451 struct ipa_agg_jump_function *ajf;
2452 struct ipcp_agg_lattice *aglat;
2454 if (plats->aggs_bottom || !plats->aggs)
2455 continue;
2457 ajf = &known_aggs[i];
2458 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2460 ipcp_value<tree> *val;
2461 if (aglat->bottom || !aglat->values
2462 /* If the following is true, the one value is in known_aggs. */
2463 || (!plats->aggs_contain_variable
2464 && aglat->is_single_const ()))
2465 continue;
2467 for (val = aglat->values; val; val = val->next)
2469 struct ipa_agg_jf_item item;
2471 item.offset = aglat->offset;
2472 item.value = val->value;
2473 vec_safe_push (ajf->items, item);
2475 perform_estimation_of_a_value (node, known_csts, known_contexts,
2476 known_aggs_ptrs, base_time,
2477 removable_params_cost, 0, val);
2479 if (dump_file && (dump_flags & TDF_DETAILS))
2481 fprintf (dump_file, " - estimates for value ");
2482 print_ipcp_constant_value (dump_file, val->value);
2483 fprintf (dump_file, " for ");
2484 ipa_dump_param (dump_file, info, i);
2485 fprintf (dump_file, "[%soffset: " HOST_WIDE_INT_PRINT_DEC
2486 "]: time_benefit: %i, size: %i\n",
2487 plats->aggs_by_ref ? "ref " : "",
2488 aglat->offset,
2489 val->local_time_benefit, val->local_size_cost);
2492 ajf->items->pop ();
2497 for (i = 0; i < count ; i++)
2498 vec_free (known_aggs[i].items);
2500 known_csts.release ();
2501 known_contexts.release ();
2502 known_aggs.release ();
2503 known_aggs_ptrs.release ();
2507 /* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
2508 topological sort of values. */
2510 template <typename valtype>
2511 void
2512 value_topo_info<valtype>::add_val (ipcp_value<valtype> *cur_val)
2514 ipcp_value_source<valtype> *src;
2516 if (cur_val->dfs)
2517 return;
2519 dfs_counter++;
2520 cur_val->dfs = dfs_counter;
2521 cur_val->low_link = dfs_counter;
2523 cur_val->topo_next = stack;
2524 stack = cur_val;
2525 cur_val->on_stack = true;
2527 for (src = cur_val->sources; src; src = src->next)
2528 if (src->val)
2530 if (src->val->dfs == 0)
2532 add_val (src->val);
2533 if (src->val->low_link < cur_val->low_link)
2534 cur_val->low_link = src->val->low_link;
2536 else if (src->val->on_stack
2537 && src->val->dfs < cur_val->low_link)
2538 cur_val->low_link = src->val->dfs;
2541 if (cur_val->dfs == cur_val->low_link)
2543 ipcp_value<valtype> *v, *scc_list = NULL;
2547 v = stack;
2548 stack = v->topo_next;
2549 v->on_stack = false;
2551 v->scc_next = scc_list;
2552 scc_list = v;
2554 while (v != cur_val);
2556 cur_val->topo_next = values_topo;
2557 values_topo = cur_val;
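/* The code above is Tarjan's strongly connected component algorithm run on
   the graph whose nodes are ipcp_values and whose edges lead from a value to
   the source values it was derived from.  Each completed SCC is chained
   through scc_next and pushed onto values_topo, which propagate_effects
   below walks to accumulate time and size effects per SCC.  */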
2561 /* Add all values in lattices associated with NODE to the topological sort if
2562 they are not there yet. */
2564 static void
2565 add_all_node_vals_to_toposort (cgraph_node *node, ipa_topo_info *topo)
2567 struct ipa_node_params *info = IPA_NODE_REF (node);
2568 int i, count = ipa_get_param_count (info);
2570 for (i = 0; i < count ; i++)
2572 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2573 ipcp_lattice<tree> *lat = &plats->itself;
2574 struct ipcp_agg_lattice *aglat;
2576 if (!lat->bottom)
2578 ipcp_value<tree> *val;
2579 for (val = lat->values; val; val = val->next)
2580 topo->constants.add_val (val);
2583 if (!plats->aggs_bottom)
2584 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2585 if (!aglat->bottom)
2587 ipcp_value<tree> *val;
2588 for (val = aglat->values; val; val = val->next)
2589 topo->constants.add_val (val);
2592 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
2593 if (!ctxlat->bottom)
2595 ipcp_value<ipa_polymorphic_call_context> *ctxval;
2596 for (ctxval = ctxlat->values; ctxval; ctxval = ctxval->next)
2597 topo->contexts.add_val (ctxval);
2602 /* Do one pass of constant propagation along the call graph edges, from callers
2603 to callees (requires topological ordering in TOPO), iterating over strongly
2604 connected components. */
2606 static void
2607 propagate_constants_topo (struct ipa_topo_info *topo)
2609 int i;
2611 for (i = topo->nnodes - 1; i >= 0; i--)
2613 unsigned j;
2614 struct cgraph_node *v, *node = topo->order[i];
2615 vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
2617 /* First, iteratively propagate within the strongly connected component
2618 until all lattices stabilize. */
2619 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
2620 if (v->has_gimple_body_p ())
2621 push_node_to_stack (topo, v);
2623 v = pop_node_from_stack (topo);
2624 while (v)
2626 struct cgraph_edge *cs;
2628 for (cs = v->callees; cs; cs = cs->next_callee)
2629 if (ipa_edge_within_scc (cs)
2630 && propagate_constants_accross_call (cs))
2631 push_node_to_stack (topo, cs->callee);
2632 v = pop_node_from_stack (topo);
2635 /* Afterwards, propagate along edges leading out of the SCC, calculate
2636 the local effects of the discovered constants and add all valid values
2637 to their topological sort. */
2638 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
2639 if (v->has_gimple_body_p ())
2641 struct cgraph_edge *cs;
2643 estimate_local_effects (v);
2644 add_all_node_vals_to_toposort (v, topo);
2645 for (cs = v->callees; cs; cs = cs->next_callee)
2646 if (!ipa_edge_within_scc (cs))
2647 propagate_constants_accross_call (cs);
2649 cycle_nodes.release ();
2654 /* Return the sum of A and B if neither of them is bigger than INT_MAX/2,
2655 otherwise return the bigger one. */
2657 static int
2658 safe_add (int a, int b)
2660 if (a > INT_MAX/2 || b > INT_MAX/2)
2661 return a > b ? a : b;
2662 else
2663 return a + b;
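/* For example, safe_add (5, 7) is simply 12, whereas
   safe_add (INT_MAX / 2 + 1, 100) returns INT_MAX / 2 + 1: once either
   operand exceeds INT_MAX / 2, the result is clamped to the larger operand
   so that accumulated benefits and costs cannot overflow.  */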
2667 /* Propagate the estimated effects of individual values along the topological
2668 order, from the dependent values to those they depend on. */
2670 template <typename valtype>
2671 void
2672 value_topo_info<valtype>::propagate_effects ()
2674 ipcp_value<valtype> *base;
2676 for (base = values_topo; base; base = base->topo_next)
2678 ipcp_value_source<valtype> *src;
2679 ipcp_value<valtype> *val;
2680 int time = 0, size = 0;
2682 for (val = base; val; val = val->scc_next)
2684 time = safe_add (time,
2685 val->local_time_benefit + val->prop_time_benefit);
2686 size = safe_add (size, val->local_size_cost + val->prop_size_cost);
2689 for (val = base; val; val = val->scc_next)
2690 for (src = val->sources; src; src = src->next)
2691 if (src->val
2692 && src->cs->maybe_hot_p ())
2694 src->val->prop_time_benefit = safe_add (time,
2695 src->val->prop_time_benefit);
2696 src->val->prop_size_cost = safe_add (size,
2697 src->val->prop_size_cost);
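/* In other words, the local and already-propagated effects of all values in
   one SCC are summed with safe_add and then credited to every source value
   that feeds the SCC over a maybe-hot edge.  For instance, if an SCC has a
   combined time benefit of 30 and size cost of 12, each such source value
   gets its prop_time_benefit increased by 30 and its prop_size_cost by 12.  */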
2703 /* Propagate constants, polymorphic contexts and their effects from the
2704 summaries interprocedurally. */
2706 static void
2707 ipcp_propagate_stage (struct ipa_topo_info *topo)
2709 struct cgraph_node *node;
2711 if (dump_file)
2712 fprintf (dump_file, "\n Propagating constants:\n\n");
2714 if (in_lto_p)
2715 ipa_update_after_lto_read ();
2718 FOR_EACH_DEFINED_FUNCTION (node)
2720 struct ipa_node_params *info = IPA_NODE_REF (node);
2722 determine_versionability (node);
2723 if (node->has_gimple_body_p ())
2725 info->lattices = XCNEWVEC (struct ipcp_param_lattices,
2726 ipa_get_param_count (info));
2727 initialize_node_lattices (node);
2729 if (node->definition && !node->alias)
2730 overall_size += inline_summary (node)->self_size;
2731 if (node->count > max_count)
2732 max_count = node->count;
2735 max_new_size = overall_size;
2736 if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
2737 max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
2738 max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;
2740 if (dump_file)
2741 fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
2742 overall_size, max_new_size);
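/* Illustration of the size budget computed above, assuming the default
   values of 10000 for PARAM_LARGE_UNIT_INSNS and 10 for
   PARAM_IPCP_UNIT_GROWTH: a unit with overall_size 4000 gets
   max_new_size = 10000 + 10000 * 10 / 100 + 1 = 11001, while a unit with
   overall_size 50000 gets 50000 + 5000 + 1 = 55001.  */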
2744 propagate_constants_topo (topo);
2745 #ifdef ENABLE_CHECKING
2746 ipcp_verify_propagated_values ();
2747 #endif
2748 topo->constants.propagate_effects ();
2749 topo->contexts.propagate_effects ();
2751 if (dump_file)
2753 fprintf (dump_file, "\nIPA lattices after all propagation:\n");
2754 print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
2758 /* Discover indirect outgoing edges of NODE, which is a new clone with known
2759 KNOWN_CSTS, KNOWN_CONTEXTS and AGGVALS, that can now be made direct, and make them so. */
2761 static void
2762 ipcp_discover_new_direct_edges (struct cgraph_node *node,
2763 vec<tree> known_csts,
2764 vec<ipa_polymorphic_call_context>
2765 known_contexts,
2766 struct ipa_agg_replacement_value *aggvals)
2768 struct cgraph_edge *ie, *next_ie;
2769 bool found = false;
2771 for (ie = node->indirect_calls; ie; ie = next_ie)
2773 tree target;
2774 bool speculative;
2776 next_ie = ie->next_callee;
2777 target = ipa_get_indirect_edge_target_1 (ie, known_csts, known_contexts,
2778 vNULL, aggvals, &speculative);
2779 if (target)
2781 bool agg_contents = ie->indirect_info->agg_contents;
2782 bool polymorphic = ie->indirect_info->polymorphic;
2783 int param_index = ie->indirect_info->param_index;
2784 struct cgraph_edge *cs = ipa_make_edge_direct_to_target (ie, target,
2785 speculative);
2786 found = true;
2788 if (cs && !agg_contents && !polymorphic)
2790 struct ipa_node_params *info = IPA_NODE_REF (node);
2791 int c = ipa_get_controlled_uses (info, param_index);
2792 if (c != IPA_UNDESCRIBED_USE)
2794 struct ipa_ref *to_del;
2796 c--;
2797 ipa_set_controlled_uses (info, param_index, c);
2798 if (dump_file && (dump_flags & TDF_DETAILS))
2799 fprintf (dump_file, " controlled uses count of param "
2800 "%i bumped down to %i\n", param_index, c);
2801 if (c == 0
2802 && (to_del = node->find_reference (cs->callee, NULL, 0)))
2804 if (dump_file && (dump_flags & TDF_DETAILS))
2805 fprintf (dump_file, " and even removing its "
2806 "cloning-created reference\n");
2807 to_del->remove_reference ();
2813 /* Turning indirect calls into direct calls will improve the overall summary. */
2814 if (found)
2815 inline_update_overall_summary (node);
2818 /* Vectors of pointers which form the linked lists of clones of an original
2819 cgraph edge. */
2821 static vec<cgraph_edge *> next_edge_clone;
2822 static vec<cgraph_edge *> prev_edge_clone;
2824 static inline void
2825 grow_edge_clone_vectors (void)
2827 if (next_edge_clone.length ()
2828 <= (unsigned) symtab->edges_max_uid)
2829 next_edge_clone.safe_grow_cleared (symtab->edges_max_uid + 1);
2830 if (prev_edge_clone.length ()
2831 <= (unsigned) symtab->edges_max_uid)
2832 prev_edge_clone.safe_grow_cleared (symtab->edges_max_uid + 1);
2835 /* Edge duplication hook to grow the appropriate linked list in
2836 next_edge_clone. */
2838 static void
2839 ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2840 void *)
2842 grow_edge_clone_vectors ();
2844 struct cgraph_edge *old_next = next_edge_clone[src->uid];
2845 if (old_next)
2846 prev_edge_clone[old_next->uid] = dst;
2847 prev_edge_clone[dst->uid] = src;
2849 next_edge_clone[dst->uid] = old_next;
2850 next_edge_clone[src->uid] = dst;
2853 /* Hook that is called by cgraph.c when an edge is removed. */
2855 static void
2856 ipcp_edge_removal_hook (struct cgraph_edge *cs, void *)
2858 grow_edge_clone_vectors ();
2860 struct cgraph_edge *prev = prev_edge_clone[cs->uid];
2861 struct cgraph_edge *next = next_edge_clone[cs->uid];
2862 if (prev)
2863 next_edge_clone[prev->uid] = next;
2864 if (next)
2865 prev_edge_clone[next->uid] = prev;
2868 /* See if NODE is a clone with a known aggregate value at a given OFFSET of a
2869 parameter with the given INDEX. */
2871 static tree
2872 get_clone_agg_value (struct cgraph_node *node, HOST_WIDE_INT offset,
2873 int index)
2875 struct ipa_agg_replacement_value *aggval;
2877 aggval = ipa_get_agg_replacements_for_node (node);
2878 while (aggval)
2880 if (aggval->offset == offset
2881 && aggval->index == index)
2882 return aggval->value;
2883 aggval = aggval->next;
2885 return NULL_TREE;
2888 /* Return true if NODE is DEST or its clone for all contexts. */
2890 static bool
2891 same_node_or_its_all_contexts_clone_p (cgraph_node *node, cgraph_node *dest)
2893 if (node == dest)
2894 return true;
2896 struct ipa_node_params *info = IPA_NODE_REF (node);
2897 return info->is_all_contexts_clone && info->ipcp_orig_node == dest;
2900 /* Return true if edge CS brings the value described by SRC to node DEST or
2901 its clone for all contexts. */
2903 static bool
2904 cgraph_edge_brings_value_p (cgraph_edge *cs, ipcp_value_source<tree> *src,
2905 cgraph_node *dest)
2907 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2908 enum availability availability;
2909 cgraph_node *real_dest = cs->callee->function_symbol (&availability);
2911 if (!same_node_or_its_all_contexts_clone_p (real_dest, dest)
2912 || availability <= AVAIL_INTERPOSABLE
2913 || caller_info->node_dead)
2914 return false;
2915 if (!src->val)
2916 return true;
2918 if (caller_info->ipcp_orig_node)
2920 tree t;
2921 if (src->offset == -1)
2922 t = caller_info->known_csts[src->index];
2923 else
2924 t = get_clone_agg_value (cs->caller, src->offset, src->index);
2925 return (t != NULL_TREE
2926 && values_equal_for_ipcp_p (src->val->value, t));
2928 else
2930 struct ipcp_agg_lattice *aglat;
2931 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
2932 src->index);
2933 if (src->offset == -1)
2934 return (plats->itself.is_single_const ()
2935 && values_equal_for_ipcp_p (src->val->value,
2936 plats->itself.values->value));
2937 else
2939 if (plats->aggs_bottom || plats->aggs_contain_variable)
2940 return false;
2941 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2942 if (aglat->offset == src->offset)
2943 return (aglat->is_single_const ()
2944 && values_equal_for_ipcp_p (src->val->value,
2945 aglat->values->value));
2947 return false;
2951 /* Return true if edge CS brings the value described by SRC to node DEST or
2952 its clone for all contexts. */
2954 static bool
2955 cgraph_edge_brings_value_p (cgraph_edge *cs,
2956 ipcp_value_source<ipa_polymorphic_call_context> *src,
2957 cgraph_node *dest)
2959 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2960 cgraph_node *real_dest = cs->callee->function_symbol ();
2962 if (!same_node_or_its_all_contexts_clone_p (real_dest, dest)
2963 || caller_info->node_dead)
2964 return false;
2965 if (!src->val)
2966 return true;
2968 if (caller_info->ipcp_orig_node)
2969 return (caller_info->known_contexts.length () > (unsigned) src->index)
2970 && values_equal_for_ipcp_p (src->val->value,
2971 caller_info->known_contexts[src->index]);
2973 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
2974 src->index);
2975 return plats->ctxlat.is_single_const ()
2976 && values_equal_for_ipcp_p (src->val->value,
2977 plats->ctxlat.values->value);
2980 /* Get the next clone in the linked list of clones of an edge. */
2982 static inline struct cgraph_edge *
2983 get_next_cgraph_edge_clone (struct cgraph_edge *cs)
2985 return next_edge_clone[cs->uid];
2988 /* Given VAL that is intended for DEST, iterate over all its sources and, if
2989 they still hold, add their edge frequencies to *FREQ_SUM, their profile counts
2990 to *COUNT_SUM and their number to *CALLER_COUNT. Return true if any is hot. */
2992 template <typename valtype>
2993 static bool
2994 get_info_about_necessary_edges (ipcp_value<valtype> *val, cgraph_node *dest,
2995 int *freq_sum,
2996 gcov_type *count_sum, int *caller_count)
2998 ipcp_value_source<valtype> *src;
2999 int freq = 0, count = 0;
3000 gcov_type cnt = 0;
3001 bool hot = false;
3003 for (src = val->sources; src; src = src->next)
3005 struct cgraph_edge *cs = src->cs;
3006 while (cs)
3008 if (cgraph_edge_brings_value_p (cs, src, dest))
3010 count++;
3011 freq += cs->frequency;
3012 cnt += cs->count;
3013 hot |= cs->maybe_hot_p ();
3015 cs = get_next_cgraph_edge_clone (cs);
3019 *freq_sum = freq;
3020 *count_sum = cnt;
3021 *caller_count = count;
3022 return hot;
3025 /* Return a vector of incoming edges that do bring value VAL to node DEST. It
3026 is assumed their number is known and equal to CALLER_COUNT. */
3028 template <typename valtype>
3029 static vec<cgraph_edge *>
3030 gather_edges_for_value (ipcp_value<valtype> *val, cgraph_node *dest,
3031 int caller_count)
3033 ipcp_value_source<valtype> *src;
3034 vec<cgraph_edge *> ret;
3036 ret.create (caller_count);
3037 for (src = val->sources; src; src = src->next)
3039 struct cgraph_edge *cs = src->cs;
3040 while (cs)
3042 if (cgraph_edge_brings_value_p (cs, src, dest))
3043 ret.quick_push (cs);
3044 cs = get_next_cgraph_edge_clone (cs);
3048 return ret;
3051 /* Construct a replacement map for a known VALUE for the formal parameter with
3052 number PARM_NUM. Return it or NULL if for some reason it cannot be created. */
3054 static struct ipa_replace_map *
3055 get_replacement_map (struct ipa_node_params *info, tree value, int parm_num)
3057 struct ipa_replace_map *replace_map;
3060 replace_map = ggc_alloc<ipa_replace_map> ();
3061 if (dump_file)
3063 fprintf (dump_file, " replacing ");
3064 ipa_dump_param (dump_file, info, parm_num);
3066 fprintf (dump_file, " with const ");
3067 print_generic_expr (dump_file, value, 0);
3068 fprintf (dump_file, "\n");
3070 replace_map->old_tree = NULL;
3071 replace_map->parm_num = parm_num;
3072 replace_map->new_tree = value;
3073 replace_map->replace_p = true;
3074 replace_map->ref_p = false;
3076 return replace_map;
3079 /* Dump new profiling counts. */
3081 static void
3082 dump_profile_updates (struct cgraph_node *orig_node,
3083 struct cgraph_node *new_node)
3085 struct cgraph_edge *cs;
3087 fprintf (dump_file, " setting count of the specialized node to "
3088 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) new_node->count);
3089 for (cs = new_node->callees; cs ; cs = cs->next_callee)
3090 fprintf (dump_file, " edge to %s has count "
3091 HOST_WIDE_INT_PRINT_DEC "\n",
3092 cs->callee->name (), (HOST_WIDE_INT) cs->count);
3094 fprintf (dump_file, " setting count of the original node to "
3095 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) orig_node->count);
3096 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
3097 fprintf (dump_file, " edge to %s is left with "
3098 HOST_WIDE_INT_PRINT_DEC "\n",
3099 cs->callee->name (), (HOST_WIDE_INT) cs->count);
3102 /* After a specialized NEW_NODE version of ORIG_NODE has been created, update
3103 their profile information to reflect this. */
3105 static void
3106 update_profiling_info (struct cgraph_node *orig_node,
3107 struct cgraph_node *new_node)
3109 struct cgraph_edge *cs;
3110 struct caller_statistics stats;
3111 gcov_type new_sum, orig_sum;
3112 gcov_type remainder, orig_node_count = orig_node->count;
3114 if (orig_node_count == 0)
3115 return;
3117 init_caller_stats (&stats);
3118 orig_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
3119 false);
3120 orig_sum = stats.count_sum;
3121 init_caller_stats (&stats);
3122 new_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
3123 false);
3124 new_sum = stats.count_sum;
3126 if (orig_node_count < orig_sum + new_sum)
3128 if (dump_file)
3129 fprintf (dump_file, " Problem: node %s/%i has too low count "
3130 HOST_WIDE_INT_PRINT_DEC " while the sum of incoming "
3131 "counts is " HOST_WIDE_INT_PRINT_DEC "\n",
3132 orig_node->name (), orig_node->order,
3133 (HOST_WIDE_INT) orig_node_count,
3134 (HOST_WIDE_INT) (orig_sum + new_sum));
3136 orig_node_count = (orig_sum + new_sum) * 12 / 10;
3137 if (dump_file)
3138 fprintf (dump_file, " proceeding by pretending it was "
3139 HOST_WIDE_INT_PRINT_DEC "\n",
3140 (HOST_WIDE_INT) orig_node_count);
3143 new_node->count = new_sum;
3144 remainder = orig_node_count - new_sum;
3145 orig_node->count = remainder;
3147 for (cs = new_node->callees; cs ; cs = cs->next_callee)
3148 if (cs->frequency)
3149 cs->count = apply_probability (cs->count,
3150 GCOV_COMPUTE_SCALE (new_sum,
3151 orig_node_count));
3152 else
3153 cs->count = 0;
3155 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
3156 cs->count = apply_probability (cs->count,
3157 GCOV_COMPUTE_SCALE (remainder,
3158 orig_node_count));
3160 if (dump_file)
3161 dump_profile_updates (orig_node, new_node);
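/* A worked example of the redistribution above: if orig_node has count 1000
   and the callers being redirected to the clone account for new_sum 600, the
   clone receives count 600 and the original keeps the remaining 400; the
   clone's outgoing edge counts are scaled by 600/1000 and the original's by
   400/1000.  */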
3164 /* Update the respective profile of specialized NEW_NODE and the original
3165 ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
3166 have been redirected to the specialized version. */
3168 static void
3169 update_specialized_profile (struct cgraph_node *new_node,
3170 struct cgraph_node *orig_node,
3171 gcov_type redirected_sum)
3173 struct cgraph_edge *cs;
3174 gcov_type new_node_count, orig_node_count = orig_node->count;
3176 if (dump_file)
3177 fprintf (dump_file, " the sum of counts of redirected edges is "
3178 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) redirected_sum);
3179 if (orig_node_count == 0)
3180 return;
3182 gcc_assert (orig_node_count >= redirected_sum);
3184 new_node_count = new_node->count;
3185 new_node->count += redirected_sum;
3186 orig_node->count -= redirected_sum;
3188 for (cs = new_node->callees; cs ; cs = cs->next_callee)
3189 if (cs->frequency)
3190 cs->count += apply_probability (cs->count,
3191 GCOV_COMPUTE_SCALE (redirected_sum,
3192 new_node_count));
3193 else
3194 cs->count = 0;
3196 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
3198 gcov_type dec = apply_probability (cs->count,
3199 GCOV_COMPUTE_SCALE (redirected_sum,
3200 orig_node_count));
3201 if (dec < cs->count)
3202 cs->count -= dec;
3203 else
3204 cs->count = 0;
3207 if (dump_file)
3208 dump_profile_updates (orig_node, new_node);
3211 /* Create a specialized version of NODE with known constants in KNOWN_CSTS,
3212 known contexts in KNOWN_CONTEXTS and known aggregate values in AGGVALS and
3213 redirect all edges in CALLERS to it. */
3215 static struct cgraph_node *
3216 create_specialized_node (struct cgraph_node *node,
3217 vec<tree> known_csts,
3218 vec<ipa_polymorphic_call_context> known_contexts,
3219 struct ipa_agg_replacement_value *aggvals,
3220 vec<cgraph_edge *> callers)
3222 struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
3223 vec<ipa_replace_map *, va_gc> *replace_trees = NULL;
3224 struct ipa_agg_replacement_value *av;
3225 struct cgraph_node *new_node;
3226 int i, count = ipa_get_param_count (info);
3227 bitmap args_to_skip;
3229 gcc_assert (!info->ipcp_orig_node);
3231 if (node->local.can_change_signature)
3233 args_to_skip = BITMAP_GGC_ALLOC ();
3234 for (i = 0; i < count; i++)
3236 tree t = known_csts[i];
3238 if (t || !ipa_is_param_used (info, i))
3239 bitmap_set_bit (args_to_skip, i);
3242 else
3244 args_to_skip = NULL;
3245 if (dump_file && (dump_flags & TDF_DETAILS))
3246 fprintf (dump_file, " cannot change function signature\n");
3249 for (i = 0; i < count ; i++)
3251 tree t = known_csts[i];
3252 if (t)
3254 struct ipa_replace_map *replace_map;
3256 gcc_checking_assert (TREE_CODE (t) != TREE_BINFO);
3257 replace_map = get_replacement_map (info, t, i);
3258 if (replace_map)
3259 vec_safe_push (replace_trees, replace_map);
3263 new_node = node->create_virtual_clone (callers, replace_trees,
3264 args_to_skip, "constprop");
3265 ipa_set_node_agg_value_chain (new_node, aggvals);
3266 for (av = aggvals; av; av = av->next)
3267 new_node->maybe_create_reference (av->value, IPA_REF_ADDR, NULL);
3269 if (dump_file && (dump_flags & TDF_DETAILS))
3271 fprintf (dump_file, " the new node is %s/%i.\n",
3272 new_node->name (), new_node->order);
3273 if (known_contexts.exists ())
3275 for (i = 0; i < count ; i++)
3276 if (!known_contexts[i].useless_p ())
3278 fprintf (dump_file, " known ctx %i is ", i);
3279 known_contexts[i].dump (dump_file);
3282 if (aggvals)
3283 ipa_dump_agg_replacement_values (dump_file, aggvals);
3285 ipa_check_create_node_params ();
3286 update_profiling_info (node, new_node);
3287 new_info = IPA_NODE_REF (new_node);
3288 new_info->ipcp_orig_node = node;
3289 new_info->known_csts = known_csts;
3290 new_info->known_contexts = known_contexts;
3292 ipcp_discover_new_direct_edges (new_node, known_csts, known_contexts, aggvals);
3294 callers.release ();
3295 return new_node;
3298 /* Given a NODE and a subset of its CALLERS, try to populate blank slots in
3299 KNOWN_CSTS with constants that are also known for all of the CALLERS. */
3301 static void
3302 find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
3303 vec<tree> known_csts,
3304 vec<cgraph_edge *> callers)
3306 struct ipa_node_params *info = IPA_NODE_REF (node);
3307 int i, count = ipa_get_param_count (info);
3309 for (i = 0; i < count ; i++)
3311 struct cgraph_edge *cs;
3312 tree newval = NULL_TREE;
3313 int j;
3314 bool first = true;
3316 if (ipa_get_scalar_lat (info, i)->bottom || known_csts[i])
3317 continue;
3319 FOR_EACH_VEC_ELT (callers, j, cs)
3321 struct ipa_jump_func *jump_func;
3322 tree t;
3324 if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
3326 newval = NULL_TREE;
3327 break;
3329 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
3330 t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func);
3331 if (!t
3332 || (newval
3333 && !values_equal_for_ipcp_p (t, newval))
3334 || (!first && !newval))
3336 newval = NULL_TREE;
3337 break;
3339 else
3340 newval = t;
3341 first = false;
3344 if (newval)
3346 if (dump_file && (dump_flags & TDF_DETAILS))
3348 fprintf (dump_file, " adding an extra known scalar value ");
3349 print_ipcp_constant_value (dump_file, newval);
3350 fprintf (dump_file, " for ");
3351 ipa_dump_param (dump_file, info, i);
3352 fprintf (dump_file, "\n");
3355 known_csts[i] = newval;
3360 /* Given a NODE and a subset of its CALLERS, try to populate blank slots in
3361 KNOWN_CONTEXTS with polymorphic contexts that are also known for all of the
3362 CALLERS. */
3364 static void
3365 find_more_contexts_for_caller_subset (cgraph_node *node,
3366 vec<ipa_polymorphic_call_context>
3367 *known_contexts,
3368 vec<cgraph_edge *> callers)
3370 ipa_node_params *info = IPA_NODE_REF (node);
3371 int i, count = ipa_get_param_count (info);
3373 for (i = 0; i < count ; i++)
3375 cgraph_edge *cs;
3377 if (ipa_get_poly_ctx_lat (info, i)->bottom
3378 || (known_contexts->exists ()
3379 && !(*known_contexts)[i].useless_p ()))
3380 continue;
3382 ipa_polymorphic_call_context newval;
3383 bool first = true;
3384 int j;
3386 FOR_EACH_VEC_ELT (callers, j, cs)
3388 if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
3389 return;
3390 ipa_jump_func *jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs),
3392 ipa_polymorphic_call_context ctx;
3393 ctx = ipa_context_from_jfunc (IPA_NODE_REF (cs->caller), cs, i,
3394 jfunc);
3395 if (first)
3397 newval = ctx;
3398 first = false;
3400 else
3401 newval.meet_with (ctx);
3402 if (newval.useless_p ())
3403 break;
3406 if (!newval.useless_p ())
3408 if (dump_file && (dump_flags & TDF_DETAILS))
3410 fprintf (dump_file, " adding an extra known polymorphic "
3411 "context ");
3412 print_ipcp_constant_value (dump_file, newval);
3413 fprintf (dump_file, " for ");
3414 ipa_dump_param (dump_file, info, i);
3415 fprintf (dump_file, "\n");
3418 if (!known_contexts->exists ())
3419 known_contexts->safe_grow_cleared (ipa_get_param_count (info));
3420 (*known_contexts)[i] = newval;
3426 /* Go through PLATS and create a vector of items consisting of the values and
3427 offsets (minus OFFSET) of lattices that contain only a single value. */
3429 static vec<ipa_agg_jf_item>
3430 copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
3432 vec<ipa_agg_jf_item> res = vNULL;
3434 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
3435 return vNULL;
3437 for (struct ipcp_agg_lattice *aglat = plats->aggs; aglat; aglat = aglat->next)
3438 if (aglat->is_single_const ())
3440 struct ipa_agg_jf_item ti;
3441 ti.offset = aglat->offset - offset;
3442 ti.value = aglat->values->value;
3443 res.safe_push (ti);
3445 return res;
3448 /* Intersect all values in INTER with single value lattices in PLATS (while
3449 subtracting OFFSET). */
3451 static void
3452 intersect_with_plats (struct ipcp_param_lattices *plats,
3453 vec<ipa_agg_jf_item> *inter,
3454 HOST_WIDE_INT offset)
3456 struct ipcp_agg_lattice *aglat;
3457 struct ipa_agg_jf_item *item;
3458 int k;
3460 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
3462 inter->release ();
3463 return;
3466 aglat = plats->aggs;
3467 FOR_EACH_VEC_ELT (*inter, k, item)
3469 bool found = false;
3470 if (!item->value)
3471 continue;
3472 while (aglat)
3474 if (aglat->offset - offset > item->offset)
3475 break;
3476 if (aglat->offset - offset == item->offset)
3478 gcc_checking_assert (item->value);
3479 if (values_equal_for_ipcp_p (item->value, aglat->values->value))
3480 found = true;
3481 break;
3483 aglat = aglat->next;
3485 if (!found)
3486 item->value = NULL_TREE;
3490 /* Copy aggregate replacement values of NODE (which is an IPA-CP clone) to the
3491 vector result while subtracting OFFSET from the individual value offsets. */
3493 static vec<ipa_agg_jf_item>
3494 agg_replacements_to_vector (struct cgraph_node *node, int index,
3495 HOST_WIDE_INT offset)
3497 struct ipa_agg_replacement_value *av;
3498 vec<ipa_agg_jf_item> res = vNULL;
3500 for (av = ipa_get_agg_replacements_for_node (node); av; av = av->next)
3501 if (av->index == index
3502 && (av->offset - offset) >= 0)
3504 struct ipa_agg_jf_item item;
3505 gcc_checking_assert (av->value);
3506 item.offset = av->offset - offset;
3507 item.value = av->value;
3508 res.safe_push (item);
3511 return res;
3514 /* Intersect all values in INTER with those that we have already scheduled to
3515 be replaced in parameter number INDEX of NODE, which is an IPA-CP clone
3516 (while subtracting OFFSET). */
3518 static void
3519 intersect_with_agg_replacements (struct cgraph_node *node, int index,
3520 vec<ipa_agg_jf_item> *inter,
3521 HOST_WIDE_INT offset)
3523 struct ipa_agg_replacement_value *srcvals;
3524 struct ipa_agg_jf_item *item;
3525 int i;
3527 srcvals = ipa_get_agg_replacements_for_node (node);
3528 if (!srcvals)
3530 inter->release ();
3531 return;
3534 FOR_EACH_VEC_ELT (*inter, i, item)
3536 struct ipa_agg_replacement_value *av;
3537 bool found = false;
3538 if (!item->value)
3539 continue;
3540 for (av = srcvals; av; av = av->next)
3542 gcc_checking_assert (av->value);
3543 if (av->index == index
3544 && av->offset - offset == item->offset)
3546 if (values_equal_for_ipcp_p (item->value, av->value))
3547 found = true;
3548 break;
3551 if (!found)
3552 item->value = NULL_TREE;
3556 /* Intersect values in INTER with aggregate values that come along edge CS to
3557 parameter number INDEX and return it. If INTER does not actually exist yet,
3558 copy all incoming values to it. If we determine we ended up with no values
3559 whatsoever, return a released vector. */
3561 static vec<ipa_agg_jf_item>
3562 intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
3563 vec<ipa_agg_jf_item> inter)
3565 struct ipa_jump_func *jfunc;
3566 jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), index);
3567 if (jfunc->type == IPA_JF_PASS_THROUGH
3568 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3570 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
3571 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
3573 if (caller_info->ipcp_orig_node)
3575 struct cgraph_node *orig_node = caller_info->ipcp_orig_node;
3576 struct ipcp_param_lattices *orig_plats;
3577 orig_plats = ipa_get_parm_lattices (IPA_NODE_REF (orig_node),
3578 src_idx);
3579 if (agg_pass_through_permissible_p (orig_plats, jfunc))
3581 if (!inter.exists ())
3582 inter = agg_replacements_to_vector (cs->caller, src_idx, 0);
3583 else
3584 intersect_with_agg_replacements (cs->caller, src_idx,
3585 &inter, 0);
3587 else
3589 inter.release ();
3590 return vNULL;
3593 else
3595 struct ipcp_param_lattices *src_plats;
3596 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
3597 if (agg_pass_through_permissible_p (src_plats, jfunc))
3599 /* Currently we do not produce clobber aggregate jump
3600 functions, adjust when we do. */
3601 gcc_checking_assert (!jfunc->agg.items);
3602 if (!inter.exists ())
3603 inter = copy_plats_to_inter (src_plats, 0);
3604 else
3605 intersect_with_plats (src_plats, &inter, 0);
3607 else
3609 inter.release ();
3610 return vNULL;
3614 else if (jfunc->type == IPA_JF_ANCESTOR
3615 && ipa_get_jf_ancestor_agg_preserved (jfunc))
3617 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
3618 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
3619 struct ipcp_param_lattices *src_plats;
3620 HOST_WIDE_INT delta = ipa_get_jf_ancestor_offset (jfunc);
3622 if (caller_info->ipcp_orig_node)
3624 if (!inter.exists ())
3625 inter = agg_replacements_to_vector (cs->caller, src_idx, delta);
3626 else
3627 intersect_with_agg_replacements (cs->caller, src_idx, &inter,
3628 delta);
3630 else
3632 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
3633 /* Currently we do not produce clobber aggregate jump
3634 functions, adjust when we do. */
3635 gcc_checking_assert (!src_plats->aggs || !jfunc->agg.items);
3636 if (!inter.exists ())
3637 inter = copy_plats_to_inter (src_plats, delta);
3638 else
3639 intersect_with_plats (src_plats, &inter, delta);
3642 else if (jfunc->agg.items)
3644 struct ipa_agg_jf_item *item;
3645 int k;
3647 if (!inter.exists ())
3648 for (unsigned i = 0; i < jfunc->agg.items->length (); i++)
3649 inter.safe_push ((*jfunc->agg.items)[i]);
3650 else
3651 FOR_EACH_VEC_ELT (inter, k, item)
3653 int l = 0;
3654 bool found = false;
3656 if (!item->value)
3657 continue;
3659 while ((unsigned) l < jfunc->agg.items->length ())
3661 struct ipa_agg_jf_item *ti;
3662 ti = &(*jfunc->agg.items)[l];
3663 if (ti->offset > item->offset)
3664 break;
3665 if (ti->offset == item->offset)
3667 gcc_checking_assert (ti->value);
3668 if (values_equal_for_ipcp_p (item->value,
3669 ti->value))
3670 found = true;
3671 break;
3673 l++;
3675 if (!found)
3676 item->value = NULL;
3679 else
3681 inter.release ();
3682 return vec<ipa_agg_jf_item>();
3684 return inter;
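/* Example of the intersection computed here: if one incoming edge provides
   the aggregate values {offset 0: 1, offset 32: 7} for a parameter and
   another edge provides only {offset 0: 1}, the surviving intersection is
   {offset 0: 1}; the value at offset 32 is cleared and will not become an
   aggregate replacement for the specialized node.  */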
3687 /* Look at edges in CALLERS and collect all known aggregate values that arrive
3688 from all of them. */
3690 static struct ipa_agg_replacement_value *
3691 find_aggregate_values_for_callers_subset (struct cgraph_node *node,
3692 vec<cgraph_edge *> callers)
3694 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
3695 struct ipa_agg_replacement_value *res;
3696 struct ipa_agg_replacement_value **tail = &res;
3697 struct cgraph_edge *cs;
3698 int i, j, count = ipa_get_param_count (dest_info);
3700 FOR_EACH_VEC_ELT (callers, j, cs)
3702 int c = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
3703 if (c < count)
3704 count = c;
3707 for (i = 0; i < count ; i++)
3709 struct cgraph_edge *cs;
3710 vec<ipa_agg_jf_item> inter = vNULL;
3711 struct ipa_agg_jf_item *item;
3712 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (dest_info, i);
3713 int j;
3715 /* Among other things, the following check should deal with all by_ref
3716 mismatches. */
3717 if (plats->aggs_bottom)
3718 continue;
3720 FOR_EACH_VEC_ELT (callers, j, cs)
3722 inter = intersect_aggregates_with_edge (cs, i, inter);
3724 if (!inter.exists ())
3725 goto next_param;
3728 FOR_EACH_VEC_ELT (inter, j, item)
3730 struct ipa_agg_replacement_value *v;
3732 if (!item->value)
3733 continue;
3735 v = ggc_alloc<ipa_agg_replacement_value> ();
3736 v->index = i;
3737 v->offset = item->offset;
3738 v->value = item->value;
3739 v->by_ref = plats->aggs_by_ref;
3740 *tail = v;
3741 tail = &v->next;
3744 next_param:
3745 if (inter.exists ())
3746 inter.release ();
3748 *tail = NULL;
3749 return res;
3752 /* Turn KNOWN_AGGS into a list of aggregate replacement values. */
3754 static struct ipa_agg_replacement_value *
3755 known_aggs_to_agg_replacement_list (vec<ipa_agg_jump_function> known_aggs)
3757 struct ipa_agg_replacement_value *res;
3758 struct ipa_agg_replacement_value **tail = &res;
3759 struct ipa_agg_jump_function *aggjf;
3760 struct ipa_agg_jf_item *item;
3761 int i, j;
3763 FOR_EACH_VEC_ELT (known_aggs, i, aggjf)
3764 FOR_EACH_VEC_SAFE_ELT (aggjf->items, j, item)
3766 struct ipa_agg_replacement_value *v;
3767 v = ggc_alloc<ipa_agg_replacement_value> ();
3768 v->index = i;
3769 v->offset = item->offset;
3770 v->value = item->value;
3771 v->by_ref = aggjf->by_ref;
3772 *tail = v;
3773 tail = &v->next;
3775 *tail = NULL;
3776 return res;
3779 /* Determine whether CS also brings all scalar values that the NODE is
3780 specialized for. */
3782 static bool
3783 cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
3784 struct cgraph_node *node)
3786 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
3787 int count = ipa_get_param_count (dest_info);
3788 struct ipa_node_params *caller_info;
3789 struct ipa_edge_args *args;
3790 int i;
3792 caller_info = IPA_NODE_REF (cs->caller);
3793 args = IPA_EDGE_REF (cs);
3794 for (i = 0; i < count; i++)
3796 struct ipa_jump_func *jump_func;
3797 tree val, t;
3799 val = dest_info->known_csts[i];
3800 if (!val)
3801 continue;
3803 if (i >= ipa_get_cs_argument_count (args))
3804 return false;
3805 jump_func = ipa_get_ith_jump_func (args, i);
3806 t = ipa_value_from_jfunc (caller_info, jump_func);
3807 if (!t || !values_equal_for_ipcp_p (val, t))
3808 return false;
3810 return true;
3813 /* Determine whether CS also brings all aggregate values that NODE is
3814 specialized for. */
3815 static bool
3816 cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
3817 struct cgraph_node *node)
3819 struct ipa_node_params *orig_caller_info = IPA_NODE_REF (cs->caller);
3820 struct ipa_node_params *orig_node_info;
3821 struct ipa_agg_replacement_value *aggval;
3822 int i, ec, count;
3824 aggval = ipa_get_agg_replacements_for_node (node);
3825 if (!aggval)
3826 return true;
3828 count = ipa_get_param_count (IPA_NODE_REF (node));
3829 ec = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
3830 if (ec < count)
3831 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3832 if (av->index >= ec)
3833 return false;
3835 orig_node_info = IPA_NODE_REF (IPA_NODE_REF (node)->ipcp_orig_node);
3836 if (orig_caller_info->ipcp_orig_node)
3837 orig_caller_info = IPA_NODE_REF (orig_caller_info->ipcp_orig_node);
3839 for (i = 0; i < count; i++)
3841 static vec<ipa_agg_jf_item> values = vec<ipa_agg_jf_item>();
3842 struct ipcp_param_lattices *plats;
3843 bool interesting = false;
3844 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3845 if (av->index == i)
3847 interesting = true;
3848 break;
3850 if (!interesting)
3851 continue;
3853 plats = ipa_get_parm_lattices (orig_node_info, i);
3854 if (plats->aggs_bottom)
3855 return false;
3857 values = intersect_aggregates_with_edge (cs, i, values);
3858 if (!values.exists ())
3859 return false;
3861 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3862 if (av->index == i)
3864 struct ipa_agg_jf_item *item;
3865 int j;
3866 bool found = false;
3867 FOR_EACH_VEC_ELT (values, j, item)
3868 if (item->value
3869 && item->offset == av->offset
3870 && values_equal_for_ipcp_p (item->value, av->value))
3872 found = true;
3873 break;
3875 if (!found)
3877 values.release ();
3878 return false;
3882 return true;
3885 /* Given an original NODE and a VAL for which we have already created a
3886 specialized clone, check whether there are incoming edges that still lead
3887 into the old node but now also bring the requested value and also conform to
3888 all other criteria, so that they can be redirected to the specialized node.
3889 This function can therefore redirect the final edge in an SCC. */
3891 template <typename valtype>
3892 static void
3893 perhaps_add_new_callers (cgraph_node *node, ipcp_value<valtype> *val)
3895 ipcp_value_source<valtype> *src;
3896 gcov_type redirected_sum = 0;
3898 for (src = val->sources; src; src = src->next)
3900 struct cgraph_edge *cs = src->cs;
3901 while (cs)
3903 if (cgraph_edge_brings_value_p (cs, src, node)
3904 && cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
3905 && cgraph_edge_brings_all_agg_vals_for_node (cs, val->spec_node))
3907 if (dump_file)
3908 fprintf (dump_file, " - adding an extra caller %s/%i"
3909 " of %s/%i\n",
3910 xstrdup_for_dump (cs->caller->name ()),
3911 cs->caller->order,
3912 xstrdup_for_dump (val->spec_node->name ()),
3913 val->spec_node->order);
3915 cs->redirect_callee_duplicating_thunks (val->spec_node);
3916 val->spec_node->expand_all_artificial_thunks ();
3917 redirected_sum += cs->count;
3919 cs = get_next_cgraph_edge_clone (cs);
3923 if (redirected_sum)
3924 update_specialized_profile (val->spec_node, node, redirected_sum);
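/* Note that the walk above visits not only the originally recorded source
   edges but, through get_next_cgraph_edge_clone, also every edge created by
   cloning them, so callers materialized after the original decision was
   made can still be redirected.  The counts of all newly redirected edges
   are summed and the profiles of the original node and of the clone are
   adjusted accordingly.  */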
3927 /* Return true if KNOWN_CONTEXTS contain at least one useful context. */
3929 static bool
3930 known_contexts_useful_p (vec<ipa_polymorphic_call_context> known_contexts)
3932 ipa_polymorphic_call_context *ctx;
3933 int i;
3935 FOR_EACH_VEC_ELT (known_contexts, i, ctx)
3936 if (!ctx->useless_p ())
3937 return true;
3938 return false;
3941 /* Return a copy of KNOWN_CONTEXTS if it is not empty, otherwise return vNULL. */
3943 static vec<ipa_polymorphic_call_context>
3944 copy_useful_known_contexts (vec<ipa_polymorphic_call_context> known_contexts)
3946 if (known_contexts_useful_p (known_contexts))
3947 return known_contexts.copy ();
3948 else
3949 return vNULL;
3952 /* Copy KNOWN_CSTS and modify the copy according to VAL and INDEX. If
3953 non-empty, replace KNOWN_CONTEXTS with its copy too. */
3955 static void
3956 modify_known_vectors_with_val (vec<tree> *known_csts,
3957 vec<ipa_polymorphic_call_context> *known_contexts,
3958 ipcp_value<tree> *val,
3959 int index)
3961 *known_csts = known_csts->copy ();
3962 *known_contexts = copy_useful_known_contexts (*known_contexts);
3963 (*known_csts)[index] = val->value;
3966 /* Replace KNOWN_CSTS with its copy. Also copy KNOWN_CONTEXTS and modify the
3967 copy according to VAL and INDEX. */
3969 static void
3970 modify_known_vectors_with_val (vec<tree> *known_csts,
3971 vec<ipa_polymorphic_call_context> *known_contexts,
3972 ipcp_value<ipa_polymorphic_call_context> *val,
3973 int index)
3975 *known_csts = known_csts->copy ();
3976 *known_contexts = known_contexts->copy ();
3977 (*known_contexts)[index] = val->value;
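/* The two overloads above exist so that decide_about_value below can remain
   a template working on both ipcp_value<tree> and
   ipcp_value<ipa_polymorphic_call_context>.  Both overloads replace the
   vectors with fresh copies, so the context-independent vectors of the
   caller are left intact for subsequent candidate values.  */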
3980 /* Return true if OFFSET indicates this was not an aggregate value or there is
3981 a replacement equivalent to VALUE, INDEX and OFFSET among those in the
3982 AGGVALS list. */
3984 DEBUG_FUNCTION bool
3985 ipcp_val_agg_replacement_ok_p (ipa_agg_replacement_value *aggvals,
3986 int index, HOST_WIDE_INT offset, tree value)
3988 if (offset == -1)
3989 return true;
3991 while (aggvals)
3993 if (aggvals->index == index
3994 && aggvals->offset == offset
3995 && values_equal_for_ipcp_p (aggvals->value, value))
3996 return true;
3997 aggvals = aggvals->next;
3999 return false;
4002 /* Return true if OFFSET is minus one because the source of a polymorphic context
4003 cannot be an aggregate value. */
4005 DEBUG_FUNCTION bool
4006 ipcp_val_agg_replacement_ok_p (ipa_agg_replacement_value *,
4007 int , HOST_WIDE_INT offset,
4008 ipa_polymorphic_call_context)
4010 return offset == -1;
4013 /* Decide whether to create a special version of NODE for value VAL of parameter
4014 at the given INDEX. If OFFSET is -1, the value is for the parameter itself,
4015 otherwise it is stored at the given OFFSET of the parameter. KNOWN_CSTS,
4016 KNOWN_CONTEXTS and KNOWN_AGGS describe the other already known values. */
4018 template <typename valtype>
4019 static bool
4020 decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
4021 ipcp_value<valtype> *val, vec<tree> known_csts,
4022 vec<ipa_polymorphic_call_context> known_contexts)
4024 struct ipa_agg_replacement_value *aggvals;
4025 int freq_sum, caller_count;
4026 gcov_type count_sum;
4027 vec<cgraph_edge *> callers;
4029 if (val->spec_node)
4031 perhaps_add_new_callers (node, val);
4032 return false;
4034 else if (val->local_size_cost + overall_size > max_new_size)
4036 if (dump_file && (dump_flags & TDF_DETAILS))
4037 fprintf (dump_file, " Ignoring candidate value because "
4038 "max_new_size would be reached with %li.\n",
4039 val->local_size_cost + overall_size);
4040 return false;
4042 else if (!get_info_about_necessary_edges (val, node, &freq_sum, &count_sum,
4043 &caller_count))
4044 return false;
4046 if (dump_file && (dump_flags & TDF_DETAILS))
4048 fprintf (dump_file, " - considering value ");
4049 print_ipcp_constant_value (dump_file, val->value);
4050 fprintf (dump_file, " for ");
4051 ipa_dump_param (dump_file, IPA_NODE_REF (node), index);
4052 if (offset != -1)
4053 fprintf (dump_file, ", offset: " HOST_WIDE_INT_PRINT_DEC, offset);
4054 fprintf (dump_file, " (caller_count: %i)\n", caller_count);
4057 if (!good_cloning_opportunity_p (node, val->local_time_benefit,
4058 freq_sum, count_sum,
4059 val->local_size_cost)
4060 && !good_cloning_opportunity_p (node,
4061 val->local_time_benefit
4062 + val->prop_time_benefit,
4063 freq_sum, count_sum,
4064 val->local_size_cost
4065 + val->prop_size_cost))
4066 return false;
4068 if (dump_file)
4069 fprintf (dump_file, " Creating a specialized node of %s/%i.\n",
4070 node->name (), node->order);
4072 callers = gather_edges_for_value (val, node, caller_count);
4073 if (offset == -1)
4074 modify_known_vectors_with_val (&known_csts, &known_contexts, val, index);
4075 else
4077 known_csts = known_csts.copy ();
4078 known_contexts = copy_useful_known_contexts (known_contexts);
4080 find_more_scalar_values_for_callers_subset (node, known_csts, callers);
4081 find_more_contexts_for_caller_subset (node, &known_contexts, callers);
4082 aggvals = find_aggregate_values_for_callers_subset (node, callers);
4083 gcc_checking_assert (ipcp_val_agg_replacement_ok_p (aggvals, index,
4084 offset, val->value));
4085 val->spec_node = create_specialized_node (node, known_csts, known_contexts,
4086 aggvals, callers);
4087 overall_size += val->local_size_cost;
4089 /* TODO: If for some lattice there is only one other known value
4090 left, make a special node for it too. */
4092 return true;
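/* To summarize the gating above: a value that already has a clone only gets
   extra callers, a candidate is dropped as soon as the unit-wide size
   budget (max_new_size) would be exceeded, there must be at least one edge
   that actually brings the value, and the estimated time benefit -- first
   the local one alone, then including the effects propagated to callees --
   must justify the corresponding size cost for the accumulated caller
   frequencies and counts.  Only then are the caller-specific constants,
   contexts and aggregate values collected and a specialized node
   created.  */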
4095 /* Decide whether and what specialized clones of NODE should be created. */
4097 static bool
4098 decide_whether_version_node (struct cgraph_node *node)
4100 struct ipa_node_params *info = IPA_NODE_REF (node);
4101 int i, count = ipa_get_param_count (info);
4102 vec<tree> known_csts;
4103 vec<ipa_polymorphic_call_context> known_contexts;
4104 vec<ipa_agg_jump_function> known_aggs = vNULL;
4105 bool ret = false;
4107 if (count == 0)
4108 return false;
4110 if (dump_file && (dump_flags & TDF_DETAILS))
4111 fprintf (dump_file, "\nEvaluating opportunities for %s/%i.\n",
4112 node->name (), node->order);
4114 gather_context_independent_values (info, &known_csts, &known_contexts,
4115 info->do_clone_for_all_contexts ? &known_aggs
4116 : NULL, NULL);
4118 for (i = 0; i < count ;i++)
4120 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
4121 ipcp_lattice<tree> *lat = &plats->itself;
4122 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
4124 if (!lat->bottom
4125 && !known_csts[i])
4127 ipcp_value<tree> *val;
4128 for (val = lat->values; val; val = val->next)
4129 ret |= decide_about_value (node, i, -1, val, known_csts,
4130 known_contexts);
4133 if (!plats->aggs_bottom)
4135 struct ipcp_agg_lattice *aglat;
4136 ipcp_value<tree> *val;
4137 for (aglat = plats->aggs; aglat; aglat = aglat->next)
4138 if (!aglat->bottom && aglat->values
4139 /* If the following is false, the one value is in
4140 known_aggs. */
4141 && (plats->aggs_contain_variable
4142 || !aglat->is_single_const ()))
4143 for (val = aglat->values; val; val = val->next)
4144 ret |= decide_about_value (node, i, aglat->offset, val,
4145 known_csts, known_contexts);
4148 if (!ctxlat->bottom
4149 && known_contexts[i].useless_p ())
4151 ipcp_value<ipa_polymorphic_call_context> *val;
4152 for (val = ctxlat->values; val; val = val->next)
4153 ret |= decide_about_value (node, i, -1, val, known_csts,
4154 known_contexts);
4157 info = IPA_NODE_REF (node);
4160 if (info->do_clone_for_all_contexts)
4162 struct cgraph_node *clone;
4163 vec<cgraph_edge *> callers;
4165 if (dump_file)
4166 fprintf (dump_file, " - Creating a specialized node of %s/%i "
4167 "for all known contexts.\n", node->name (),
4168 node->order);
4170 callers = node->collect_callers ();
4172 if (!known_contexts_useful_p (known_contexts))
4174 known_contexts.release ();
4175 known_contexts = vNULL;
4177 clone = create_specialized_node (node, known_csts, known_contexts,
4178 known_aggs_to_agg_replacement_list (known_aggs),
4179 callers);
4180 info = IPA_NODE_REF (node);
4181 info->do_clone_for_all_contexts = false;
4182 IPA_NODE_REF (clone)->is_all_contexts_clone = true;
4183 for (i = 0; i < count ; i++)
4184 vec_free (known_aggs[i].items);
4185 known_aggs.release ();
4186 ret = true;
4188 else
4190 known_csts.release ();
4191 known_contexts.release ();
4194 return ret;
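/* The loop above considers, for every parameter, three independent sources
   of candidate values: the scalar lattice (unless a context-independent
   constant is already known), every aggregate lattice whose value is not
   already a single context-independent constant, and the polymorphic
   context lattice (unless a useful context is already known independently
   of the caller).  If the earlier stages additionally requested it, one
   more clone specialized for all context-independent values is created at
   the end.  */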
4197 /* Transitively mark all callees of NODE within the same SCC as not dead. */
4199 static void
4200 spread_undeadness (struct cgraph_node *node)
4202 struct cgraph_edge *cs;
4204 for (cs = node->callees; cs; cs = cs->next_callee)
4205 if (ipa_edge_within_scc (cs))
4207 struct cgraph_node *callee;
4208 struct ipa_node_params *info;
4210 callee = cs->callee->function_symbol (NULL);
4211 info = IPA_NODE_REF (callee);
4213 if (info->node_dead)
4215 info->node_dead = 0;
4216 spread_undeadness (callee);
4221 /* Return true if NODE has a caller from outside of its SCC that is not
4222 dead. Worker callback for cgraph_for_node_and_aliases. */
4224 static bool
4225 has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
4226 void *data ATTRIBUTE_UNUSED)
4228 struct cgraph_edge *cs;
4230 for (cs = node->callers; cs; cs = cs->next_caller)
4231 if (cs->caller->thunk.thunk_p
4232 && cs->caller->call_for_symbol_thunks_and_aliases
4233 (has_undead_caller_from_outside_scc_p, NULL, true))
4234 return true;
4235 else if (!ipa_edge_within_scc (cs)
4236 && !IPA_NODE_REF (cs->caller)->node_dead)
4237 return true;
4238 return false;
4242 /* Identify nodes within the same SCC as NODE which are no longer needed
4243 because of new clones and will be removed as unreachable. */
4245 static void
4246 identify_dead_nodes (struct cgraph_node *node)
4248 struct cgraph_node *v;
4249 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4250 if (v->will_be_removed_from_program_if_no_direct_calls_p ()
4251 && !v->call_for_symbol_thunks_and_aliases
4252 (has_undead_caller_from_outside_scc_p, NULL, true))
4253 IPA_NODE_REF (v)->node_dead = 1;
4255 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4256 if (!IPA_NODE_REF (v)->node_dead)
4257 spread_undeadness (v);
4259 if (dump_file && (dump_flags & TDF_DETAILS))
4261 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4262 if (IPA_NODE_REF (v)->node_dead)
4263 fprintf (dump_file, " Marking node as dead: %s/%i.\n",
4264 v->name (), v->order);
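/* The marking proceeds in two sweeps: first every member of the SCC that
   could be removed once it receives no direct calls and that has no live
   caller outside the SCC is tentatively declared dead, then liveness is
   spread from the remaining members along intra-SCC call edges so that
   nothing reachable from a surviving node stays marked.  */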
4268 /* The decision stage. Iterate over the topological order of call graph nodes
4269 TOPO and make specialized clones if deemed beneficial. */
4271 static void
4272 ipcp_decision_stage (struct ipa_topo_info *topo)
4274 int i;
4276 if (dump_file)
4277 fprintf (dump_file, "\nIPA decision stage:\n\n");
4279 for (i = topo->nnodes - 1; i >= 0; i--)
4281 struct cgraph_node *node = topo->order[i];
4282 bool change = false, iterate = true;
4284 while (iterate)
4286 struct cgraph_node *v;
4287 iterate = false;
4288 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4289 if (v->has_gimple_body_p ()
4290 && ipcp_versionable_function_p (v))
4291 iterate |= decide_whether_version_node (v);
4293 change |= iterate;
4295 if (change)
4296 identify_dead_nodes (node);
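/* Each strongly connected component is re-examined until no member gains a
   new clone in a full sweep, because specializing one function of a cycle
   can make specializing another one worthwhile.  Dead nodes are identified
   only in components where something actually changed.  */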
4300 /* Look up all alignment information that we have discovered and copy it over
4301 to the transformation summary. */
4303 static void
4304 ipcp_store_alignment_results (void)
4306 cgraph_node *node;
4308 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
4310 ipa_node_params *info = IPA_NODE_REF (node);
4311 bool dumped_sth = false;
4312 bool found_useful_result = false;
4314 if (info->ipcp_orig_node)
4315 info = IPA_NODE_REF (info->ipcp_orig_node);
4317 unsigned count = ipa_get_param_count (info);
4318 for (unsigned i = 0; i < count ; i++)
4320 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
4321 if (plats->alignment.known
4322 && plats->alignment.align > 0)
4324 found_useful_result = true;
4325 break;
4328 if (!found_useful_result)
4329 continue;
4331 ipcp_grow_transformations_if_necessary ();
4332 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4333 vec_safe_reserve_exact (ts->alignments, count);
4335 for (unsigned i = 0; i < count ; i++)
4337 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
4339 if (plats->alignment.align == 0)
4340 plats->alignment.known = false;
4342 ts->alignments->quick_push (plats->alignment);
4343 if (!dump_file || !plats->alignment.known)
4344 continue;
4345 if (!dumped_sth)
4347 fprintf (dump_file, "Propagated alignment info for function %s/%i:\n",
4348 node->name (), node->order);
4349 dumped_sth = true;
4351 fprintf (dump_file, " param %i: align: %u, misalign: %u\n",
4352 i, plats->alignment.align, plats->alignment.misalign);
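/* For clones, the lattices of the node they were cloned from are consulted
   (the clone has no lattices of its own), while the resulting summary is
   attached to the clone itself.  An alignment whose ALIGN field is zero is
   not considered a useful result and is downgraded to "unknown" before
   being stored.  */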
4357 /* The IPCP driver. */
4359 static unsigned int
4360 ipcp_driver (void)
4362 struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
4363 struct cgraph_edge_hook_list *edge_removal_hook_holder;
4364 struct ipa_topo_info topo;
4366 ipa_check_create_node_params ();
4367 ipa_check_create_edge_args ();
4368 grow_edge_clone_vectors ();
4369 edge_duplication_hook_holder =
4370 symtab->add_edge_duplication_hook (&ipcp_edge_duplication_hook, NULL);
4371 edge_removal_hook_holder =
4372 symtab->add_edge_removal_hook (&ipcp_edge_removal_hook, NULL);
4374 ipcp_cst_values_pool = create_alloc_pool ("IPA-CP constant values",
4375 sizeof (ipcp_value<tree>), 32);
4376 ipcp_poly_ctx_values_pool = create_alloc_pool
4377 ("IPA-CP polymorphic contexts",
4378 sizeof (ipcp_value<ipa_polymorphic_call_context>), 32);
4379 ipcp_sources_pool = create_alloc_pool ("IPA-CP value sources",
4380 sizeof (ipcp_value_source<tree>), 64);
4381 ipcp_agg_lattice_pool = create_alloc_pool ("IPA_CP aggregate lattices",
4382 sizeof (struct ipcp_agg_lattice),
4383 32);
4384 if (dump_file)
4386 fprintf (dump_file, "\nIPA structures before propagation:\n");
4387 if (dump_flags & TDF_DETAILS)
4388 ipa_print_all_params (dump_file);
4389 ipa_print_all_jump_functions (dump_file);
4392 /* Topological sort. */
4393 build_toporder_info (&topo);
4394 /* Do the interprocedural propagation. */
4395 ipcp_propagate_stage (&topo);
4396 /* Decide what constant propagation and cloning should be performed. */
4397 ipcp_decision_stage (&topo);
4398 /* Store results of alignment propagation. */
4399 ipcp_store_alignment_results ();
4401 /* Free all IPCP structures. */
4402 free_toporder_info (&topo);
4403 next_edge_clone.release ();
4404 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
4405 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
4406 ipa_free_all_structures_after_ipa_cp ();
4407 if (dump_file)
4408 fprintf (dump_file, "\nIPA constant propagation end\n");
4409 return 0;
4412 /* Initialization and computation of IPCP data structures. This is the initial
4413 intraprocedural analysis of functions, which gathers information to be
4414 propagated later on. */
4416 static void
4417 ipcp_generate_summary (void)
4419 struct cgraph_node *node;
4421 if (dump_file)
4422 fprintf (dump_file, "\nIPA constant propagation start:\n");
4423 ipa_register_cgraph_hooks ();
4425 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
4427 node->local.versionable
4428 = tree_versionable_function_p (node->decl);
4429 ipa_analyze_node (node);
4433 /* Write ipcp summary (jump functions) for the nodes being streamed out. */
4435 static void
4436 ipcp_write_summary (void)
4438 ipa_prop_write_jump_functions ();
4441 /* Read ipcp summary. */
4443 static void
4444 ipcp_read_summary (void)
4446 ipa_prop_read_jump_functions ();
4449 namespace {
4451 const pass_data pass_data_ipa_cp =
4453 IPA_PASS, /* type */
4454 "cp", /* name */
4455 OPTGROUP_NONE, /* optinfo_flags */
4456 TV_IPA_CONSTANT_PROP, /* tv_id */
4457 0, /* properties_required */
4458 0, /* properties_provided */
4459 0, /* properties_destroyed */
4460 0, /* todo_flags_start */
4461 ( TODO_dump_symtab | TODO_remove_functions ), /* todo_flags_finish */
4464 class pass_ipa_cp : public ipa_opt_pass_d
4466 public:
4467 pass_ipa_cp (gcc::context *ctxt)
4468 : ipa_opt_pass_d (pass_data_ipa_cp, ctxt,
4469 ipcp_generate_summary, /* generate_summary */
4470 ipcp_write_summary, /* write_summary */
4471 ipcp_read_summary, /* read_summary */
4472 ipcp_write_transformation_summaries, /*
4473 write_optimization_summary */
4474 ipcp_read_transformation_summaries, /*
4475 read_optimization_summary */
4476 NULL, /* stmt_fixup */
4477 0, /* function_transform_todo_flags_start */
4478 ipcp_transform_function, /* function_transform */
4479 NULL) /* variable_transform */
4482 /* opt_pass methods: */
4483 virtual bool gate (function *)
4485 /* FIXME: We should remove the optimize check after we ensure we never run
4486 IPA passes when not optimizing. */
4487 return (flag_ipa_cp && optimize) || in_lto_p;
4490 virtual unsigned int execute (function *) { return ipcp_driver (); }
4492 }; // class pass_ipa_cp
4494 } // anon namespace
4496 ipa_opt_pass_d *
4497 make_pass_ipa_cp (gcc::context *ctxt)
4499 return new pass_ipa_cp (ctxt);
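/* The pass is instantiated from gcc/passes.def via NEXT_PASS (pass_ipa_cp);
   the per-function changes it decides on are later applied by the
   ipcp_transform_function hook registered in the pass structure above.  */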
4502 /* Reset all state within ipa-cp.c so that we can rerun the compiler
4503 within the same process. For use by toplev::finalize. */
4505 void
4506 ipa_cp_c_finalize (void)
4508 max_count = 0;
4509 overall_size = 0;
4510 max_new_size = 0;