2018-11-12 Richard Biener <rguenther@suse.de>
[official-gcc.git] / gcc / ipa-cp.c
blob81da108fb623f02c1079671ecb9d8eb0c7367231
1 /* Interprocedural constant propagation
2 Copyright (C) 2005-2018 Free Software Foundation, Inc.
4 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
5 <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Interprocedural constant propagation (IPA-CP).
25 The goal of this transformation is to
27 1) discover functions which are always invoked with some arguments with the
28 same known constant values and modify the functions so that the
29 subsequent optimizations can take advantage of the knowledge, and
31 2) partial specialization - create specialized versions of functions
32 transformed in this way if some parameters are known constants only in
33 certain contexts but the estimated tradeoff between speedup and cost size
34 is deemed good.
36 The algorithm also propagates types and attempts to perform type based
37 devirtualization. Types are propagated much like constants.
39 The algorithm basically consists of three stages. In the first, functions
40 are analyzed one at a time and jump functions are constructed for all known
41 call-sites. In the second phase, the pass propagates information from the
42 jump functions across the call to reveal what values are available at what
43 call sites, performs estimations of effects of known values on functions and
44 their callees, and finally decides what specialized extra versions should be
45 created. In the third, the special versions materialize and appropriate
46 calls are redirected.
48 The algorithm used is to a certain extent based on "Interprocedural Constant
49 Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
50 Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
51 Cooper, Mary W. Hall, and Ken Kennedy.
54 First stage - intraprocedural analysis
55 =======================================
57 This phase computes jump_function and modification flags.
59 A jump function for a call-site represents the values passed as an actual
60 arguments of a given call-site. In principle, there are three types of
61 values:
63 Pass through - the caller's formal parameter is passed as an actual
64 argument, plus an operation on it can be performed.
65 Constant - a constant is passed as an actual argument.
66 Unknown - neither of the above.
68 All jump function types are described in detail in ipa-prop.h, together with
69 the data structures that represent them and methods of accessing them.
71 ipcp_generate_summary() is the main function of the first stage.
73 Second stage - interprocedural analysis
74 ========================================
76 This stage is itself divided into two phases. In the first, we propagate
77 known values over the call graph, in the second, we make cloning decisions.
78 It uses a different algorithm than the original Callahan's paper.
80 First, we traverse the functions topologically from callers to callees and,
81 for each strongly connected component (SCC), we propagate constants
82 according to previously computed jump functions. We also record what known
83 values depend on other known values and estimate local effects. Finally, we
84 propagate cumulative information about these effects from dependent values
85 to those on which they depend.
87 Second, we again traverse the call graph in the same topological order and
88 make clones for functions which we know are called with the same values in
89 all contexts and decide about extra specialized clones of functions just for
90 some contexts - these decisions are based on both local estimates and
91 cumulative estimates propagated from callees.
   ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
   second stage.
96 Third phase - materialization of clones, call statement updates.
97 ============================================
99 This stage is currently performed by call graph code (mainly in cgraphunit.c
100 and tree-inline.c) according to instructions inserted to the call graph by
101 the second stage. */
103 #include "config.h"
104 #include "system.h"
105 #include "coretypes.h"
106 #include "backend.h"
107 #include "tree.h"
108 #include "gimple-expr.h"
109 #include "predict.h"
110 #include "alloc-pool.h"
111 #include "tree-pass.h"
112 #include "cgraph.h"
113 #include "diagnostic.h"
114 #include "fold-const.h"
115 #include "gimple-fold.h"
116 #include "symbol-summary.h"
117 #include "tree-vrp.h"
118 #include "ipa-prop.h"
119 #include "tree-pretty-print.h"
120 #include "tree-inline.h"
121 #include "params.h"
122 #include "ipa-fnsummary.h"
123 #include "ipa-utils.h"
124 #include "tree-ssa-ccp.h"
125 #include "stringpool.h"
126 #include "attribs.h"
128 template <typename valtype> class ipcp_value;
130 /* Describes a particular source for an IPA-CP value. */
132 template <typename valtype>
133 class ipcp_value_source
135 public:
136 /* Aggregate offset of the source, negative if the source is scalar value of
137 the argument itself. */
138 HOST_WIDE_INT offset;
139 /* The incoming edge that brought the value. */
140 cgraph_edge *cs;
141 /* If the jump function that resulted into his value was a pass-through or an
142 ancestor, this is the ipcp_value of the caller from which the described
143 value has been derived. Otherwise it is NULL. */
144 ipcp_value<valtype> *val;
145 /* Next pointer in a linked list of sources of a value. */
146 ipcp_value_source *next;
147 /* If the jump function that resulted into his value was a pass-through or an
148 ancestor, this is the index of the parameter of the caller the jump
149 function references. */
150 int index;
/* Common ancestor for all ipcp_value instantiations.  */

class ipcp_value_base
{
public:
  /* Estimated time benefit and size cost that specializing the function for
     this value would bring about in this function alone.  */
  int local_time_benefit, local_size_cost;
  /* Estimated time benefit and size cost that specializing the function for
     this value can bring about in its callees (transitively).  */
  int prop_time_benefit, prop_size_cost;

  ipcp_value_base ()
    : local_time_benefit (0), local_size_cost (0),
      prop_time_benefit (0), prop_size_cost (0) {}
};
170 /* Describes one particular value stored in struct ipcp_lattice. */
172 template <typename valtype>
173 class ipcp_value : public ipcp_value_base
175 public:
176 /* The actual value for the given parameter. */
177 valtype value;
178 /* The list of sources from which this value originates. */
179 ipcp_value_source <valtype> *sources;
180 /* Next pointers in a linked list of all values in a lattice. */
181 ipcp_value *next;
182 /* Next pointers in a linked list of values in a strongly connected component
183 of values. */
184 ipcp_value *scc_next;
185 /* Next pointers in a linked list of SCCs of values sorted topologically
186 according their sources. */
187 ipcp_value *topo_next;
188 /* A specialized node created for this value, NULL if none has been (so far)
189 created. */
190 cgraph_node *spec_node;
191 /* Depth first search number and low link for topological sorting of
192 values. */
193 int dfs, low_link;
194 /* True if this valye is currently on the topo-sort stack. */
195 bool on_stack;
197 ipcp_value()
198 : sources (0), next (0), scc_next (0), topo_next (0),
199 spec_node (0), dfs (0), low_link (0), on_stack (false) {}
201 void add_source (cgraph_edge *cs, ipcp_value *src_val, int src_idx,
202 HOST_WIDE_INT offset);
205 /* Lattice describing potential values of a formal parameter of a function, or
206 a part of an aggregate. TOP is represented by a lattice with zero values
207 and with contains_variable and bottom flags cleared. BOTTOM is represented
208 by a lattice with the bottom flag set. In that case, values and
209 contains_variable flag should be disregarded. */
211 template <typename valtype>
212 class ipcp_lattice
214 public:
215 /* The list of known values and types in this lattice. Note that values are
216 not deallocated if a lattice is set to bottom because there may be value
217 sources referencing them. */
218 ipcp_value<valtype> *values;
219 /* Number of known values and types in this lattice. */
220 int values_count;
221 /* The lattice contains a variable component (in addition to values). */
222 bool contains_variable;
223 /* The value of the lattice is bottom (i.e. variable and unusable for any
224 propagation). */
225 bool bottom;
227 inline bool is_single_const ();
228 inline bool set_to_bottom ();
229 inline bool set_contains_variable ();
230 bool add_value (valtype newval, cgraph_edge *cs,
231 ipcp_value<valtype> *src_val = NULL,
232 int src_idx = 0, HOST_WIDE_INT offset = -1);
233 void print (FILE * f, bool dump_sources, bool dump_benefits);
236 /* Lattice of tree values with an offset to describe a part of an
237 aggregate. */
239 class ipcp_agg_lattice : public ipcp_lattice<tree>
241 public:
242 /* Offset that is being described by this lattice. */
243 HOST_WIDE_INT offset;
244 /* Size so that we don't have to re-compute it every time we traverse the
245 list. Must correspond to TYPE_SIZE of all lat values. */
246 HOST_WIDE_INT size;
247 /* Next element of the linked list. */
248 struct ipcp_agg_lattice *next;
251 /* Lattice of known bits, only capable of holding one value.
252 Bitwise constant propagation propagates which bits of a
253 value are constant.
254 For eg:
255 int f(int x)
257 return some_op (x);
260 int f1(int y)
262 if (cond)
263 return f (y & 0xff);
264 else
265 return f (y & 0xf);
268 In the above case, the param 'x' will always have all
269 the bits (except the bits in lsb) set to 0.
270 Hence the mask of 'x' would be 0xff. The mask
271 reflects that the bits in lsb are unknown.
272 The actual propagated value is given by m_value & ~m_mask. */
274 class ipcp_bits_lattice
276 public:
277 bool bottom_p () { return m_lattice_val == IPA_BITS_VARYING; }
278 bool top_p () { return m_lattice_val == IPA_BITS_UNDEFINED; }
279 bool constant_p () { return m_lattice_val == IPA_BITS_CONSTANT; }
280 bool set_to_bottom ();
281 bool set_to_constant (widest_int, widest_int);
283 widest_int get_value () { return m_value; }
284 widest_int get_mask () { return m_mask; }
286 bool meet_with (ipcp_bits_lattice& other, unsigned, signop,
287 enum tree_code, tree);
289 bool meet_with (widest_int, widest_int, unsigned);
291 void print (FILE *);
293 private:
294 enum { IPA_BITS_UNDEFINED, IPA_BITS_CONSTANT, IPA_BITS_VARYING } m_lattice_val;
296 /* Similar to ccp_lattice_t, mask represents which bits of value are constant.
297 If a bit in mask is set to 0, then the corresponding bit in
298 value is known to be constant. */
299 widest_int m_value, m_mask;
301 bool meet_with_1 (widest_int, widest_int, unsigned);
302 void get_value_and_mask (tree, widest_int *, widest_int *);
305 /* Lattice of value ranges. */
307 class ipcp_vr_lattice
309 public:
310 value_range_base m_vr;
312 inline bool bottom_p () const;
313 inline bool top_p () const;
314 inline bool set_to_bottom ();
315 bool meet_with (const value_range_base *p_vr);
316 bool meet_with (const ipcp_vr_lattice &other);
317 void init () { gcc_assert (m_vr.undefined_p ()); }
318 void print (FILE * f);
320 private:
321 bool meet_with_1 (const value_range_base *other_vr);
324 /* Structure containing lattices for a parameter itself and for pieces of
325 aggregates that are passed in the parameter or by a reference in a parameter
326 plus some other useful flags. */
328 class ipcp_param_lattices
330 public:
331 /* Lattice describing the value of the parameter itself. */
332 ipcp_lattice<tree> itself;
333 /* Lattice describing the polymorphic contexts of a parameter. */
334 ipcp_lattice<ipa_polymorphic_call_context> ctxlat;
335 /* Lattices describing aggregate parts. */
336 ipcp_agg_lattice *aggs;
337 /* Lattice describing known bits. */
338 ipcp_bits_lattice bits_lattice;
339 /* Lattice describing value range. */
340 ipcp_vr_lattice m_value_range;
341 /* Number of aggregate lattices */
342 int aggs_count;
343 /* True if aggregate data were passed by reference (as opposed to by
344 value). */
345 bool aggs_by_ref;
346 /* All aggregate lattices contain a variable component (in addition to
347 values). */
348 bool aggs_contain_variable;
349 /* The value of all aggregate lattices is bottom (i.e. variable and unusable
350 for any propagation). */
351 bool aggs_bottom;
353 /* There is a virtual call based on this parameter. */
354 bool virt_call;
357 /* Allocation pools for values and their sources in ipa-cp. */
359 object_allocator<ipcp_value<tree> > ipcp_cst_values_pool
360 ("IPA-CP constant values");
362 object_allocator<ipcp_value<ipa_polymorphic_call_context> >
363 ipcp_poly_ctx_values_pool ("IPA-CP polymorphic contexts");
365 object_allocator<ipcp_value_source<tree> > ipcp_sources_pool
366 ("IPA-CP value sources");
368 object_allocator<ipcp_agg_lattice> ipcp_agg_lattice_pool
369 ("IPA_CP aggregate lattices");
371 /* Maximal count found in program. */
373 static profile_count max_count;
375 /* Original overall size of the program. */
377 static long overall_size, max_new_size;
379 /* Return the param lattices structure corresponding to the Ith formal
380 parameter of the function described by INFO. */
381 static inline struct ipcp_param_lattices *
382 ipa_get_parm_lattices (struct ipa_node_params *info, int i)
384 gcc_assert (i >= 0 && i < ipa_get_param_count (info));
385 gcc_checking_assert (!info->ipcp_orig_node);
386 gcc_checking_assert (info->lattices);
387 return &(info->lattices[i]);
390 /* Return the lattice corresponding to the scalar value of the Ith formal
391 parameter of the function described by INFO. */
392 static inline ipcp_lattice<tree> *
393 ipa_get_scalar_lat (struct ipa_node_params *info, int i)
395 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
396 return &plats->itself;
399 /* Return the lattice corresponding to the scalar value of the Ith formal
400 parameter of the function described by INFO. */
401 static inline ipcp_lattice<ipa_polymorphic_call_context> *
402 ipa_get_poly_ctx_lat (struct ipa_node_params *info, int i)
404 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
405 return &plats->ctxlat;
408 /* Return whether LAT is a lattice with a single constant and without an
409 undefined value. */
411 template <typename valtype>
412 inline bool
413 ipcp_lattice<valtype>::is_single_const ()
415 if (bottom || contains_variable || values_count != 1)
416 return false;
417 else
418 return true;
421 /* Print V which is extracted from a value in a lattice to F. */
423 static void
424 print_ipcp_constant_value (FILE * f, tree v)
426 if (TREE_CODE (v) == ADDR_EXPR
427 && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
429 fprintf (f, "& ");
430 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)));
432 else
433 print_generic_expr (f, v);
436 /* Print V which is extracted from a value in a lattice to F. */
438 static void
439 print_ipcp_constant_value (FILE * f, ipa_polymorphic_call_context v)
441 v.dump(f, false);
444 /* Print a lattice LAT to F. */
446 template <typename valtype>
447 void
448 ipcp_lattice<valtype>::print (FILE * f, bool dump_sources, bool dump_benefits)
450 ipcp_value<valtype> *val;
451 bool prev = false;
453 if (bottom)
455 fprintf (f, "BOTTOM\n");
456 return;
459 if (!values_count && !contains_variable)
461 fprintf (f, "TOP\n");
462 return;
465 if (contains_variable)
467 fprintf (f, "VARIABLE");
468 prev = true;
469 if (dump_benefits)
470 fprintf (f, "\n");
473 for (val = values; val; val = val->next)
475 if (dump_benefits && prev)
476 fprintf (f, " ");
477 else if (!dump_benefits && prev)
478 fprintf (f, ", ");
479 else
480 prev = true;
482 print_ipcp_constant_value (f, val->value);
484 if (dump_sources)
486 ipcp_value_source<valtype> *s;
488 fprintf (f, " [from:");
489 for (s = val->sources; s; s = s->next)
490 fprintf (f, " %i(%f)", s->cs->caller->order,
491 s->cs->sreal_frequency ().to_double ());
492 fprintf (f, "]");
495 if (dump_benefits)
496 fprintf (f, " [loc_time: %i, loc_size: %i, "
497 "prop_time: %i, prop_size: %i]\n",
498 val->local_time_benefit, val->local_size_cost,
499 val->prop_time_benefit, val->prop_size_cost);
501 if (!dump_benefits)
502 fprintf (f, "\n");
505 void
506 ipcp_bits_lattice::print (FILE *f)
508 if (top_p ())
509 fprintf (f, " Bits unknown (TOP)\n");
510 else if (bottom_p ())
511 fprintf (f, " Bits unusable (BOTTOM)\n");
512 else
514 fprintf (f, " Bits: value = "); print_hex (get_value (), f);
515 fprintf (f, ", mask = "); print_hex (get_mask (), f);
516 fprintf (f, "\n");
520 /* Print value range lattice to F. */
522 void
523 ipcp_vr_lattice::print (FILE * f)
525 dump_value_range (f, &m_vr);
528 /* Print all ipcp_lattices of all functions to F. */
530 static void
531 print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
533 struct cgraph_node *node;
534 int i, count;
536 fprintf (f, "\nLattices:\n");
537 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
539 struct ipa_node_params *info;
541 info = IPA_NODE_REF (node);
542 fprintf (f, " Node: %s:\n", node->dump_name ());
543 count = ipa_get_param_count (info);
544 for (i = 0; i < count; i++)
546 struct ipcp_agg_lattice *aglat;
547 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
548 fprintf (f, " param [%d]: ", i);
549 plats->itself.print (f, dump_sources, dump_benefits);
550 fprintf (f, " ctxs: ");
551 plats->ctxlat.print (f, dump_sources, dump_benefits);
552 plats->bits_lattice.print (f);
553 fprintf (f, " ");
554 plats->m_value_range.print (f);
555 fprintf (f, "\n");
556 if (plats->virt_call)
557 fprintf (f, " virt_call flag set\n");
559 if (plats->aggs_bottom)
561 fprintf (f, " AGGS BOTTOM\n");
562 continue;
564 if (plats->aggs_contain_variable)
565 fprintf (f, " AGGS VARIABLE\n");
566 for (aglat = plats->aggs; aglat; aglat = aglat->next)
568 fprintf (f, " %soffset " HOST_WIDE_INT_PRINT_DEC ": ",
569 plats->aggs_by_ref ? "ref " : "", aglat->offset);
570 aglat->print (f, dump_sources, dump_benefits);
576 /* Determine whether it is at all technically possible to create clones of NODE
577 and store this information in the ipa_node_params structure associated
578 with NODE. */
580 static void
581 determine_versionability (struct cgraph_node *node,
582 struct ipa_node_params *info)
584 const char *reason = NULL;
586 /* There are a number of generic reasons functions cannot be versioned. We
587 also cannot remove parameters if there are type attributes such as fnspec
588 present. */
589 if (node->alias || node->thunk.thunk_p)
590 reason = "alias or thunk";
591 else if (!node->local.versionable)
592 reason = "not a tree_versionable_function";
593 else if (node->get_availability () <= AVAIL_INTERPOSABLE)
594 reason = "insufficient body availability";
595 else if (!opt_for_fn (node->decl, optimize)
596 || !opt_for_fn (node->decl, flag_ipa_cp))
597 reason = "non-optimized function";
598 else if (lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (node->decl)))
600 /* Ideally we should clone the SIMD clones themselves and create
601 vector copies of them, so IPA-cp and SIMD clones can happily
602 coexist, but that may not be worth the effort. */
603 reason = "function has SIMD clones";
605 else if (lookup_attribute ("target_clones", DECL_ATTRIBUTES (node->decl)))
607 /* Ideally we should clone the target clones themselves and create
608 copies of them, so IPA-cp and target clones can happily
609 coexist, but that may not be worth the effort. */
610 reason = "function target_clones attribute";
612 /* Don't clone decls local to a comdat group; it breaks and for C++
613 decloned constructors, inlining is always better anyway. */
614 else if (node->comdat_local_p ())
615 reason = "comdat-local function";
616 else if (node->calls_comdat_local)
618 /* TODO: call is versionable if we make sure that all
619 callers are inside of a comdat group. */
620 reason = "calls comdat-local function";
623 /* Functions calling BUILT_IN_VA_ARG_PACK and BUILT_IN_VA_ARG_PACK_LEN
624 work only when inlined. Cloning them may still lead to better code
625 because ipa-cp will not give up on cloning further. If the function is
626 external this however leads to wrong code because we may end up producing
627 offline copy of the function. */
628 if (DECL_EXTERNAL (node->decl))
629 for (cgraph_edge *edge = node->callees; !reason && edge;
630 edge = edge->next_callee)
631 if (fndecl_built_in_p (edge->callee->decl, BUILT_IN_NORMAL))
633 if (DECL_FUNCTION_CODE (edge->callee->decl) == BUILT_IN_VA_ARG_PACK)
634 reason = "external function which calls va_arg_pack";
635 if (DECL_FUNCTION_CODE (edge->callee->decl)
636 == BUILT_IN_VA_ARG_PACK_LEN)
637 reason = "external function which calls va_arg_pack_len";
640 if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
641 fprintf (dump_file, "Function %s is not versionable, reason: %s.\n",
642 node->dump_name (), reason);
644 info->versionable = (reason == NULL);
647 /* Return true if it is at all technically possible to create clones of a
648 NODE. */
650 static bool
651 ipcp_versionable_function_p (struct cgraph_node *node)
653 return IPA_NODE_REF (node)->versionable;
656 /* Structure holding accumulated information about callers of a node. */
658 struct caller_statistics
660 profile_count count_sum;
661 int n_calls, n_hot_calls, freq_sum;
664 /* Initialize fields of STAT to zeroes. */
666 static inline void
667 init_caller_stats (struct caller_statistics *stats)
669 stats->count_sum = profile_count::zero ();
670 stats->n_calls = 0;
671 stats->n_hot_calls = 0;
672 stats->freq_sum = 0;
675 /* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
676 non-thunk incoming edges to NODE. */
678 static bool
679 gather_caller_stats (struct cgraph_node *node, void *data)
681 struct caller_statistics *stats = (struct caller_statistics *) data;
682 struct cgraph_edge *cs;
684 for (cs = node->callers; cs; cs = cs->next_caller)
685 if (!cs->caller->thunk.thunk_p)
687 if (cs->count.ipa ().initialized_p ())
688 stats->count_sum += cs->count.ipa ();
689 stats->freq_sum += cs->frequency ();
690 stats->n_calls++;
691 if (cs->maybe_hot_p ())
692 stats->n_hot_calls ++;
694 return false;
698 /* Return true if this NODE is viable candidate for cloning. */
700 static bool
701 ipcp_cloning_candidate_p (struct cgraph_node *node)
703 struct caller_statistics stats;
705 gcc_checking_assert (node->has_gimple_body_p ());
707 if (!opt_for_fn (node->decl, flag_ipa_cp_clone))
709 if (dump_file)
710 fprintf (dump_file, "Not considering %s for cloning; "
711 "-fipa-cp-clone disabled.\n",
712 node->name ());
713 return false;
716 if (node->optimize_for_size_p ())
718 if (dump_file)
719 fprintf (dump_file, "Not considering %s for cloning; "
720 "optimizing it for size.\n",
721 node->name ());
722 return false;
725 init_caller_stats (&stats);
726 node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats, false);
728 if (ipa_fn_summaries->get (node)->self_size < stats.n_calls)
730 if (dump_file)
731 fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
732 node->name ());
733 return true;
736 /* When profile is available and function is hot, propagate into it even if
737 calls seems cold; constant propagation can improve function's speed
738 significantly. */
739 if (max_count > profile_count::zero ())
741 if (stats.count_sum > node->count.ipa ().apply_scale (90, 100))
743 if (dump_file)
744 fprintf (dump_file, "Considering %s for cloning; "
745 "usually called directly.\n",
746 node->name ());
747 return true;
750 if (!stats.n_hot_calls)
752 if (dump_file)
753 fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
754 node->name ());
755 return false;
757 if (dump_file)
758 fprintf (dump_file, "Considering %s for cloning.\n",
759 node->name ());
760 return true;
763 template <typename valtype>
764 class value_topo_info
766 public:
767 /* Head of the linked list of topologically sorted values. */
768 ipcp_value<valtype> *values_topo;
769 /* Stack for creating SCCs, represented by a linked list too. */
770 ipcp_value<valtype> *stack;
771 /* Counter driving the algorithm in add_val_to_toposort. */
772 int dfs_counter;
774 value_topo_info () : values_topo (NULL), stack (NULL), dfs_counter (0)
776 void add_val (ipcp_value<valtype> *cur_val);
777 void propagate_effects ();
780 /* Arrays representing a topological ordering of call graph nodes and a stack
781 of nodes used during constant propagation and also data required to perform
782 topological sort of values and propagation of benefits in the determined
783 order. */
785 class ipa_topo_info
787 public:
788 /* Array with obtained topological order of cgraph nodes. */
789 struct cgraph_node **order;
790 /* Stack of cgraph nodes used during propagation within SCC until all values
791 in the SCC stabilize. */
792 struct cgraph_node **stack;
793 int nnodes, stack_top;
795 value_topo_info<tree> constants;
796 value_topo_info<ipa_polymorphic_call_context> contexts;
798 ipa_topo_info () : order(NULL), stack(NULL), nnodes(0), stack_top(0),
799 constants ()
803 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
805 static void
806 build_toporder_info (struct ipa_topo_info *topo)
808 topo->order = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
809 topo->stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
811 gcc_checking_assert (topo->stack_top == 0);
812 topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
815 /* Free information about strongly connected components and the arrays in
816 TOPO. */
818 static void
819 free_toporder_info (struct ipa_topo_info *topo)
821 ipa_free_postorder_info ();
822 free (topo->order);
823 free (topo->stack);
826 /* Add NODE to the stack in TOPO, unless it is already there. */
828 static inline void
829 push_node_to_stack (struct ipa_topo_info *topo, struct cgraph_node *node)
831 struct ipa_node_params *info = IPA_NODE_REF (node);
832 if (info->node_enqueued)
833 return;
834 info->node_enqueued = 1;
835 topo->stack[topo->stack_top++] = node;
838 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
839 is empty. */
841 static struct cgraph_node *
842 pop_node_from_stack (struct ipa_topo_info *topo)
844 if (topo->stack_top)
846 struct cgraph_node *node;
847 topo->stack_top--;
848 node = topo->stack[topo->stack_top];
849 IPA_NODE_REF (node)->node_enqueued = 0;
850 return node;
852 else
853 return NULL;
856 /* Set lattice LAT to bottom and return true if it previously was not set as
857 such. */
859 template <typename valtype>
860 inline bool
861 ipcp_lattice<valtype>::set_to_bottom ()
863 bool ret = !bottom;
864 bottom = true;
865 return ret;
868 /* Mark lattice as containing an unknown value and return true if it previously
869 was not marked as such. */
871 template <typename valtype>
872 inline bool
873 ipcp_lattice<valtype>::set_contains_variable ()
875 bool ret = !contains_variable;
876 contains_variable = true;
877 return ret;
880 /* Set all aggegate lattices in PLATS to bottom and return true if they were
881 not previously set as such. */
883 static inline bool
884 set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
886 bool ret = !plats->aggs_bottom;
887 plats->aggs_bottom = true;
888 return ret;
891 /* Mark all aggegate lattices in PLATS as containing an unknown value and
892 return true if they were not previously marked as such. */
894 static inline bool
895 set_agg_lats_contain_variable (struct ipcp_param_lattices *plats)
897 bool ret = !plats->aggs_contain_variable;
898 plats->aggs_contain_variable = true;
899 return ret;
902 bool
903 ipcp_vr_lattice::meet_with (const ipcp_vr_lattice &other)
905 return meet_with_1 (&other.m_vr);
908 /* Meet the current value of the lattice with value ranfge described by VR
909 lattice. */
911 bool
912 ipcp_vr_lattice::meet_with (const value_range_base *p_vr)
914 return meet_with_1 (p_vr);
917 /* Meet the current value of the lattice with value range described by
918 OTHER_VR lattice. Return TRUE if anything changed. */
920 bool
921 ipcp_vr_lattice::meet_with_1 (const value_range_base *other_vr)
923 if (bottom_p ())
924 return false;
926 if (other_vr->varying_p ())
927 return set_to_bottom ();
929 value_range_base save (m_vr);
930 m_vr.union_ (other_vr);
931 return !m_vr.ignore_equivs_equal_p (save);
934 /* Return true if value range information in the lattice is yet unknown. */
936 bool
937 ipcp_vr_lattice::top_p () const
939 return m_vr.undefined_p ();
942 /* Return true if value range information in the lattice is known to be
943 unusable. */
945 bool
946 ipcp_vr_lattice::bottom_p () const
948 return m_vr.varying_p ();
951 /* Set value range information in the lattice to bottom. Return true if it
952 previously was in a different state. */
954 bool
955 ipcp_vr_lattice::set_to_bottom ()
957 if (m_vr.varying_p ())
958 return false;
959 m_vr.set_varying ();
960 return true;
963 /* Set lattice value to bottom, if it already isn't the case. */
965 bool
966 ipcp_bits_lattice::set_to_bottom ()
968 if (bottom_p ())
969 return false;
970 m_lattice_val = IPA_BITS_VARYING;
971 m_value = 0;
972 m_mask = -1;
973 return true;
976 /* Set to constant if it isn't already. Only meant to be called
977 when switching state from TOP. */
979 bool
980 ipcp_bits_lattice::set_to_constant (widest_int value, widest_int mask)
982 gcc_assert (top_p ());
983 m_lattice_val = IPA_BITS_CONSTANT;
984 m_value = value;
985 m_mask = mask;
986 return true;
989 /* Convert operand to value, mask form. */
991 void
992 ipcp_bits_lattice::get_value_and_mask (tree operand, widest_int *valuep, widest_int *maskp)
994 wide_int get_nonzero_bits (const_tree);
996 if (TREE_CODE (operand) == INTEGER_CST)
998 *valuep = wi::to_widest (operand);
999 *maskp = 0;
1001 else
1003 *valuep = 0;
1004 *maskp = -1;
1008 /* Meet operation, similar to ccp_lattice_meet, we xor values
1009 if this->value, value have different values at same bit positions, we want
1010 to drop that bit to varying. Return true if mask is changed.
1011 This function assumes that the lattice value is in CONSTANT state */
1013 bool
1014 ipcp_bits_lattice::meet_with_1 (widest_int value, widest_int mask,
1015 unsigned precision)
1017 gcc_assert (constant_p ());
1019 widest_int old_mask = m_mask;
1020 m_mask = (m_mask | mask) | (m_value ^ value);
1022 if (wi::sext (m_mask, precision) == -1)
1023 return set_to_bottom ();
1025 return m_mask != old_mask;
1028 /* Meet the bits lattice with operand
1029 described by <value, mask, sgn, precision. */
1031 bool
1032 ipcp_bits_lattice::meet_with (widest_int value, widest_int mask,
1033 unsigned precision)
1035 if (bottom_p ())
1036 return false;
1038 if (top_p ())
1040 if (wi::sext (mask, precision) == -1)
1041 return set_to_bottom ();
1042 return set_to_constant (value, mask);
1045 return meet_with_1 (value, mask, precision);
1048 /* Meet bits lattice with the result of bit_value_binop (other, operand)
1049 if code is binary operation or bit_value_unop (other) if code is unary op.
1050 In the case when code is nop_expr, no adjustment is required. */
1052 bool
1053 ipcp_bits_lattice::meet_with (ipcp_bits_lattice& other, unsigned precision,
1054 signop sgn, enum tree_code code, tree operand)
1056 if (other.bottom_p ())
1057 return set_to_bottom ();
1059 if (bottom_p () || other.top_p ())
1060 return false;
1062 widest_int adjusted_value, adjusted_mask;
1064 if (TREE_CODE_CLASS (code) == tcc_binary)
1066 tree type = TREE_TYPE (operand);
1067 gcc_assert (INTEGRAL_TYPE_P (type));
1068 widest_int o_value, o_mask;
1069 get_value_and_mask (operand, &o_value, &o_mask);
1071 bit_value_binop (code, sgn, precision, &adjusted_value, &adjusted_mask,
1072 sgn, precision, other.get_value (), other.get_mask (),
1073 TYPE_SIGN (type), TYPE_PRECISION (type), o_value, o_mask);
1075 if (wi::sext (adjusted_mask, precision) == -1)
1076 return set_to_bottom ();
1079 else if (TREE_CODE_CLASS (code) == tcc_unary)
1081 bit_value_unop (code, sgn, precision, &adjusted_value,
1082 &adjusted_mask, sgn, precision, other.get_value (),
1083 other.get_mask ());
1085 if (wi::sext (adjusted_mask, precision) == -1)
1086 return set_to_bottom ();
1089 else
1090 return set_to_bottom ();
1092 if (top_p ())
1094 if (wi::sext (adjusted_mask, precision) == -1)
1095 return set_to_bottom ();
1096 return set_to_constant (adjusted_value, adjusted_mask);
1098 else
1099 return meet_with_1 (adjusted_value, adjusted_mask, precision);
1102 /* Mark bot aggregate and scalar lattices as containing an unknown variable,
1103 return true is any of them has not been marked as such so far. */
1105 static inline bool
1106 set_all_contains_variable (struct ipcp_param_lattices *plats)
1108 bool ret;
1109 ret = plats->itself.set_contains_variable ();
1110 ret |= plats->ctxlat.set_contains_variable ();
1111 ret |= set_agg_lats_contain_variable (plats);
1112 ret |= plats->bits_lattice.set_to_bottom ();
1113 ret |= plats->m_value_range.set_to_bottom ();
1114 return ret;
1117 /* Worker of call_for_symbol_thunks_and_aliases, increment the integer DATA
1118 points to by the number of callers to NODE. */
1120 static bool
1121 count_callers (cgraph_node *node, void *data)
1123 int *caller_count = (int *) data;
1125 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
1126 /* Local thunks can be handled transparently, but if the thunk can not
1127 be optimized out, count it as a real use. */
1128 if (!cs->caller->thunk.thunk_p || !cs->caller->local.local)
1129 ++*caller_count;
1130 return false;
1133 /* Worker of call_for_symbol_thunks_and_aliases, it is supposed to be called on
1134 the one caller of some other node. Set the caller's corresponding flag. */
1136 static bool
1137 set_single_call_flag (cgraph_node *node, void *)
1139 cgraph_edge *cs = node->callers;
1140 /* Local thunks can be handled transparently, skip them. */
1141 while (cs && cs->caller->thunk.thunk_p && cs->caller->local.local)
1142 cs = cs->next_caller;
1143 if (cs)
1145 IPA_NODE_REF (cs->caller)->node_calling_single_call = true;
1146 return true;
1148 return false;
1151 /* Initialize ipcp_lattices. */
1153 static void
1154 initialize_node_lattices (struct cgraph_node *node)
1156 struct ipa_node_params *info = IPA_NODE_REF (node);
1157 struct cgraph_edge *ie;
1158 bool disable = false, variable = false;
1159 int i;
1161 gcc_checking_assert (node->has_gimple_body_p ());
1162 if (node->local.local)
1164 int caller_count = 0;
1165 node->call_for_symbol_thunks_and_aliases (count_callers, &caller_count,
1166 true);
1167 gcc_checking_assert (caller_count > 0);
1168 if (caller_count == 1)
1169 node->call_for_symbol_thunks_and_aliases (set_single_call_flag,
1170 NULL, true);
1172 else
1174 /* When cloning is allowed, we can assume that externally visible
1175 functions are not called. We will compensate this by cloning
1176 later. */
1177 if (ipcp_versionable_function_p (node)
1178 && ipcp_cloning_candidate_p (node))
1179 variable = true;
1180 else
1181 disable = true;
1184 for (i = 0; i < ipa_get_param_count (info); i++)
1186 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1187 plats->m_value_range.init ();
1190 if (disable || variable)
1192 for (i = 0; i < ipa_get_param_count (info); i++)
1194 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1195 if (disable)
1197 plats->itself.set_to_bottom ();
1198 plats->ctxlat.set_to_bottom ();
1199 set_agg_lats_to_bottom (plats);
1200 plats->bits_lattice.set_to_bottom ();
1201 plats->m_value_range.set_to_bottom ();
1203 else
1204 set_all_contains_variable (plats);
1206 if (dump_file && (dump_flags & TDF_DETAILS)
1207 && !node->alias && !node->thunk.thunk_p)
1208 fprintf (dump_file, "Marking all lattices of %s as %s\n",
1209 node->dump_name (), disable ? "BOTTOM" : "VARIABLE");
1212 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1213 if (ie->indirect_info->polymorphic
1214 && ie->indirect_info->param_index >= 0)
1216 gcc_checking_assert (ie->indirect_info->param_index >= 0);
1217 ipa_get_parm_lattices (info,
1218 ie->indirect_info->param_index)->virt_call = 1;
1222 /* Return the result of a (possibly arithmetic) pass through jump function
1223 JFUNC on the constant value INPUT. RES_TYPE is the type of the parameter
1224 to which the result is passed. Return NULL_TREE if that cannot be
1225 determined or be considered an interprocedural invariant. */
1227 static tree
1228 ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input,
1229 tree res_type)
1231 tree res;
1233 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
1234 return input;
1235 if (!is_gimple_ip_invariant (input))
1236 return NULL_TREE;
1238 tree_code opcode = ipa_get_jf_pass_through_operation (jfunc);
1239 if (!res_type)
1241 if (TREE_CODE_CLASS (opcode) == tcc_comparison)
1242 res_type = boolean_type_node;
1243 else if (expr_type_first_operand_type_p (opcode))
1244 res_type = TREE_TYPE (input);
1245 else
1246 return NULL_TREE;
1249 if (TREE_CODE_CLASS (opcode) == tcc_unary)
1250 res = fold_unary (opcode, res_type, input);
1251 else
1252 res = fold_binary (opcode, res_type, input,
1253 ipa_get_jf_pass_through_operand (jfunc));
1255 if (res && !is_gimple_ip_invariant (res))
1256 return NULL_TREE;
1258 return res;
1261 /* Return the result of an ancestor jump function JFUNC on the constant value
1262 INPUT. Return NULL_TREE if that cannot be determined. */
1264 static tree
1265 ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
1267 gcc_checking_assert (TREE_CODE (input) != TREE_BINFO);
1268 if (TREE_CODE (input) == ADDR_EXPR)
1270 tree t = TREE_OPERAND (input, 0);
1271 t = build_ref_for_offset (EXPR_LOCATION (t), t,
1272 ipa_get_jf_ancestor_offset (jfunc), false,
1273 ptr_type_node, NULL, false);
1274 return build_fold_addr_expr (t);
1276 else
1277 return NULL_TREE;
1280 /* Determine whether JFUNC evaluates to a single known constant value and if
1281 so, return it. Otherwise return NULL. INFO describes the caller node or
1282 the one it is inlined to, so that pass-through jump functions can be
1283 evaluated. PARM_TYPE is the type of the parameter to which the result is
1284 passed. */
1286 tree
1287 ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc,
1288 tree parm_type)
1290 if (jfunc->type == IPA_JF_CONST)
1291 return ipa_get_jf_constant (jfunc);
1292 else if (jfunc->type == IPA_JF_PASS_THROUGH
1293 || jfunc->type == IPA_JF_ANCESTOR)
1295 tree input;
1296 int idx;
1298 if (jfunc->type == IPA_JF_PASS_THROUGH)
1299 idx = ipa_get_jf_pass_through_formal_id (jfunc);
1300 else
1301 idx = ipa_get_jf_ancestor_formal_id (jfunc);
1303 if (info->ipcp_orig_node)
1304 input = info->known_csts[idx];
1305 else
1307 ipcp_lattice<tree> *lat;
1309 if (!info->lattices
1310 || idx >= ipa_get_param_count (info))
1311 return NULL_TREE;
1312 lat = ipa_get_scalar_lat (info, idx);
1313 if (!lat->is_single_const ())
1314 return NULL_TREE;
1315 input = lat->values->value;
1318 if (!input)
1319 return NULL_TREE;
1321 if (jfunc->type == IPA_JF_PASS_THROUGH)
1322 return ipa_get_jf_pass_through_result (jfunc, input, parm_type);
1323 else
1324 return ipa_get_jf_ancestor_result (jfunc, input);
1326 else
1327 return NULL_TREE;
1330 /* Determie whether JFUNC evaluates to single known polymorphic context, given
1331 that INFO describes the caller node or the one it is inlined to, CS is the
1332 call graph edge corresponding to JFUNC and CSIDX index of the described
1333 parameter. */
1335 ipa_polymorphic_call_context
1336 ipa_context_from_jfunc (ipa_node_params *info, cgraph_edge *cs, int csidx,
1337 ipa_jump_func *jfunc)
1339 ipa_edge_args *args = IPA_EDGE_REF (cs);
1340 ipa_polymorphic_call_context ctx;
1341 ipa_polymorphic_call_context *edge_ctx
1342 = cs ? ipa_get_ith_polymorhic_call_context (args, csidx) : NULL;
1344 if (edge_ctx && !edge_ctx->useless_p ())
1345 ctx = *edge_ctx;
1347 if (jfunc->type == IPA_JF_PASS_THROUGH
1348 || jfunc->type == IPA_JF_ANCESTOR)
1350 ipa_polymorphic_call_context srcctx;
1351 int srcidx;
1352 bool type_preserved = true;
1353 if (jfunc->type == IPA_JF_PASS_THROUGH)
1355 if (ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1356 return ctx;
1357 type_preserved = ipa_get_jf_pass_through_type_preserved (jfunc);
1358 srcidx = ipa_get_jf_pass_through_formal_id (jfunc);
1360 else
1362 type_preserved = ipa_get_jf_ancestor_type_preserved (jfunc);
1363 srcidx = ipa_get_jf_ancestor_formal_id (jfunc);
1365 if (info->ipcp_orig_node)
1367 if (info->known_contexts.exists ())
1368 srcctx = info->known_contexts[srcidx];
1370 else
1372 if (!info->lattices
1373 || srcidx >= ipa_get_param_count (info))
1374 return ctx;
1375 ipcp_lattice<ipa_polymorphic_call_context> *lat;
1376 lat = ipa_get_poly_ctx_lat (info, srcidx);
1377 if (!lat->is_single_const ())
1378 return ctx;
1379 srcctx = lat->values->value;
1381 if (srcctx.useless_p ())
1382 return ctx;
1383 if (jfunc->type == IPA_JF_ANCESTOR)
1384 srcctx.offset_by (ipa_get_jf_ancestor_offset (jfunc));
1385 if (!type_preserved)
1386 srcctx.possible_dynamic_type_change (cs->in_polymorphic_cdtor);
1387 srcctx.combine_with (ctx);
1388 return srcctx;
1391 return ctx;
1394 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
1395 bottom, not containing a variable component and without any known value at
1396 the same time. */
1398 DEBUG_FUNCTION void
1399 ipcp_verify_propagated_values (void)
1401 struct cgraph_node *node;
1403 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
1405 struct ipa_node_params *info = IPA_NODE_REF (node);
1406 int i, count = ipa_get_param_count (info);
1408 for (i = 0; i < count; i++)
1410 ipcp_lattice<tree> *lat = ipa_get_scalar_lat (info, i);
1412 if (!lat->bottom
1413 && !lat->contains_variable
1414 && lat->values_count == 0)
1416 if (dump_file)
1418 symtab->dump (dump_file);
1419 fprintf (dump_file, "\nIPA lattices after constant "
1420 "propagation, before gcc_unreachable:\n");
1421 print_all_lattices (dump_file, true, false);
1424 gcc_unreachable ();
1430 /* Return true iff X and Y should be considered equal values by IPA-CP. */
1432 static bool
1433 values_equal_for_ipcp_p (tree x, tree y)
1435 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
1437 if (x == y)
1438 return true;
1440 if (TREE_CODE (x) == ADDR_EXPR
1441 && TREE_CODE (y) == ADDR_EXPR
1442 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
1443 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
1444 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
1445 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
1446 else
1447 return operand_equal_p (x, y, 0);
1450 /* Return true iff X and Y should be considered equal contexts by IPA-CP. */
1452 static bool
1453 values_equal_for_ipcp_p (ipa_polymorphic_call_context x,
1454 ipa_polymorphic_call_context y)
1456 return x.equal_to (y);
1460 /* Add a new value source to the value represented by THIS, marking that a
1461 value comes from edge CS and (if the underlying jump function is a
1462 pass-through or an ancestor one) from a caller value SRC_VAL of a caller
1463 parameter described by SRC_INDEX. OFFSET is negative if the source was the
1464 scalar value of the parameter itself or the offset within an aggregate. */
1466 template <typename valtype>
1467 void
1468 ipcp_value<valtype>::add_source (cgraph_edge *cs, ipcp_value *src_val,
1469 int src_idx, HOST_WIDE_INT offset)
1471 ipcp_value_source<valtype> *src;
1473 src = new (ipcp_sources_pool.allocate ()) ipcp_value_source<valtype>;
1474 src->offset = offset;
1475 src->cs = cs;
1476 src->val = src_val;
1477 src->index = src_idx;
1479 src->next = sources;
1480 sources = src;
1483 /* Allocate a new ipcp_value holding a tree constant, initialize its value to
1484 SOURCE and clear all other fields. */
1486 static ipcp_value<tree> *
1487 allocate_and_init_ipcp_value (tree source)
1489 ipcp_value<tree> *val;
1491 val = new (ipcp_cst_values_pool.allocate ()) ipcp_value<tree>();
1492 val->value = source;
1493 return val;
1496 /* Allocate a new ipcp_value holding a polymorphic context, initialize its
1497 value to SOURCE and clear all other fields. */
1499 static ipcp_value<ipa_polymorphic_call_context> *
1500 allocate_and_init_ipcp_value (ipa_polymorphic_call_context source)
1502 ipcp_value<ipa_polymorphic_call_context> *val;
1504 // TODO
1505 val = new (ipcp_poly_ctx_values_pool.allocate ())
1506 ipcp_value<ipa_polymorphic_call_context>();
1507 val->value = source;
1508 return val;
1511 /* Try to add NEWVAL to LAT, potentially creating a new ipcp_value for it. CS,
1512 SRC_VAL SRC_INDEX and OFFSET are meant for add_source and have the same
1513 meaning. OFFSET -1 means the source is scalar and not a part of an
1514 aggregate. */
1516 template <typename valtype>
1517 bool
1518 ipcp_lattice<valtype>::add_value (valtype newval, cgraph_edge *cs,
1519 ipcp_value<valtype> *src_val,
1520 int src_idx, HOST_WIDE_INT offset)
1522 ipcp_value<valtype> *val;
1524 if (bottom)
1525 return false;
1527 for (val = values; val; val = val->next)
1528 if (values_equal_for_ipcp_p (val->value, newval))
1530 if (ipa_edge_within_scc (cs))
1532 ipcp_value_source<valtype> *s;
1533 for (s = val->sources; s; s = s->next)
1534 if (s->cs == cs)
1535 break;
1536 if (s)
1537 return false;
1540 val->add_source (cs, src_val, src_idx, offset);
1541 return false;
1544 if (values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
1546 /* We can only free sources, not the values themselves, because sources
1547 of other values in this SCC might point to them. */
1548 for (val = values; val; val = val->next)
1550 while (val->sources)
1552 ipcp_value_source<valtype> *src = val->sources;
1553 val->sources = src->next;
1554 ipcp_sources_pool.remove ((ipcp_value_source<tree>*)src);
1558 values = NULL;
1559 return set_to_bottom ();
1562 values_count++;
1563 val = allocate_and_init_ipcp_value (newval);
1564 val->add_source (cs, src_val, src_idx, offset);
1565 val->next = values;
1566 values = val;
1567 return true;
1570 /* Propagate values through a pass-through jump function JFUNC associated with
1571 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1572 is the index of the source parameter. PARM_TYPE is the type of the
1573 parameter to which the result is passed. */
1575 static bool
1576 propagate_vals_across_pass_through (cgraph_edge *cs, ipa_jump_func *jfunc,
1577 ipcp_lattice<tree> *src_lat,
1578 ipcp_lattice<tree> *dest_lat, int src_idx,
1579 tree parm_type)
1581 ipcp_value<tree> *src_val;
1582 bool ret = false;
1584 /* Do not create new values when propagating within an SCC because if there
1585 are arithmetic functions with circular dependencies, there is infinite
1586 number of them and we would just make lattices bottom. If this condition
1587 is ever relaxed we have to detect self-feeding recursive calls in
1588 cgraph_edge_brings_value_p in a smarter way. */
1589 if ((ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1590 && ipa_edge_within_scc (cs))
1591 ret = dest_lat->set_contains_variable ();
1592 else
1593 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1595 tree cstval = ipa_get_jf_pass_through_result (jfunc, src_val->value,
1596 parm_type);
1598 if (cstval)
1599 ret |= dest_lat->add_value (cstval, cs, src_val, src_idx);
1600 else
1601 ret |= dest_lat->set_contains_variable ();
1604 return ret;
1607 /* Propagate values through an ancestor jump function JFUNC associated with
1608 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1609 is the index of the source parameter. */
1611 static bool
1612 propagate_vals_across_ancestor (struct cgraph_edge *cs,
1613 struct ipa_jump_func *jfunc,
1614 ipcp_lattice<tree> *src_lat,
1615 ipcp_lattice<tree> *dest_lat, int src_idx)
1617 ipcp_value<tree> *src_val;
1618 bool ret = false;
1620 if (ipa_edge_within_scc (cs))
1621 return dest_lat->set_contains_variable ();
1623 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1625 tree t = ipa_get_jf_ancestor_result (jfunc, src_val->value);
1627 if (t)
1628 ret |= dest_lat->add_value (t, cs, src_val, src_idx);
1629 else
1630 ret |= dest_lat->set_contains_variable ();
1633 return ret;
1636 /* Propagate scalar values across jump function JFUNC that is associated with
1637 edge CS and put the values into DEST_LAT. PARM_TYPE is the type of the
1638 parameter to which the result is passed. */
1640 static bool
1641 propagate_scalar_across_jump_function (struct cgraph_edge *cs,
1642 struct ipa_jump_func *jfunc,
1643 ipcp_lattice<tree> *dest_lat,
1644 tree param_type)
1646 if (dest_lat->bottom)
1647 return false;
1649 if (jfunc->type == IPA_JF_CONST)
1651 tree val = ipa_get_jf_constant (jfunc);
1652 return dest_lat->add_value (val, cs, NULL, 0);
1654 else if (jfunc->type == IPA_JF_PASS_THROUGH
1655 || jfunc->type == IPA_JF_ANCESTOR)
1657 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1658 ipcp_lattice<tree> *src_lat;
1659 int src_idx;
1660 bool ret;
1662 if (jfunc->type == IPA_JF_PASS_THROUGH)
1663 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1664 else
1665 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1667 src_lat = ipa_get_scalar_lat (caller_info, src_idx);
1668 if (src_lat->bottom)
1669 return dest_lat->set_contains_variable ();
1671 /* If we would need to clone the caller and cannot, do not propagate. */
1672 if (!ipcp_versionable_function_p (cs->caller)
1673 && (src_lat->contains_variable
1674 || (src_lat->values_count > 1)))
1675 return dest_lat->set_contains_variable ();
1677 if (jfunc->type == IPA_JF_PASS_THROUGH)
1678 ret = propagate_vals_across_pass_through (cs, jfunc, src_lat,
1679 dest_lat, src_idx, param_type);
1680 else
1681 ret = propagate_vals_across_ancestor (cs, jfunc, src_lat, dest_lat,
1682 src_idx);
1684 if (src_lat->contains_variable)
1685 ret |= dest_lat->set_contains_variable ();
1687 return ret;
1690 /* TODO: We currently do not handle member method pointers in IPA-CP (we only
1691 use it for indirect inlining), we should propagate them too. */
1692 return dest_lat->set_contains_variable ();
1695 /* Propagate scalar values across jump function JFUNC that is associated with
1696 edge CS and describes argument IDX and put the values into DEST_LAT. */
1698 static bool
1699 propagate_context_across_jump_function (cgraph_edge *cs,
1700 ipa_jump_func *jfunc, int idx,
1701 ipcp_lattice<ipa_polymorphic_call_context> *dest_lat)
1703 ipa_edge_args *args = IPA_EDGE_REF (cs);
1704 if (dest_lat->bottom)
1705 return false;
1706 bool ret = false;
1707 bool added_sth = false;
1708 bool type_preserved = true;
1710 ipa_polymorphic_call_context edge_ctx, *edge_ctx_ptr
1711 = ipa_get_ith_polymorhic_call_context (args, idx);
1713 if (edge_ctx_ptr)
1714 edge_ctx = *edge_ctx_ptr;
1716 if (jfunc->type == IPA_JF_PASS_THROUGH
1717 || jfunc->type == IPA_JF_ANCESTOR)
1719 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1720 int src_idx;
1721 ipcp_lattice<ipa_polymorphic_call_context> *src_lat;
1723 /* TODO: Once we figure out how to propagate speculations, it will
1724 probably be a good idea to switch to speculation if type_preserved is
1725 not set instead of punting. */
1726 if (jfunc->type == IPA_JF_PASS_THROUGH)
1728 if (ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1729 goto prop_fail;
1730 type_preserved = ipa_get_jf_pass_through_type_preserved (jfunc);
1731 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1733 else
1735 type_preserved = ipa_get_jf_ancestor_type_preserved (jfunc);
1736 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1739 src_lat = ipa_get_poly_ctx_lat (caller_info, src_idx);
1740 /* If we would need to clone the caller and cannot, do not propagate. */
1741 if (!ipcp_versionable_function_p (cs->caller)
1742 && (src_lat->contains_variable
1743 || (src_lat->values_count > 1)))
1744 goto prop_fail;
1746 ipcp_value<ipa_polymorphic_call_context> *src_val;
1747 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1749 ipa_polymorphic_call_context cur = src_val->value;
1751 if (!type_preserved)
1752 cur.possible_dynamic_type_change (cs->in_polymorphic_cdtor);
1753 if (jfunc->type == IPA_JF_ANCESTOR)
1754 cur.offset_by (ipa_get_jf_ancestor_offset (jfunc));
1755 /* TODO: In cases we know how the context is going to be used,
1756 we can improve the result by passing proper OTR_TYPE. */
1757 cur.combine_with (edge_ctx);
1758 if (!cur.useless_p ())
1760 if (src_lat->contains_variable
1761 && !edge_ctx.equal_to (cur))
1762 ret |= dest_lat->set_contains_variable ();
1763 ret |= dest_lat->add_value (cur, cs, src_val, src_idx);
1764 added_sth = true;
1770 prop_fail:
1771 if (!added_sth)
1773 if (!edge_ctx.useless_p ())
1774 ret |= dest_lat->add_value (edge_ctx, cs);
1775 else
1776 ret |= dest_lat->set_contains_variable ();
1779 return ret;
1782 /* Propagate bits across jfunc that is associated with
1783 edge cs and update dest_lattice accordingly. */
1785 bool
1786 propagate_bits_across_jump_function (cgraph_edge *cs, int idx,
1787 ipa_jump_func *jfunc,
1788 ipcp_bits_lattice *dest_lattice)
1790 if (dest_lattice->bottom_p ())
1791 return false;
1793 enum availability availability;
1794 cgraph_node *callee = cs->callee->function_symbol (&availability);
1795 struct ipa_node_params *callee_info = IPA_NODE_REF (callee);
1796 tree parm_type = ipa_get_type (callee_info, idx);
1798 /* For K&R C programs, ipa_get_type() could return NULL_TREE. Avoid the
1799 transform for these cases. Similarly, we can have bad type mismatches
1800 with LTO, avoid doing anything with those too. */
1801 if (!parm_type
1802 || (!INTEGRAL_TYPE_P (parm_type) && !POINTER_TYPE_P (parm_type)))
1804 if (dump_file && (dump_flags & TDF_DETAILS))
1805 fprintf (dump_file, "Setting dest_lattice to bottom, because type of "
1806 "param %i of %s is NULL or unsuitable for bits propagation\n",
1807 idx, cs->callee->name ());
1809 return dest_lattice->set_to_bottom ();
1812 unsigned precision = TYPE_PRECISION (parm_type);
1813 signop sgn = TYPE_SIGN (parm_type);
1815 if (jfunc->type == IPA_JF_PASS_THROUGH
1816 || jfunc->type == IPA_JF_ANCESTOR)
1818 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1819 tree operand = NULL_TREE;
1820 enum tree_code code;
1821 unsigned src_idx;
1823 if (jfunc->type == IPA_JF_PASS_THROUGH)
1825 code = ipa_get_jf_pass_through_operation (jfunc);
1826 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1827 if (code != NOP_EXPR)
1828 operand = ipa_get_jf_pass_through_operand (jfunc);
1830 else
1832 code = POINTER_PLUS_EXPR;
1833 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1834 unsigned HOST_WIDE_INT offset = ipa_get_jf_ancestor_offset (jfunc) / BITS_PER_UNIT;
1835 operand = build_int_cstu (size_type_node, offset);
1838 struct ipcp_param_lattices *src_lats
1839 = ipa_get_parm_lattices (caller_info, src_idx);
1841 /* Try to propagate bits if src_lattice is bottom, but jfunc is known.
1842 for eg consider:
1843 int f(int x)
1845 g (x & 0xff);
1847 Assume lattice for x is bottom, however we can still propagate
1848 result of x & 0xff == 0xff, which gets computed during ccp1 pass
1849 and we store it in jump function during analysis stage. */
1851 if (src_lats->bits_lattice.bottom_p ()
1852 && jfunc->bits)
1853 return dest_lattice->meet_with (jfunc->bits->value, jfunc->bits->mask,
1854 precision);
1855 else
1856 return dest_lattice->meet_with (src_lats->bits_lattice, precision, sgn,
1857 code, operand);
1860 else if (jfunc->type == IPA_JF_ANCESTOR)
1861 return dest_lattice->set_to_bottom ();
1862 else if (jfunc->bits)
1863 return dest_lattice->meet_with (jfunc->bits->value, jfunc->bits->mask,
1864 precision);
1865 else
1866 return dest_lattice->set_to_bottom ();
1869 /* Emulate effects of unary OPERATION and/or conversion from SRC_TYPE to
1870 DST_TYPE on value range in SRC_VR and store it to DST_VR. Return true if
1871 the result is a range or an anti-range. */
1873 static bool
1874 ipa_vr_operation_and_type_effects (value_range_base *dst_vr,
1875 value_range_base *src_vr,
1876 enum tree_code operation,
1877 tree dst_type, tree src_type)
1879 extract_range_from_unary_expr (dst_vr, operation, dst_type,
1880 src_vr, src_type);
1881 if (dst_vr->varying_p () || dst_vr->undefined_p ())
1882 return false;
1883 return true;
1886 /* Propagate value range across jump function JFUNC that is associated with
1887 edge CS with param of callee of PARAM_TYPE and update DEST_PLATS
1888 accordingly. */
1890 static bool
1891 propagate_vr_across_jump_function (cgraph_edge *cs, ipa_jump_func *jfunc,
1892 struct ipcp_param_lattices *dest_plats,
1893 tree param_type)
1895 ipcp_vr_lattice *dest_lat = &dest_plats->m_value_range;
1897 if (dest_lat->bottom_p ())
1898 return false;
1900 if (!param_type
1901 || (!INTEGRAL_TYPE_P (param_type)
1902 && !POINTER_TYPE_P (param_type)))
1903 return dest_lat->set_to_bottom ();
1905 if (jfunc->type == IPA_JF_PASS_THROUGH)
1907 enum tree_code operation = ipa_get_jf_pass_through_operation (jfunc);
1909 if (TREE_CODE_CLASS (operation) == tcc_unary)
1911 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1912 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1913 tree operand_type = ipa_get_type (caller_info, src_idx);
1914 struct ipcp_param_lattices *src_lats
1915 = ipa_get_parm_lattices (caller_info, src_idx);
1917 if (src_lats->m_value_range.bottom_p ())
1918 return dest_lat->set_to_bottom ();
1919 value_range_base vr;
1920 if (ipa_vr_operation_and_type_effects (&vr,
1921 &src_lats->m_value_range.m_vr,
1922 operation, param_type,
1923 operand_type))
1924 return dest_lat->meet_with (&vr);
1927 else if (jfunc->type == IPA_JF_CONST)
1929 tree val = ipa_get_jf_constant (jfunc);
1930 if (TREE_CODE (val) == INTEGER_CST)
1932 val = fold_convert (param_type, val);
1933 if (TREE_OVERFLOW_P (val))
1934 val = drop_tree_overflow (val);
1936 value_range_base tmpvr (VR_RANGE, val, val);
1937 return dest_lat->meet_with (&tmpvr);
1941 value_range_base vr;
1942 if (jfunc->m_vr
1943 && ipa_vr_operation_and_type_effects (&vr, jfunc->m_vr, NOP_EXPR,
1944 param_type,
1945 jfunc->m_vr->type ()))
1946 return dest_lat->meet_with (&vr);
1947 else
1948 return dest_lat->set_to_bottom ();
1951 /* If DEST_PLATS already has aggregate items, check that aggs_by_ref matches
1952 NEW_AGGS_BY_REF and if not, mark all aggs as bottoms and return true (in all
1953 other cases, return false). If there are no aggregate items, set
1954 aggs_by_ref to NEW_AGGS_BY_REF. */
1956 static bool
1957 set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
1958 bool new_aggs_by_ref)
1960 if (dest_plats->aggs)
1962 if (dest_plats->aggs_by_ref != new_aggs_by_ref)
1964 set_agg_lats_to_bottom (dest_plats);
1965 return true;
1968 else
1969 dest_plats->aggs_by_ref = new_aggs_by_ref;
1970 return false;
1973 /* Walk aggregate lattices in DEST_PLATS from ***AGLAT on, until ***aglat is an
1974 already existing lattice for the given OFFSET and SIZE, marking all skipped
1975 lattices as containing variable and checking for overlaps. If there is no
1976 already existing lattice for the OFFSET and VAL_SIZE, create one, initialize
1977 it with offset, size and contains_variable to PRE_EXISTING, and return true,
1978 unless there are too many already. If there are two many, return false. If
1979 there are overlaps turn whole DEST_PLATS to bottom and return false. If any
1980 skipped lattices were newly marked as containing variable, set *CHANGE to
1981 true. */
1983 static bool
1984 merge_agg_lats_step (struct ipcp_param_lattices *dest_plats,
1985 HOST_WIDE_INT offset, HOST_WIDE_INT val_size,
1986 struct ipcp_agg_lattice ***aglat,
1987 bool pre_existing, bool *change)
1989 gcc_checking_assert (offset >= 0);
1991 while (**aglat && (**aglat)->offset < offset)
1993 if ((**aglat)->offset + (**aglat)->size > offset)
1995 set_agg_lats_to_bottom (dest_plats);
1996 return false;
1998 *change |= (**aglat)->set_contains_variable ();
1999 *aglat = &(**aglat)->next;
2002 if (**aglat && (**aglat)->offset == offset)
2004 if ((**aglat)->size != val_size
2005 || ((**aglat)->next
2006 && (**aglat)->next->offset < offset + val_size))
2008 set_agg_lats_to_bottom (dest_plats);
2009 return false;
2011 gcc_checking_assert (!(**aglat)->next
2012 || (**aglat)->next->offset >= offset + val_size);
2013 return true;
2015 else
2017 struct ipcp_agg_lattice *new_al;
2019 if (**aglat && (**aglat)->offset < offset + val_size)
2021 set_agg_lats_to_bottom (dest_plats);
2022 return false;
2024 if (dest_plats->aggs_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
2025 return false;
2026 dest_plats->aggs_count++;
2027 new_al = ipcp_agg_lattice_pool.allocate ();
2028 memset (new_al, 0, sizeof (*new_al));
2030 new_al->offset = offset;
2031 new_al->size = val_size;
2032 new_al->contains_variable = pre_existing;
2034 new_al->next = **aglat;
2035 **aglat = new_al;
2036 return true;
2040 /* Set all AGLAT and all other aggregate lattices reachable by next pointers as
2041 containing an unknown value. */
2043 static bool
2044 set_chain_of_aglats_contains_variable (struct ipcp_agg_lattice *aglat)
2046 bool ret = false;
2047 while (aglat)
2049 ret |= aglat->set_contains_variable ();
2050 aglat = aglat->next;
2052 return ret;
2055 /* Merge existing aggregate lattices in SRC_PLATS to DEST_PLATS, subtracting
2056 DELTA_OFFSET. CS is the call graph edge and SRC_IDX the index of the source
2057 parameter used for lattice value sources. Return true if DEST_PLATS changed
2058 in any way. */
2060 static bool
2061 merge_aggregate_lattices (struct cgraph_edge *cs,
2062 struct ipcp_param_lattices *dest_plats,
2063 struct ipcp_param_lattices *src_plats,
2064 int src_idx, HOST_WIDE_INT offset_delta)
2066 bool pre_existing = dest_plats->aggs != NULL;
2067 struct ipcp_agg_lattice **dst_aglat;
2068 bool ret = false;
2070 if (set_check_aggs_by_ref (dest_plats, src_plats->aggs_by_ref))
2071 return true;
2072 if (src_plats->aggs_bottom)
2073 return set_agg_lats_contain_variable (dest_plats);
2074 if (src_plats->aggs_contain_variable)
2075 ret |= set_agg_lats_contain_variable (dest_plats);
2076 dst_aglat = &dest_plats->aggs;
2078 for (struct ipcp_agg_lattice *src_aglat = src_plats->aggs;
2079 src_aglat;
2080 src_aglat = src_aglat->next)
2082 HOST_WIDE_INT new_offset = src_aglat->offset - offset_delta;
2084 if (new_offset < 0)
2085 continue;
2086 if (merge_agg_lats_step (dest_plats, new_offset, src_aglat->size,
2087 &dst_aglat, pre_existing, &ret))
2089 struct ipcp_agg_lattice *new_al = *dst_aglat;
2091 dst_aglat = &(*dst_aglat)->next;
2092 if (src_aglat->bottom)
2094 ret |= new_al->set_contains_variable ();
2095 continue;
2097 if (src_aglat->contains_variable)
2098 ret |= new_al->set_contains_variable ();
2099 for (ipcp_value<tree> *val = src_aglat->values;
2100 val;
2101 val = val->next)
2102 ret |= new_al->add_value (val->value, cs, val, src_idx,
2103 src_aglat->offset);
2105 else if (dest_plats->aggs_bottom)
2106 return true;
2108 ret |= set_chain_of_aglats_contains_variable (*dst_aglat);
2109 return ret;
2112 /* Determine whether there is anything to propagate FROM SRC_PLATS through a
2113 pass-through JFUNC and if so, whether it has conform and conforms to the
2114 rules about propagating values passed by reference. */
2116 static bool
2117 agg_pass_through_permissible_p (struct ipcp_param_lattices *src_plats,
2118 struct ipa_jump_func *jfunc)
2120 return src_plats->aggs
2121 && (!src_plats->aggs_by_ref
2122 || ipa_get_jf_pass_through_agg_preserved (jfunc));
/* Propagate aggregate values across jump function JFUNC that is associated
   with edge CS and put the values into DEST_PLATS.  Return true if any
   destination lattice changed.  */

static bool
propagate_aggs_across_jump_function (struct cgraph_edge *cs,
				     struct ipa_jump_func *jfunc,
				     struct ipcp_param_lattices *dest_plats)
{
  bool ret = false;

  /* Already at bottom, nothing can change any more.  */
  if (dest_plats->aggs_bottom)
    return false;

  if (jfunc->type == IPA_JF_PASS_THROUGH
      && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
    {
      /* Simple pass-through: merge the caller's aggregate lattices for the
	 corresponding parameter, with no offset shift.  */
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
      struct ipcp_param_lattices *src_plats;

      src_plats = ipa_get_parm_lattices (caller_info, src_idx);
      if (agg_pass_through_permissible_p (src_plats, jfunc))
	{
	  /* Currently we do not produce clobber aggregate jump
	     functions, replace with merging when we do.  */
	  gcc_assert (!jfunc->agg.items);
	  ret |= merge_aggregate_lattices (cs, dest_plats, src_plats,
					   src_idx, 0);
	}
      else
	ret |= set_agg_lats_contain_variable (dest_plats);
    }
  else if (jfunc->type == IPA_JF_ANCESTOR
	   && ipa_get_jf_ancestor_agg_preserved (jfunc))
    {
      /* Ancestor jump function: like pass-through but the source aggregate
	 is viewed at the ancestor offset.  */
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
      struct ipcp_param_lattices *src_plats;

      src_plats = ipa_get_parm_lattices (caller_info, src_idx);
      if (src_plats->aggs && src_plats->aggs_by_ref)
	{
	  /* Currently we do not produce clobber aggregate jump
	     functions, replace with merging when we do.  */
	  gcc_assert (!jfunc->agg.items);
	  ret |= merge_aggregate_lattices (cs, dest_plats, src_plats, src_idx,
					   ipa_get_jf_ancestor_offset (jfunc));
	}
      else if (!src_plats->aggs_by_ref)
	/* An ancestor of a by-value aggregate makes no sense; give up.  */
	ret |= set_agg_lats_to_bottom (dest_plats);
      else
	ret |= set_agg_lats_contain_variable (dest_plats);
    }
  else if (jfunc->agg.items)
    {
      /* The jump function itself describes known constants stored into the
	 aggregate; add each of them to the matching lattice.  */
      bool pre_existing = dest_plats->aggs != NULL;
      struct ipcp_agg_lattice **aglat = &dest_plats->aggs;
      struct ipa_agg_jf_item *item;
      int i;

      if (set_check_aggs_by_ref (dest_plats, jfunc->agg.by_ref))
	return true;

      FOR_EACH_VEC_ELT (*jfunc->agg.items, i, item)
	{
	  HOST_WIDE_INT val_size;

	  if (item->offset < 0)
	    continue;
	  gcc_checking_assert (is_gimple_ip_invariant (item->value));
	  val_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (item->value)));

	  if (merge_agg_lats_step (dest_plats, item->offset, val_size,
				   &aglat, pre_existing, &ret))
	    {
	      ret |= (*aglat)->add_value (item->value, cs, NULL, 0, 0);
	      aglat = &(*aglat)->next;
	    }
	  else if (dest_plats->aggs_bottom)
	    return true;
	}

      /* Lattices for offsets the jump function says nothing about become
	 variable.  */
      ret |= set_chain_of_aglats_contains_variable (*aglat);
    }
  else
    ret |= set_agg_lats_contain_variable (dest_plats);

  return ret;
}
2215 /* Return true if on the way cfrom CS->caller to the final (non-alias and
2216 non-thunk) destination, the call passes through a thunk. */
2218 static bool
2219 call_passes_through_thunk_p (cgraph_edge *cs)
2221 cgraph_node *alias_or_thunk = cs->callee;
2222 while (alias_or_thunk->alias)
2223 alias_or_thunk = alias_or_thunk->get_alias_target ();
2224 return alias_or_thunk->thunk.thunk_p;
/* Propagate constants from the caller to the callee of CS.  INFO describes
   the caller.  Return true if any callee lattice changed.  */

static bool
propagate_constants_across_call (struct cgraph_edge *cs)
{
  struct ipa_node_params *callee_info;
  enum availability availability;
  cgraph_node *callee;
  struct ipa_edge_args *args;
  bool ret = false;
  int i, args_count, parms_count;

  callee = cs->callee->function_symbol (&availability);
  if (!callee->definition)
    return false;
  gcc_checking_assert (callee->has_gimple_body_p ());
  callee_info = IPA_NODE_REF (callee);

  args = IPA_EDGE_REF (cs);
  args_count = ipa_get_cs_argument_count (args);
  parms_count = ipa_get_param_count (callee_info);
  if (parms_count == 0)
    return false;

  /* If this call goes through a thunk we must not propagate to the first (0th)
     parameter.  However, we might need to uncover a thunk from below a series
     of aliases first.  */
  if (call_passes_through_thunk_p (cs))
    {
      ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
							       0));
      i = 1;
    }
  else
    i = 0;

  for (; (i < args_count) && (i < parms_count); i++)
    {
      struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
      struct ipcp_param_lattices *dest_plats;
      tree param_type = ipa_get_type (callee_info, i);

      dest_plats = ipa_get_parm_lattices (callee_info, i);
      /* An interposable callee may be replaced at link time, so nothing can
	 be assumed about the values it will actually see.  */
      if (availability == AVAIL_INTERPOSABLE)
	ret |= set_all_contains_variable (dest_plats);
      else
	{
	  /* Propagate each kind of information through its own lattice.  */
	  ret |= propagate_scalar_across_jump_function (cs, jump_func,
							&dest_plats->itself,
							param_type);
	  ret |= propagate_context_across_jump_function (cs, jump_func, i,
							 &dest_plats->ctxlat);
	  ret
	    |= propagate_bits_across_jump_function (cs, i, jump_func,
						    &dest_plats->bits_lattice);
	  ret |= propagate_aggs_across_jump_function (cs, jump_func,
						      dest_plats);
	  if (opt_for_fn (callee->decl, flag_ipa_vrp))
	    ret |= propagate_vr_across_jump_function (cs, jump_func,
						      dest_plats, param_type);
	  else
	    ret |= dest_plats->m_value_range.set_to_bottom ();
	}
    }
  /* Parameters for which the call site passes no argument are unknown.  */
  for (; i < parms_count; i++)
    ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info, i));

  return ret;
}
/* If an indirect edge IE can be turned into a direct one based on KNOWN_CSTS,
   KNOWN_CONTEXTS, KNOWN_AGGS or AGG_REPS return the destination.  The latter
   three can be NULL.  If AGG_REPS is not NULL, KNOWN_AGGS is ignored.  Set
   *SPECULATIVE to true if the returned target is only a speculation.  */

static tree
ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
				vec<tree> known_csts,
				vec<ipa_polymorphic_call_context> known_contexts,
				vec<ipa_agg_jump_function_p> known_aggs,
				struct ipa_agg_replacement_value *agg_reps,
				bool *speculative)
{
  int param_index = ie->indirect_info->param_index;
  HOST_WIDE_INT anc_offset;
  tree t;
  tree target = NULL;

  *speculative = false;

  /* The call target must be derived from a parameter we have a value for.  */
  if (param_index == -1
      || known_csts.length () <= (unsigned int) param_index)
    return NULL_TREE;

  if (!ie->indirect_info->polymorphic)
    {
      tree t;

      if (ie->indirect_info->agg_contents)
	{
	  /* The target is loaded from an aggregate; first look it up among
	     the aggregate replacement values, if available.  */
	  t = NULL;
	  if (agg_reps && ie->indirect_info->guaranteed_unmodified)
	    {
	      while (agg_reps)
		{
		  if (agg_reps->index == param_index
		      && agg_reps->offset == ie->indirect_info->offset
		      && agg_reps->by_ref == ie->indirect_info->by_ref)
		    {
		      t = agg_reps->value;
		      break;
		    }
		  agg_reps = agg_reps->next;
		}
	    }
	  if (!t)
	    {
	      /* Fall back to known aggregate jump function values.  */
	      struct ipa_agg_jump_function *agg;
	      if (known_aggs.length () > (unsigned int) param_index)
		agg = known_aggs[param_index];
	      else
		agg = NULL;
	      bool from_global_constant;
	      t = ipa_find_agg_cst_for_param (agg, known_csts[param_index],
					      ie->indirect_info->offset,
					      ie->indirect_info->by_ref,
					      &from_global_constant);
	      /* A value that may have been modified between the aggregate
		 store and the call cannot be trusted, unless it came from a
		 global constant.  */
	      if (t
		  && !from_global_constant
		  && !ie->indirect_info->guaranteed_unmodified)
		t = NULL_TREE;
	    }
	}
      else
	t = known_csts[param_index];

      /* Only a direct address of a function gives us a target.  */
      if (t
	  && TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
	return TREE_OPERAND (t, 0);
      else
	return NULL_TREE;
    }

  /* From here on the call is polymorphic (a virtual call).  */
  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL_TREE;

  gcc_assert (!ie->indirect_info->agg_contents);
  anc_offset = ie->indirect_info->offset;

  t = NULL;

  /* Try to work out value of virtual table pointer value in replacements.  */
  if (!t && agg_reps && !ie->indirect_info->by_ref)
    {
      while (agg_reps)
	{
	  if (agg_reps->index == param_index
	      && agg_reps->offset == ie->indirect_info->offset
	      && agg_reps->by_ref)
	    {
	      t = agg_reps->value;
	      break;
	    }
	  agg_reps = agg_reps->next;
	}
    }

  /* Try to work out value of virtual table pointer value in known
     aggregate values.  */
  if (!t && known_aggs.length () > (unsigned int) param_index
      && !ie->indirect_info->by_ref)
    {
      struct ipa_agg_jump_function *agg;
      agg = known_aggs[param_index];
      t = ipa_find_agg_cst_for_param (agg, known_csts[param_index],
				      ie->indirect_info->offset, true);
    }

  /* If we found the virtual table pointer, lookup the target.  */
  if (t)
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      if (vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						      vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!target
		  || (TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
		      && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (target)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (ie->indirect_info->vptr_changed)
		    return NULL;
		  target = ipa_impossible_devirt_target (ie, target);
		}
	      /* If the vptr may have changed since it was captured, the
		 result is only a speculation.  */
	      *speculative = ie->indirect_info->vptr_changed;
	      if (!*speculative)
		return target;
	    }
	}
    }

  /* Do we know the constant value of pointer?  */
  if (!t)
    t = known_csts[param_index];

  gcc_checking_assert (!t || TREE_CODE (t) != TREE_BINFO);

  /* Build the polymorphic call context from what we know, combining the
     known context (if any) with the context derived from the pointer.  */
  ipa_polymorphic_call_context context;
  if (known_contexts.length () > (unsigned int) param_index)
    {
      context = known_contexts[param_index];
      context.offset_by (anc_offset);
      if (ie->indirect_info->vptr_changed)
	context.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
					      ie->indirect_info->otr_type);
      if (t)
	{
	  ipa_polymorphic_call_context ctx2 = ipa_polymorphic_call_context
	    (t, ie->indirect_info->otr_type, anc_offset);
	  if (!ctx2.useless_p ())
	    context.combine_with (ctx2, ie->indirect_info->otr_type);
	}
    }
  else if (t)
    {
      context = ipa_polymorphic_call_context (t, ie->indirect_info->otr_type,
					      anc_offset);
      if (ie->indirect_info->vptr_changed)
	context.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
					      ie->indirect_info->otr_type);
    }
  else
    return NULL_TREE;

  vec <cgraph_node *>targets;
  bool final;

  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     context, &final);
  if (!final || targets.length () > 1)
    {
      /* The target list is not conclusive; try speculative
	 devirtualization instead.  */
      struct cgraph_node *node;
      if (*speculative)
	return target;
      if (!opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	  || ie->speculative || !ie->maybe_hot_p ())
	return NULL;
      node = try_speculative_devirtualization (ie->indirect_info->otr_type,
					       ie->indirect_info->otr_token,
					       context);
      if (node)
	{
	  *speculative = true;
	  target = node->decl;
	}
      else
	return NULL;
    }
  else
    {
      *speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }

  if (target && !possible_polymorphic_call_target_p (ie,
						     cgraph_node::get (target)))
    {
      if (*speculative)
	return NULL;
      target = ipa_impossible_devirt_target (ie, target);
    }

  return target;
}
/* If an indirect edge IE can be turned into a direct one based on KNOWN_CSTS,
   KNOWN_CONTEXTS (which can be vNULL) or KNOWN_AGGS (which also can be vNULL)
   return the destination.  Thin wrapper over the worker that passes no
   aggregate replacement values.  */

tree
ipa_get_indirect_edge_target (struct cgraph_edge *ie,
			      vec<tree> known_csts,
			      vec<ipa_polymorphic_call_context> known_contexts,
			      vec<ipa_agg_jump_function_p> known_aggs,
			      bool *speculative)
{
  return ipa_get_indirect_edge_target_1 (ie, known_csts, known_contexts,
					 known_aggs, NULL, speculative);
}
/* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
   and KNOWN_CONTEXTS.  The bonus grows with the number of indirect calls that
   could be devirtualized and with the inlinability of their targets.  */

static int
devirtualization_time_bonus (struct cgraph_node *node,
			     vec<tree> known_csts,
			     vec<ipa_polymorphic_call_context> known_contexts,
			     vec<ipa_agg_jump_function_p> known_aggs)
{
  struct cgraph_edge *ie;
  int res = 0;

  for (ie = node->indirect_calls; ie; ie = ie->next_callee)
    {
      struct cgraph_node *callee;
      struct ipa_fn_summary *isummary;
      enum availability avail;
      tree target;
      bool speculative;

      target = ipa_get_indirect_edge_target (ie, known_csts, known_contexts,
					     known_aggs, &speculative);
      if (!target)
	continue;

      /* Only bare minimum benefit for clearly un-inlineable targets.  */
      res += 1;
      callee = cgraph_node::get (target);
      if (!callee || !callee->definition)
	continue;
      callee = callee->function_symbol (&avail);
      if (avail < AVAIL_AVAILABLE)
	continue;
      isummary = ipa_fn_summaries->get (callee);
      if (!isummary->inlinable)
	continue;

      /* FIXME: The values below need re-considering and perhaps also
	 integrating into the cost metrics, at least in some very basic way.
	 Smaller (more inlinable) targets get a bigger bonus; speculative
	 devirtualization is worth half of a guaranteed one.  */
      if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
	res += 31 / ((int)speculative + 1);
      else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
	res += 15 / ((int)speculative + 1);
      else if (isummary->size <= MAX_INLINE_INSNS_AUTO
	       || DECL_DECLARED_INLINE_P (callee->decl))
	res += 7 / ((int)speculative + 1);
    }

  return res;
}
2582 /* Return time bonus incurred because of HINTS. */
2584 static int
2585 hint_time_bonus (ipa_hints hints)
2587 int result = 0;
2588 if (hints & (INLINE_HINT_loop_iterations | INLINE_HINT_loop_stride))
2589 result += PARAM_VALUE (PARAM_IPA_CP_LOOP_HINT_BONUS);
2590 if (hints & INLINE_HINT_array_index)
2591 result += PARAM_VALUE (PARAM_IPA_CP_ARRAY_INDEX_HINT_BONUS);
2592 return result;
2595 /* If there is a reason to penalize the function described by INFO in the
2596 cloning goodness evaluation, do so. */
2598 static inline int64_t
2599 incorporate_penalties (ipa_node_params *info, int64_t evaluation)
2601 if (info->node_within_scc)
2602 evaluation = (evaluation
2603 * (100 - PARAM_VALUE (PARAM_IPA_CP_RECURSION_PENALTY))) / 100;
2605 if (info->node_calling_single_call)
2606 evaluation = (evaluation
2607 * (100 - PARAM_VALUE (PARAM_IPA_CP_SINGLE_CALL_PENALTY)))
2608 / 100;
2610 return evaluation;
/* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
   and SIZE_COST and with the sum of frequencies of incoming edges to the
   potential new clone in FREQ_SUM and the profile count sum in COUNT_SUM.  */

static bool
good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
			    int freq_sum, profile_count count_sum, int size_cost)
{
  /* No benefit, cloning disabled, or optimizing for size: never clone.  */
  if (time_benefit == 0
      || !opt_for_fn (node->decl, flag_ipa_cp_clone)
      || node->optimize_for_size_p ())
    return false;

  gcc_assert (size_cost > 0);

  struct ipa_node_params *info = IPA_NODE_REF (node);
  if (max_count > profile_count::zero ())
    {
      /* Profile data available: weigh the benefit by how hot the clone's
	 callers are relative to the hottest node in the unit.  */
      int factor = RDIV (count_sum.probability_in
			 (max_count).to_reg_br_prob_base ()
			 * 1000, REG_BR_PROB_BASE);
      int64_t evaluation = (((int64_t) time_benefit * factor)
			    / size_cost);
      evaluation = incorporate_penalties (info, evaluation);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "     good_cloning_opportunity_p (time: %i, "
		   "size: %i, count_sum: ", time_benefit, size_cost);
	  count_sum.dump (dump_file);
	  fprintf (dump_file, "%s%s) -> evaluation: " "%" PRId64
		   ", threshold: %i\n",
		   info->node_within_scc ? ", scc" : "",
		   info->node_calling_single_call ? ", single_call" : "",
		   evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
	}

      return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
    }
  else
    {
      /* No profile: weigh the benefit by edge frequencies instead.  */
      int64_t evaluation = (((int64_t) time_benefit * freq_sum)
			    / size_cost);
      evaluation = incorporate_penalties (info, evaluation);

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "     good_cloning_opportunity_p (time: %i, "
		 "size: %i, freq_sum: %i%s%s) -> evaluation: "
		 "%" PRId64 ", threshold: %i\n",
		 time_benefit, size_cost, freq_sum,
		 info->node_within_scc ? ", scc" : "",
		 info->node_calling_single_call ? ", single_call" : "",
		 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));

      return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
    }
}
2671 /* Return all context independent values from aggregate lattices in PLATS in a
2672 vector. Return NULL if there are none. */
2674 static vec<ipa_agg_jf_item, va_gc> *
2675 context_independent_aggregate_values (struct ipcp_param_lattices *plats)
2677 vec<ipa_agg_jf_item, va_gc> *res = NULL;
2679 if (plats->aggs_bottom
2680 || plats->aggs_contain_variable
2681 || plats->aggs_count == 0)
2682 return NULL;
2684 for (struct ipcp_agg_lattice *aglat = plats->aggs;
2685 aglat;
2686 aglat = aglat->next)
2687 if (aglat->is_single_const ())
2689 struct ipa_agg_jf_item item;
2690 item.offset = aglat->offset;
2691 item.value = aglat->values->value;
2692 vec_safe_push (res, item);
2694 return res;
/* Allocate KNOWN_CSTS, KNOWN_CONTEXTS and, if non-NULL, KNOWN_AGGS and
   populate them with values of parameters that are known independent of the
   context.  INFO describes the function.  If REMOVABLE_PARAMS_COST is
   non-NULL, the movement cost of all removable parameters will be stored in
   it.  Return true if any context-independent value was found.  */

static bool
gather_context_independent_values (struct ipa_node_params *info,
				   vec<tree> *known_csts,
				   vec<ipa_polymorphic_call_context>
				   *known_contexts,
				   vec<ipa_agg_jump_function> *known_aggs,
				   int *removable_params_cost)
{
  int i, count = ipa_get_param_count (info);
  bool ret = false;

  /* One slot per parameter, zero-initialized.  */
  known_csts->create (0);
  known_contexts->create (0);
  known_csts->safe_grow_cleared (count);
  known_contexts->safe_grow_cleared (count);
  if (known_aggs)
    {
      known_aggs->create (0);
      known_aggs->safe_grow_cleared (count);
    }

  if (removable_params_cost)
    *removable_params_cost = 0;

  for (i = 0; i < count; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      ipcp_lattice<tree> *lat = &plats->itself;

      if (lat->is_single_const ())
	{
	  ipcp_value<tree> *val = lat->values;
	  gcc_checking_assert (TREE_CODE (val->value) != TREE_BINFO);
	  (*known_csts)[i] = val->value;
	  /* A parameter with a known constant can be removed; account the
	     cost of moving the constant instead.  */
	  if (removable_params_cost)
	    *removable_params_cost
	      += estimate_move_cost (TREE_TYPE (val->value), false);
	  ret = true;
	}
      else if (removable_params_cost
	       && !ipa_is_param_used (info, i))
	/* An unused parameter can be removed for free.  */
	*removable_params_cost
	  += ipa_get_param_move_cost (info, i);

      if (!ipa_is_param_used (info, i))
	continue;

      ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
      /* Do not account known context as reason for cloning.  We can see
	 if it permits devirtualization.  */
      if (ctxlat->is_single_const ())
	(*known_contexts)[i] = ctxlat->values->value;

      if (known_aggs)
	{
	  vec<ipa_agg_jf_item, va_gc> *agg_items;
	  struct ipa_agg_jump_function *ajf;

	  agg_items = context_independent_aggregate_values (plats);
	  ajf = &(*known_aggs)[i];
	  ajf->items = agg_items;
	  ajf->by_ref = plats->aggs_by_ref;
	  ret |= agg_items != NULL;
	}
    }

  return ret;
}
2772 /* The current interface in ipa-inline-analysis requires a pointer vector.
2773 Create it.
2775 FIXME: That interface should be re-worked, this is slightly silly. Still,
2776 I'd like to discuss how to change it first and this demonstrates the
2777 issue. */
2779 static vec<ipa_agg_jump_function_p>
2780 agg_jmp_p_vec_for_t_vec (vec<ipa_agg_jump_function> known_aggs)
2782 vec<ipa_agg_jump_function_p> ret;
2783 struct ipa_agg_jump_function *ajf;
2784 int i;
2786 ret.create (known_aggs.length ());
2787 FOR_EACH_VEC_ELT (known_aggs, i, ajf)
2788 ret.quick_push (ajf);
2789 return ret;
/* Perform time and size measurement of NODE with the context given in
   KNOWN_CSTS, KNOWN_CONTEXTS and KNOWN_AGGS_PTRS, calculate the benefit and
   cost given BASE_TIME of the node without specialization,
   REMOVABLE_PARAMS_COST of all context-independent removable parameters and
   EST_MOVE_COST of estimated movement of the considered parameter and store
   the result into VAL.  */

static void
perform_estimation_of_a_value (cgraph_node *node, vec<tree> known_csts,
			       vec<ipa_polymorphic_call_context> known_contexts,
			       vec<ipa_agg_jump_function_p> known_aggs_ptrs,
			       int removable_params_cost,
			       int est_move_cost, ipcp_value_base *val)
{
  int size, time_benefit;
  sreal time, base_time;
  ipa_hints hints;

  estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
				     known_aggs_ptrs, &size, &time,
				     &base_time, &hints);
  /* The raw benefit is the time saved by specialization, capped so it fits
     the integer time-benefit fields.  */
  base_time -= time;
  if (base_time > 65535)
    base_time = 65535;
  time_benefit = base_time.to_int ()
    + devirtualization_time_bonus (node, known_csts, known_contexts,
				   known_aggs_ptrs)
    + hint_time_bonus (hints)
    + removable_params_cost + est_move_cost;

  gcc_checking_assert (size >=0);
  /* The inliner-heuristics based estimates may think that in certain
     contexts some functions do not have any size at all but we want
     all specializations to have at least a tiny cost, not least not to
     divide by zero.  */
  if (size == 0)
    size = 1;

  val->local_time_benefit = time_benefit;
  val->local_size_cost = size;
}
/* Iterate over known values of parameters of NODE and estimate the local
   effects in terms of time and size they have.  First evaluate cloning for
   all context-independent values at once, then evaluate each individual
   scalar value, polymorphic context and aggregate value separately.  */

static void
estimate_local_effects (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);
  vec<tree> known_csts;
  vec<ipa_polymorphic_call_context> known_contexts;
  vec<ipa_agg_jump_function> known_aggs;
  vec<ipa_agg_jump_function_p> known_aggs_ptrs;
  bool always_const;
  int removable_params_cost;

  if (!count || !ipcp_versionable_function_p (node))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEstimating effects for %s.\n", node->dump_name ());

  always_const = gather_context_independent_values (info, &known_csts,
						    &known_contexts, &known_aggs,
						    &removable_params_cost);
  known_aggs_ptrs = agg_jmp_p_vec_for_t_vec (known_aggs);
  int devirt_bonus = devirtualization_time_bonus (node, known_csts,
					   known_contexts, known_aggs_ptrs);

  /* Stage 1: consider specializing for all known contexts at once if there
     is anything context-independent to gain from it.  */
  if (always_const || devirt_bonus
      || (removable_params_cost && node->local.can_change_signature))
    {
      struct caller_statistics stats;
      ipa_hints hints;
      sreal time, base_time;
      int size;

      init_caller_stats (&stats);
      node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
					      false);
      estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
					 known_aggs_ptrs, &size, &time,
					 &base_time, &hints);
      time -= devirt_bonus;
      time -= hint_time_bonus (hints);
      time -= removable_params_cost;
      size -= stats.n_calls * removable_params_cost;

      if (dump_file)
	fprintf (dump_file, " - context independent values, size: %i, "
		 "time_benefit: %f\n", size, (base_time - time).to_double ());

      if (size <= 0 || node->local.local)
	{
	  info->do_clone_for_all_contexts = true;

	  if (dump_file)
	    fprintf (dump_file, "     Decided to specialize for all "
		     "known contexts, code not going to grow.\n");
	}
      else if (good_cloning_opportunity_p (node,
					   MIN ((base_time - time).to_int (),
						65536),
					   stats.freq_sum, stats.count_sum,
					   size))
	{
	  /* Cloning pays off, but only if we stay within the unit growth
	     budget.  */
	  if (size + overall_size <= max_new_size)
	    {
	      info->do_clone_for_all_contexts = true;
	      overall_size += size;

	      if (dump_file)
		fprintf (dump_file, "     Decided to specialize for all "
			 "known contexts, growth deemed beneficial.\n");
	    }
	  else if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "   Not cloning for all contexts because "
		     "max_new_size would be reached with %li.\n",
		     size + overall_size);
	}
      else if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "   Not cloning for all contexts because "
		 "!good_cloning_opportunity_p.\n");

    }

  /* Stage 2: estimate each candidate scalar constant individually, by
     temporarily plugging it into known_csts.  */
  for (i = 0; i < count; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      ipcp_lattice<tree> *lat = &plats->itself;
      ipcp_value<tree> *val;

      if (lat->bottom
	  || !lat->values
	  || known_csts[i])
	continue;

      for (val = lat->values; val; val = val->next)
	{
	  gcc_checking_assert (TREE_CODE (val->value) != TREE_BINFO);
	  known_csts[i] = val->value;

	  int emc = estimate_move_cost (TREE_TYPE (val->value), true);
	  perform_estimation_of_a_value (node, known_csts, known_contexts,
					 known_aggs_ptrs,
					 removable_params_cost, emc, val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " - estimates for value ");
	      print_ipcp_constant_value (dump_file, val->value);
	      fprintf (dump_file, " for ");
	      ipa_dump_param (dump_file, info, i);
	      fprintf (dump_file, ": time_benefit: %i, size: %i\n",
		       val->local_time_benefit, val->local_size_cost);
	    }
	}
      known_csts[i] = NULL_TREE;
    }

  /* Stage 3: estimate each candidate polymorphic context, but only for
     parameters actually used in virtual calls.  */
  for (i = 0; i < count; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);

      if (!plats->virt_call)
	continue;

      ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
      ipcp_value<ipa_polymorphic_call_context> *val;

      if (ctxlat->bottom
	  || !ctxlat->values
	  || !known_contexts[i].useless_p ())
	continue;

      for (val = ctxlat->values; val; val = val->next)
	{
	  known_contexts[i] = val->value;
	  perform_estimation_of_a_value (node, known_csts, known_contexts,
					 known_aggs_ptrs,
					 removable_params_cost, 0, val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " - estimates for polymorphic context ");
	      print_ipcp_constant_value (dump_file, val->value);
	      fprintf (dump_file, " for ");
	      ipa_dump_param (dump_file, info, i);
	      fprintf (dump_file, ": time_benefit: %i, size: %i\n",
		       val->local_time_benefit, val->local_size_cost);
	    }
	}
      known_contexts[i] = ipa_polymorphic_call_context ();
    }

  /* Stage 4: estimate each candidate aggregate value, by temporarily pushing
     it into the known aggregate jump function and popping it afterwards.  */
  for (i = 0; i < count; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      struct ipa_agg_jump_function *ajf;
      struct ipcp_agg_lattice *aglat;

      if (plats->aggs_bottom || !plats->aggs)
	continue;

      ajf = &known_aggs[i];
      for (aglat = plats->aggs; aglat; aglat = aglat->next)
	{
	  ipcp_value<tree> *val;
	  if (aglat->bottom || !aglat->values
	      /* If the following is true, the one value is in known_aggs.  */
	      || (!plats->aggs_contain_variable
		  && aglat->is_single_const ()))
	    continue;

	  for (val = aglat->values; val; val = val->next)
	    {
	      struct ipa_agg_jf_item item;

	      item.offset = aglat->offset;
	      item.value = val->value;
	      vec_safe_push (ajf->items, item);

	      perform_estimation_of_a_value (node, known_csts, known_contexts,
					     known_aggs_ptrs,
					     removable_params_cost, 0, val);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, " - estimates for value ");
		  print_ipcp_constant_value (dump_file, val->value);
		  fprintf (dump_file, " for ");
		  ipa_dump_param (dump_file, info, i);
		  fprintf (dump_file, "[%soffset: " HOST_WIDE_INT_PRINT_DEC
			   "]: time_benefit: %i, size: %i\n",
			   plats->aggs_by_ref ? "ref " : "",
			   aglat->offset,
			   val->local_time_benefit, val->local_size_cost);
		}

	      ajf->items->pop ();
	    }
	}
    }

  for (i = 0; i < count; i++)
    vec_free (known_aggs[i].items);

  known_csts.release ();
  known_contexts.release ();
  known_aggs.release ();
  known_aggs_ptrs.release ();
}
/* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
   topological sort of values.  This is Tarjan's strongly connected components
   algorithm: DFS/LOW_LINK numbers identify SCC roots, and each completed SCC
   is linked (via scc_next) and prepended to VALUES_TOPO.  */

template <typename valtype>
void
value_topo_info<valtype>::add_val (ipcp_value<valtype> *cur_val)
{
  ipcp_value_source<valtype> *src;

  /* Non-zero dfs means the value has already been visited.  */
  if (cur_val->dfs)
    return;

  dfs_counter++;
  cur_val->dfs = dfs_counter;
  cur_val->low_link = dfs_counter;

  /* Push onto the Tarjan stack (threaded through topo_next).  */
  cur_val->topo_next = stack;
  stack = cur_val;
  cur_val->on_stack = true;

  /* Recurse into the values this one was derived from.  */
  for (src = cur_val->sources; src; src = src->next)
    if (src->val)
      {
	if (src->val->dfs == 0)
	  {
	    add_val (src->val);
	    if (src->val->low_link < cur_val->low_link)
	      cur_val->low_link = src->val->low_link;
	  }
	else if (src->val->on_stack
		 && src->val->dfs < cur_val->low_link)
	  cur_val->low_link = src->val->dfs;
      }

  /* CUR_VAL is the root of an SCC: pop the whole component off the stack,
     chain it through scc_next and prepend it to the topological order.  */
  if (cur_val->dfs == cur_val->low_link)
    {
      ipcp_value<valtype> *v, *scc_list = NULL;

      do
	{
	  v = stack;
	  stack = v->topo_next;
	  v->on_stack = false;

	  v->scc_next = scc_list;
	  scc_list = v;
	}
      while (v != cur_val);

      cur_val->topo_next = values_topo;
      values_topo = cur_val;
    }
}
3099 /* Add all values in lattices associated with NODE to the topological sort if
3100 they are not there yet. */
3102 static void
3103 add_all_node_vals_to_toposort (cgraph_node *node, ipa_topo_info *topo)
3105 struct ipa_node_params *info = IPA_NODE_REF (node);
3106 int i, count = ipa_get_param_count (info);
3108 for (i = 0; i < count; i++)
3110 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
3111 ipcp_lattice<tree> *lat = &plats->itself;
3112 struct ipcp_agg_lattice *aglat;
3114 if (!lat->bottom)
3116 ipcp_value<tree> *val;
3117 for (val = lat->values; val; val = val->next)
3118 topo->constants.add_val (val);
3121 if (!plats->aggs_bottom)
3122 for (aglat = plats->aggs; aglat; aglat = aglat->next)
3123 if (!aglat->bottom)
3125 ipcp_value<tree> *val;
3126 for (val = aglat->values; val; val = val->next)
3127 topo->constants.add_val (val);
3130 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
3131 if (!ctxlat->bottom)
3133 ipcp_value<ipa_polymorphic_call_context> *ctxval;
3134 for (ctxval = ctxlat->values; ctxval; ctxval = ctxval->next)
3135 topo->contexts.add_val (ctxval);
/* One pass of constants propagation along the call graph edges, from callers
   to callees (requires topological ordering in TOPO), iterating over strongly
   connected components.  */

static void
propagate_constants_topo (struct ipa_topo_info *topo)
{
  int i;

  /* Walk SCCs from callers towards callees.  */
  for (i = topo->nnodes - 1; i >= 0; i--)
    {
      unsigned j;
      struct cgraph_node *v, *node = topo->order[i];
      vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);

      /* First, iteratively propagate within the strongly connected component
	 until all lattices stabilize.  */
      FOR_EACH_VEC_ELT (cycle_nodes, j, v)
	if (v->has_gimple_body_p ())
	  push_node_to_stack (topo, v);

      v = pop_node_from_stack (topo);
      while (v)
	{
	  struct cgraph_edge *cs;

	  for (cs = v->callees; cs; cs = cs->next_callee)
	    if (ipa_edge_within_scc (cs))
	      {
		IPA_NODE_REF (v)->node_within_scc = true;
		/* If the callee's lattices changed, it must be revisited.  */
		if (propagate_constants_across_call (cs))
		  push_node_to_stack (topo, cs->callee->function_symbol ());
	      }
	  v = pop_node_from_stack (topo);
	}

      /* Afterwards, propagate along edges leading out of the SCC, calculate
	 the local effects of the discovered constants and add all their
	 values to the topological sort.  */
      FOR_EACH_VEC_ELT (cycle_nodes, j, v)
	if (v->has_gimple_body_p ())
	  {
	    struct cgraph_edge *cs;

	    estimate_local_effects (v);
	    add_all_node_vals_to_toposort (v, topo);
	    for (cs = v->callees; cs; cs = cs->next_callee)
	      if (!ipa_edge_within_scc (cs))
		propagate_constants_across_call (cs);
	  }
      cycle_nodes.release ();
    }
}
/* Return the sum of A and B if neither is bigger than INT_MAX/2; otherwise
   saturate by returning the bigger of the two, avoiding signed overflow.  */

static int
safe_add (int a, int b)
{
  if (a <= INT_MAX / 2 && b <= INT_MAX / 2)
    return a + b;
  return a > b ? a : b;
}
/* Propagate the estimated effects of individual values along the topological
   order, from the dependent values to those they depend on.  Values in one
   SCC share a combined time/size estimate, which is then added to the
   propagated estimates of their sources across hot edges.  */

template <typename valtype>
void
value_topo_info<valtype>::propagate_effects ()
{
  ipcp_value<valtype> *base;

  for (base = values_topo; base; base = base->topo_next)
    {
      ipcp_value_source<valtype> *src;
      ipcp_value<valtype> *val;
      int time = 0, size = 0;

      /* Accumulate the local and already-propagated effects over the whole
	 SCC (chained through scc_next).  safe_add saturates rather than
	 overflowing.  */
      for (val = base; val; val = val->scc_next)
	{
	  time = safe_add (time,
			   val->local_time_benefit + val->prop_time_benefit);
	  size = safe_add (size, val->local_size_cost + val->prop_size_cost);
	}

      /* Hand the accumulated effects to every source value reached over a
	 maybe-hot call graph edge.  */
      for (val = base; val; val = val->scc_next)
	for (src = val->sources; src; src = src->next)
	  if (src->val
	      && src->cs->maybe_hot_p ())
	    {
	      src->val->prop_time_benefit = safe_add (time,
						      src->val->prop_time_benefit);
	      src->val->prop_size_cost = safe_add (size,
						   src->val->prop_size_cost);
	    }
    }
}
/* Propagate constants, polymorphic contexts and their effects from the
   summaries interprocedurally.  Initializes per-node lattices, accumulates
   the unit size and profile-count bounds used later for cloning decisions,
   then runs the topological propagation.  */

static void
ipcp_propagate_stage (struct ipa_topo_info *topo)
{
  struct cgraph_node *node;

  if (dump_file)
    fprintf (dump_file, "\n Propagating constants:\n\n");

  max_count = profile_count::uninitialized ();

  FOR_EACH_DEFINED_FUNCTION (node)
  {
    struct ipa_node_params *info = IPA_NODE_REF (node);

    determine_versionability (node, info);
    if (node->has_gimple_body_p ())
      {
	/* Allocate one zero-initialized lattice set per formal parameter.  */
	info->lattices = XCNEWVEC (struct ipcp_param_lattices,
				   ipa_get_param_count (info));
	initialize_node_lattices (node);
      }
    ipa_fn_summary *s = ipa_fn_summaries->get (node);
    if (node->definition && !node->alias && s != NULL)
      overall_size += s->self_size;
    max_count = max_count.max (node->count.ipa ());
  }

  /* Derive the unit-growth budget for cloning from the current unit size and
     the --param knobs; the "+ 1" keeps the budget nonzero for tiny units.  */
  max_new_size = overall_size;
  if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
    max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
  max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;

  if (dump_file)
    fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
	     overall_size, max_new_size);

  propagate_constants_topo (topo);
  if (flag_checking)
    ipcp_verify_propagated_values ();
  topo->constants.propagate_effects ();
  topo->contexts.propagate_effects ();

  if (dump_file)
    {
      fprintf (dump_file, "\nIPA lattices after all propagation:\n");
      print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
    }
}
/* Discover newly direct outgoing edges from NODE which is a new clone with
   known KNOWN_CSTS and make them direct.  Also decrement the controlled-uses
   counter of a parameter whose indirect call became direct, and drop the
   cloning-created reference once the counter reaches zero.  */

static void
ipcp_discover_new_direct_edges (struct cgraph_node *node,
				vec<tree> known_csts,
				vec<ipa_polymorphic_call_context>
				known_contexts,
				struct ipa_agg_replacement_value *aggvals)
{
  struct cgraph_edge *ie, *next_ie;
  bool found = false;

  /* ipa_make_edge_direct_to_target can remove IE from the list, so fetch the
     next edge before processing the current one.  */
  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      tree target;
      bool speculative;

      next_ie = ie->next_callee;
      target = ipa_get_indirect_edge_target_1 (ie, known_csts, known_contexts,
					       vNULL, aggvals, &speculative);
      if (target)
	{
	  /* Save the fields we need before IE is turned into a direct edge
	     (which may invalidate indirect_info).  */
	  bool agg_contents = ie->indirect_info->agg_contents;
	  bool polymorphic = ie->indirect_info->polymorphic;
	  int param_index = ie->indirect_info->param_index;
	  struct cgraph_edge *cs = ipa_make_edge_direct_to_target (ie, target,
								   speculative);
	  found = true;

	  if (cs && !agg_contents && !polymorphic)
	    {
	      struct ipa_node_params *info = IPA_NODE_REF (node);
	      int c = ipa_get_controlled_uses (info, param_index);
	      if (c != IPA_UNDESCRIBED_USE)
		{
		  struct ipa_ref *to_del;

		  c--;
		  ipa_set_controlled_uses (info, param_index, c);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, " controlled uses count of param "
			     "%i bumped down to %i\n", param_index, c);
		  if (c == 0
		      && (to_del = node->find_reference (cs->callee, NULL, 0)))
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file, " and even removing its "
				 "cloning-created reference\n");
		      to_del->remove_reference ();
		    }
		}
	    }
	}
    }
  /* Turning calls to direct calls will improve overall summary.  */
  if (found)
    ipa_update_overall_fn_summary (node);
}
class edge_clone_summary;
/* Global map from call-graph edges to their clone-chain summaries; created
   and destroyed by the IPA-CP pass (outside this excerpt).  */
static call_summary <edge_clone_summary *> *edge_clone_summaries = NULL;

/* Edge clone summary.  Links an edge into a doubly-linked chain of its
   clones so that all clones of an edge can be traversed.  */

struct edge_clone_summary
{
  /* Default constructor.  */
  edge_clone_summary (): prev_clone (NULL), next_clone (NULL) {}

  /* Default destructor.  Unlinks this edge from the clone chain by splicing
     its neighbors together through the global summary map.  */
  ~edge_clone_summary ()
  {
    if (prev_clone)
      edge_clone_summaries->get (prev_clone)->next_clone = next_clone;
    if (next_clone)
      edge_clone_summaries->get (next_clone)->prev_clone = prev_clone;
  }

  /* Neighboring edges in the chain of clones; NULL at either end.  */
  cgraph_edge *prev_clone;
  cgraph_edge *next_clone;
};
/* Call summary class that maintains edge_clone_summary entries and keeps the
   clone chains up to date when edges are duplicated.  */

class edge_clone_summary_t:
  public call_summary <edge_clone_summary *>
{
public:
  edge_clone_summary_t (symbol_table *symtab):
    call_summary <edge_clone_summary *> (symtab)
  {
    /* Request that a fresh summary is created for a clone even when its
       origin had none, so duplicate () always has both summaries.  */
    m_initialize_when_cloning = true;
  }

  virtual void duplicate (cgraph_edge *src_edge, cgraph_edge *dst_edge,
			  edge_clone_summary *src_data,
			  edge_clone_summary *dst_data);
};
/* Edge duplication hook.  Insert DST_EDGE into the clone chain immediately
   after SRC_EDGE, fixing up the neighbors' back-pointers.  */

void
edge_clone_summary_t::duplicate (cgraph_edge *src_edge, cgraph_edge *dst_edge,
				 edge_clone_summary *src_data,
				 edge_clone_summary *dst_data)
{
  /* The old successor of SRC_EDGE (if any) must now point back at the newly
     inserted DST_EDGE; update it before rewiring the forward links.  */
  if (src_data->next_clone)
    edge_clone_summaries->get (src_data->next_clone)->prev_clone = dst_edge;
  dst_data->prev_clone = src_edge;
  dst_data->next_clone = src_data->next_clone;
  src_data->next_clone = dst_edge;
}
3408 /* See if NODE is a clone with a known aggregate value at a given OFFSET of a
3409 parameter with the given INDEX. */
3411 static tree
3412 get_clone_agg_value (struct cgraph_node *node, HOST_WIDE_INT offset,
3413 int index)
3415 struct ipa_agg_replacement_value *aggval;
3417 aggval = ipa_get_agg_replacements_for_node (node);
3418 while (aggval)
3420 if (aggval->offset == offset
3421 && aggval->index == index)
3422 return aggval->value;
3423 aggval = aggval->next;
3425 return NULL_TREE;
3428 /* Return true is NODE is DEST or its clone for all contexts. */
3430 static bool
3431 same_node_or_its_all_contexts_clone_p (cgraph_node *node, cgraph_node *dest)
3433 if (node == dest)
3434 return true;
3436 struct ipa_node_params *info = IPA_NODE_REF (node);
3437 return info->is_all_contexts_clone && info->ipcp_orig_node == dest;
/* Return true if edge CS does bring about the value described by SRC to
   DEST_VAL of node DEST or its clone for all contexts.  */

static bool
cgraph_edge_brings_value_p (cgraph_edge *cs, ipcp_value_source<tree> *src,
			    cgraph_node *dest, ipcp_value<tree> *dest_val)
{
  struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
  enum availability availability;
  cgraph_node *real_dest = cs->callee->function_symbol (&availability);

  /* The edge is irrelevant if it does not actually lead to DEST (or an
     all-contexts clone of it), if the callee can be interposed, or if the
     caller has already been marked dead.  */
  if (!same_node_or_its_all_contexts_clone_p (real_dest, dest)
      || availability <= AVAIL_INTERPOSABLE
      || caller_info->node_dead)
    return false;

  /* A NULL source value means the destination value does not depend on any
     particular caller value, so any live edge brings it.  */
  if (!src->val)
    return true;

  if (caller_info->ipcp_orig_node)
    {
      /* The caller is itself an IPA-CP clone; check the value recorded for
	 it at cloning time (scalar or aggregate, depending on offset).  */
      tree t;
      if (src->offset == -1)
	t = caller_info->known_csts[src->index];
      else
	t = get_clone_agg_value (cs->caller, src->offset, src->index);
      return (t != NULL_TREE
	      && values_equal_for_ipcp_p (src->val->value, t));
    }
  else
    {
      /* At the moment we do not propagate over arithmetic jump functions in
	 SCCs, so it is safe to detect self-feeding recursive calls in this
	 way.  */
      if (src->val == dest_val)
	return true;

      /* Otherwise consult the caller's lattices: the edge brings the value
	 only if the relevant lattice holds exactly that single constant.  */
      struct ipcp_agg_lattice *aglat;
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
								 src->index);
      if (src->offset == -1)
	return (plats->itself.is_single_const ()
		&& values_equal_for_ipcp_p (src->val->value,
					    plats->itself.values->value));
      else
	{
	  if (plats->aggs_bottom || plats->aggs_contain_variable)
	    return false;
	  for (aglat = plats->aggs; aglat; aglat = aglat->next)
	    if (aglat->offset == src->offset)
	      return  (aglat->is_single_const ()
		       && values_equal_for_ipcp_p (src->val->value,
						   aglat->values->value));
	}
      return false;
    }
}
/* Return true if edge CS does bring about the value described by SRC to
   DST_VAL of node DEST or its clone for all contexts.  Polymorphic-context
   counterpart of the overload above; availability is not checked here.  */

static bool
cgraph_edge_brings_value_p (cgraph_edge *cs,
			    ipcp_value_source<ipa_polymorphic_call_context>
			    *src,
			    cgraph_node *dest,
			    ipcp_value<ipa_polymorphic_call_context> *)
{
  struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
  cgraph_node *real_dest = cs->callee->function_symbol ();

  if (!same_node_or_its_all_contexts_clone_p (real_dest, dest)
      || caller_info->node_dead)
    return false;
  /* A NULL source value means the context does not depend on a particular
     caller value, so any live edge suffices.  */
  if (!src->val)
    return true;

  /* For an IPA-CP clone caller, check the context recorded at cloning time;
     otherwise require the caller's lattice to hold exactly this context.  */
  if (caller_info->ipcp_orig_node)
    return (caller_info->known_contexts.length () > (unsigned) src->index)
      && values_equal_for_ipcp_p (src->val->value,
				  caller_info->known_contexts[src->index]);

  struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
							     src->index);
  return plats->ctxlat.is_single_const ()
    && values_equal_for_ipcp_p (src->val->value,
				plats->ctxlat.values->value);
}
3528 /* Get the next clone in the linked list of clones of an edge. */
3530 static inline struct cgraph_edge *
3531 get_next_cgraph_edge_clone (struct cgraph_edge *cs)
3533 edge_clone_summary *s = edge_clone_summaries->get (cs);
3534 return s != NULL ? s->next_clone : NULL;
/* Given VAL that is intended for DEST, iterate over all its sources and if any
   of them is viable and hot, return true.  In that case, for those that still
   hold, add their edge frequency and their number into *FREQ_SUM and
   *CALLER_COUNT respectively, and their profile counts into *COUNT_SUM.
   Returns false as well when the only edges bringing VAL are self-recursive,
   since specializing for such a value would be pointless.  */

template <typename valtype>
static bool
get_info_about_necessary_edges (ipcp_value<valtype> *val, cgraph_node *dest,
				int *freq_sum,
				profile_count *count_sum, int *caller_count)
{
  ipcp_value_source<valtype> *src;
  int freq = 0, count = 0;
  profile_count cnt = profile_count::zero ();
  bool hot = false;
  bool non_self_recursive = false;

  for (src = val->sources; src; src = src->next)
    {
      /* Walk the original source edge and all of its clones.  */
      struct cgraph_edge *cs = src->cs;
      while (cs)
	{
	  if (cgraph_edge_brings_value_p (cs, src, dest, val))
	    {
	      count++;
	      freq += cs->frequency ();
	      if (cs->count.ipa ().initialized_p ())
	        cnt += cs->count.ipa ();
	      hot |= cs->maybe_hot_p ();
	      if (cs->caller != dest)
		non_self_recursive = true;
	    }
	  cs = get_next_cgraph_edge_clone (cs);
	}
    }

  /* If the only edges bringing a value are self-recursive ones, do not bother
     evaluating it.  */
  if (!non_self_recursive)
    return false;

  *freq_sum = freq;
  *count_sum = cnt;
  *caller_count = count;
  return hot;
}
3584 /* Return a vector of incoming edges that do bring value VAL to node DEST. It
3585 is assumed their number is known and equal to CALLER_COUNT. */
3587 template <typename valtype>
3588 static vec<cgraph_edge *>
3589 gather_edges_for_value (ipcp_value<valtype> *val, cgraph_node *dest,
3590 int caller_count)
3592 ipcp_value_source<valtype> *src;
3593 vec<cgraph_edge *> ret;
3595 ret.create (caller_count);
3596 for (src = val->sources; src; src = src->next)
3598 struct cgraph_edge *cs = src->cs;
3599 while (cs)
3601 if (cgraph_edge_brings_value_p (cs, src, dest, val))
3602 ret.quick_push (cs);
3603 cs = get_next_cgraph_edge_clone (cs);
3607 return ret;
3610 /* Construct a replacement map for a know VALUE for a formal parameter PARAM.
3611 Return it or NULL if for some reason it cannot be created. */
3613 static struct ipa_replace_map *
3614 get_replacement_map (struct ipa_node_params *info, tree value, int parm_num)
3616 struct ipa_replace_map *replace_map;
3619 replace_map = ggc_alloc<ipa_replace_map> ();
3620 if (dump_file)
3622 fprintf (dump_file, " replacing ");
3623 ipa_dump_param (dump_file, info, parm_num);
3625 fprintf (dump_file, " with const ");
3626 print_generic_expr (dump_file, value);
3627 fprintf (dump_file, "\n");
3629 replace_map->old_tree = NULL;
3630 replace_map->parm_num = parm_num;
3631 replace_map->new_tree = value;
3632 replace_map->replace_p = true;
3633 replace_map->ref_p = false;
3635 return replace_map;
3638 /* Dump new profiling counts */
3640 static void
3641 dump_profile_updates (struct cgraph_node *orig_node,
3642 struct cgraph_node *new_node)
3644 struct cgraph_edge *cs;
3646 fprintf (dump_file, " setting count of the specialized node to ");
3647 new_node->count.dump (dump_file);
3648 fprintf (dump_file, "\n");
3649 for (cs = new_node->callees; cs; cs = cs->next_callee)
3651 fprintf (dump_file, " edge to %s has count ",
3652 cs->callee->name ());
3653 cs->count.dump (dump_file);
3654 fprintf (dump_file, "\n");
3657 fprintf (dump_file, " setting count of the original node to ");
3658 orig_node->count.dump (dump_file);
3659 fprintf (dump_file, "\n");
3660 for (cs = orig_node->callees; cs; cs = cs->next_callee)
3662 fprintf (dump_file, " edge to %s is left with ",
3663 cs->callee->name ());
3664 cs->count.dump (dump_file);
3665 fprintf (dump_file, "\n");
/* After a specialized NEW_NODE version of ORIG_NODE has been created, update
   their profile information to reflect this.  The sum of the caller counts
   flowing into NEW_NODE is moved from ORIG_NODE's count to NEW_NODE, and the
   outgoing edge counts of both nodes are scaled accordingly.  */

static void
update_profiling_info (struct cgraph_node *orig_node,
		       struct cgraph_node *new_node)
{
  struct cgraph_edge *cs;
  struct caller_statistics stats;
  profile_count new_sum, orig_sum;
  profile_count remainder, orig_node_count = orig_node->count;

  /* Nothing to do without a meaningful positive IPA profile.  */
  if (!(orig_node_count.ipa () > profile_count::zero ()))
    return;

  init_caller_stats (&stats);
  orig_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
						 false);
  orig_sum = stats.count_sum;
  init_caller_stats (&stats);
  new_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
						false);
  new_sum = stats.count_sum;

  if (orig_node_count < orig_sum + new_sum)
    {
      /* The incoming edge counts exceed the node's own count, which can
	 happen with inconsistent profiles; pretend the node count was 20%
	 above the incoming sum so the scaling below stays sane.  */
      if (dump_file)
	{
	  fprintf (dump_file, " Problem: node %s has too low count ",
		   orig_node->dump_name ());
	  orig_node_count.dump (dump_file);
	  fprintf (dump_file, "while the sum of incoming count is ");
	  (orig_sum + new_sum).dump (dump_file);
	  fprintf (dump_file, "\n");
	}

      orig_node_count = (orig_sum + new_sum).apply_scale (12, 10);
      if (dump_file)
	{
	  fprintf (dump_file, " proceeding by pretending it was ");
	  orig_node_count.dump (dump_file);
	  fprintf (dump_file, "\n");
	}
    }

  /* The original node keeps whatever is not accounted for by the callers
     that were redirected to the new node.  */
  remainder = orig_node_count.combine_with_ipa_count (orig_node_count.ipa ()
						      - new_sum.ipa ());
  new_sum = orig_node_count.combine_with_ipa_count (new_sum);
  orig_node->count = remainder;

  for (cs = new_node->callees; cs; cs = cs->next_callee)
    cs->count = cs->count.apply_scale (new_sum, orig_node_count);

  for (cs = orig_node->callees; cs; cs = cs->next_callee)
    cs->count = cs->count.apply_scale (remainder, orig_node_count);

  if (dump_file)
    dump_profile_updates (orig_node, new_node);
}
/* Update the respective profile of specialized NEW_NODE and the original
   ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
   have been redirected to the specialized version.  */

static void
update_specialized_profile (struct cgraph_node *new_node,
			    struct cgraph_node *orig_node,
			    profile_count redirected_sum)
{
  struct cgraph_edge *cs;
  profile_count new_node_count, orig_node_count = orig_node->count;

  if (dump_file)
    {
      fprintf (dump_file, " the sum of counts of redirected edges is ");
      redirected_sum.dump (dump_file);
      fprintf (dump_file, "\n");
    }
  /* With no positive profile on the original node there is nothing to
     move between the two nodes.  */
  if (!(orig_node_count > profile_count::zero ()))
    return;

  gcc_assert (orig_node_count >= redirected_sum);

  /* Save the pre-update count of NEW_NODE: the edge scaling below must be
     relative to what the node's count was when its edges were set up.  */
  new_node_count = new_node->count;
  new_node->count += redirected_sum;
  orig_node->count -= redirected_sum;

  for (cs = new_node->callees; cs; cs = cs->next_callee)
    cs->count += cs->count.apply_scale (redirected_sum, new_node_count);

  for (cs = orig_node->callees; cs; cs = cs->next_callee)
    {
      profile_count dec = cs->count.apply_scale (redirected_sum,
						 orig_node_count);
      cs->count -= dec;
    }

  if (dump_file)
    dump_profile_updates (orig_node, new_node);
}
/* Create a specialized version of NODE with known constants in KNOWN_CSTS,
   known contexts in KNOWN_CONTEXTS and known aggregate values in AGGVALS and
   redirect all edges in CALLERS to it.  Takes ownership of CALLERS (the
   vector is released before returning) and stores KNOWN_CSTS/KNOWN_CONTEXTS
   into the new node's ipa_node_params.  Returns the new clone.  */

static struct cgraph_node *
create_specialized_node (struct cgraph_node *node,
			 vec<tree> known_csts,
			 vec<ipa_polymorphic_call_context> known_contexts,
			 struct ipa_agg_replacement_value *aggvals,
			 vec<cgraph_edge *> callers)
{
  struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
  vec<ipa_replace_map *, va_gc> *replace_trees = NULL;
  struct ipa_agg_replacement_value *av;
  struct cgraph_node *new_node;
  int i, count = ipa_get_param_count (info);
  bitmap args_to_skip;

  /* Only original nodes, never clones of clones, are specialized here.  */
  gcc_assert (!info->ipcp_orig_node);

  if (node->local.can_change_signature)
    {
      /* Drop parameters that are either replaced by a constant or entirely
	 unused from the clone's signature.  */
      args_to_skip = BITMAP_GGC_ALLOC ();
      for (i = 0; i < count; i++)
	{
	  tree t = known_csts[i];

	  if (t || !ipa_is_param_used (info, i))
	    bitmap_set_bit (args_to_skip, i);
	}
    }
  else
    {
      args_to_skip = NULL;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " cannot change function signature\n");
    }

  /* Build replace maps substituting the known constants into the body.  */
  for (i = 0; i < count; i++)
    {
      tree t = known_csts[i];
      if (t)
	{
	  struct ipa_replace_map *replace_map;

	  gcc_checking_assert (TREE_CODE (t) != TREE_BINFO);
	  replace_map = get_replacement_map (info, t, i);
	  if (replace_map)
	    vec_safe_push (replace_trees, replace_map);
	}
    }
  /* Self-recursive calls cannot simply be redirected during cloning; pull
     them out of CALLERS and handle them separately below.  */
  auto_vec<cgraph_edge *, 2> self_recursive_calls;
  for (i = callers.length () - 1; i >= 0; i--)
    {
      cgraph_edge *cs = callers[i];
      if (cs->caller == node)
	{
	  self_recursive_calls.safe_push (cs);
	  callers.unordered_remove (i);
	}
    }

  new_node = node->create_virtual_clone (callers, replace_trees,
					 args_to_skip, "constprop");

  bool have_self_recursive_calls = !self_recursive_calls.is_empty ();
  for (unsigned j = 0; j < self_recursive_calls.length (); j++)
    {
      cgraph_edge *cs = get_next_cgraph_edge_clone (self_recursive_calls[j]);
      /* Cloned edges can disappear during cloning as speculation can be
	 resolved, check that we have one and that it comes from the last
	 cloning.  */
      if (cs && cs->caller == new_node)
	cs->redirect_callee_duplicating_thunks (new_node);
      /* Any future code that would make more than one clone of an outgoing
	 edge would confuse this mechanism, so let's check that does not
	 happen.  */
      gcc_checking_assert (!cs
			   || !get_next_cgraph_edge_clone (cs)
			   || get_next_cgraph_edge_clone (cs)->caller != new_node);
    }
  if (have_self_recursive_calls)
    new_node->expand_all_artificial_thunks ();

  ipa_set_node_agg_value_chain (new_node, aggvals);
  for (av = aggvals; av; av = av->next)
    new_node->maybe_create_reference (av->value, NULL);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, " the new node is %s.\n", new_node->dump_name ());
      if (known_contexts.exists ())
	{
	  for (i = 0; i < count; i++)
	    if (!known_contexts[i].useless_p ())
	      {
		fprintf (dump_file, " known ctx %i is ", i);
		known_contexts[i].dump (dump_file);
	      }
	}
      if (aggvals)
	ipa_dump_agg_replacement_values (dump_file, aggvals);
    }
  ipa_check_create_node_params ();
  update_profiling_info (node, new_node);
  new_info = IPA_NODE_REF (new_node);
  new_info->ipcp_orig_node = node;
  new_info->known_csts = known_csts;
  new_info->known_contexts = known_contexts;

  ipcp_discover_new_direct_edges (new_node, known_csts, known_contexts, aggvals);

  callers.release ();
  return new_node;
}
3886 /* Return true, if JFUNC, which describes a i-th parameter of call CS, is a
3887 simple no-operation pass-through function to itself. */
3889 static bool
3890 self_recursive_pass_through_p (cgraph_edge *cs, ipa_jump_func *jfunc, int i)
3892 enum availability availability;
3893 if (cs->caller == cs->callee->function_symbol (&availability)
3894 && availability > AVAIL_INTERPOSABLE
3895 && jfunc->type == IPA_JF_PASS_THROUGH
3896 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR
3897 && ipa_get_jf_pass_through_formal_id (jfunc) == i)
3898 return true;
3899 return false;
/* Given a NODE, and a subset of its CALLERS, try to populate blank slots in
   KNOWN_CSTS with constants that are also known for all of the CALLERS.  */

static void
find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
					    vec<tree> known_csts,
					    vec<cgraph_edge *> callers)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);

  for (i = 0; i < count; i++)
    {
      struct cgraph_edge *cs;
      tree newval = NULL_TREE;
      int j;
      bool first = true;
      tree type = ipa_get_type (info, i);

      /* Skip parameters whose lattice is varying or that already have a
	 known constant.  */
      if (ipa_get_scalar_lat (info, i)->bottom || known_csts[i])
	continue;

      FOR_EACH_VEC_ELT (callers, j, cs)
	{
	  struct ipa_jump_func *jump_func;
	  tree t;

	  /* Dead callers cannot veto a value.  */
	  if (IPA_NODE_REF (cs->caller)->node_dead)
	    continue;

	  /* Edges with too few arguments, or argument 0 going through a
	     thunk (where the this-adjustment changes it), cannot supply a
	     usable value.  */
	  if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
	      || (i == 0
		  && call_passes_through_thunk_p (cs)))
	    {
	      newval = NULL_TREE;
	      break;
	    }
	  jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
	  /* A self-recursive pass-through trivially agrees with whatever the
	     other callers bring, so it does not constrain the value.  */
	  if (self_recursive_pass_through_p (cs, jump_func, i))
	    continue;

	  t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func, type);
	  if (!t
	      || (newval
		  && !values_equal_for_ipcp_p (t, newval))
	      || (!first && !newval))
	    {
	      /* Either this caller has no constant, or it disagrees with a
		 previously seen one: give up on this parameter.  */
	      newval = NULL_TREE;
	      break;
	    }
	  else
	    newval = t;
	  first = false;
	}

      if (newval)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " adding an extra known scalar value ");
	      print_ipcp_constant_value (dump_file, newval);
	      fprintf (dump_file, " for ");
	      ipa_dump_param (dump_file, info, i);
	      fprintf (dump_file, "\n");
	    }

	  known_csts[i] = newval;
	}
    }
}
/* Given a NODE and a subset of its CALLERS, try to populate blank slots in
   KNOWN_CONTEXTS with polymorphic contexts that are also known for all of the
   CALLERS.  */

static void
find_more_contexts_for_caller_subset (cgraph_node *node,
				      vec<ipa_polymorphic_call_context>
				      *known_contexts,
				      vec<cgraph_edge *> callers)
{
  ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);

  for (i = 0; i < count; i++)
    {
      cgraph_edge *cs;

      /* Skip parameters with a varying context lattice or an already known
	 useful context.  */
      if (ipa_get_poly_ctx_lat (info, i)->bottom
	  || (known_contexts->exists ()
	      && !(*known_contexts)[i].useless_p ()))
	continue;

      ipa_polymorphic_call_context newval;
      bool first = true;
      int j;

      FOR_EACH_VEC_ELT (callers, j, cs)
	{
	  /* An edge with fewer arguments than I ends the search for this
	     and (because I only grows) all following parameters.  */
	  if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
	    return;
	  ipa_jump_func *jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs),
							i);
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (IPA_NODE_REF (cs->caller), cs, i,
					jfunc);
	  /* Meet the contexts of all callers; bail out early once the meet
	     degenerates to a useless context.  */
	  if (first)
	    {
	      newval = ctx;
	      first = false;
	    }
	  else
	    newval.meet_with (ctx);
	  if (newval.useless_p ())
	    break;
	}

      if (!newval.useless_p ())
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " adding an extra known polymorphic "
		       "context ");
	      print_ipcp_constant_value (dump_file, newval);
	      fprintf (dump_file, " for ");
	      ipa_dump_param (dump_file, info, i);
	      fprintf (dump_file, "\n");
	    }

	  if (!known_contexts->exists ())
	    known_contexts->safe_grow_cleared (ipa_get_param_count (info));
	  (*known_contexts)[i] = newval;
	}
    }
}
4039 /* Go through PLATS and create a vector of values consisting of values and
4040 offsets (minus OFFSET) of lattices that contain only a single value. */
4042 static vec<ipa_agg_jf_item>
4043 copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
4045 vec<ipa_agg_jf_item> res = vNULL;
4047 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
4048 return vNULL;
4050 for (struct ipcp_agg_lattice *aglat = plats->aggs; aglat; aglat = aglat->next)
4051 if (aglat->is_single_const ())
4053 struct ipa_agg_jf_item ti;
4054 ti.offset = aglat->offset - offset;
4055 ti.value = aglat->values->value;
4056 res.safe_push (ti);
4058 return res;
/* Intersect all values in INTER with single value lattices in PLATS (while
   subtracting OFFSET).  Items that have no matching single-constant lattice
   get their value set to NULL_TREE; if PLATS carries no usable aggregate
   information at all, INTER is released.  */

static void
intersect_with_plats (struct ipcp_param_lattices *plats,
		      vec<ipa_agg_jf_item> *inter,
		      HOST_WIDE_INT offset)
{
  struct ipcp_agg_lattice *aglat;
  struct ipa_agg_jf_item *item;
  int k;

  if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
    {
      inter->release ();
      return;
    }

  /* Both the lattice list and INTER are ordered by offset, so AGLAT acts as
     a cursor that only ever moves forward across all items.  */
  aglat = plats->aggs;
  FOR_EACH_VEC_ELT (*inter, k, item)
    {
      bool found = false;
      if (!item->value)
	continue;
      while (aglat)
	{
	  if (aglat->offset - offset > item->offset)
	    break;
	  if (aglat->offset - offset == item->offset)
	    {
	      gcc_checking_assert (item->value);
	      if (aglat->is_single_const ()
		  && values_equal_for_ipcp_p (item->value,
					      aglat->values->value))
		found = true;
	      break;
	    }
	  aglat = aglat->next;
	}
      if (!found)
	item->value = NULL_TREE;
    }
}
4105 /* Copy aggregate replacement values of NODE (which is an IPA-CP clone) to the
4106 vector result while subtracting OFFSET from the individual value offsets. */
4108 static vec<ipa_agg_jf_item>
4109 agg_replacements_to_vector (struct cgraph_node *node, int index,
4110 HOST_WIDE_INT offset)
4112 struct ipa_agg_replacement_value *av;
4113 vec<ipa_agg_jf_item> res = vNULL;
4115 for (av = ipa_get_agg_replacements_for_node (node); av; av = av->next)
4116 if (av->index == index
4117 && (av->offset - offset) >= 0)
4119 struct ipa_agg_jf_item item;
4120 gcc_checking_assert (av->value);
4121 item.offset = av->offset - offset;
4122 item.value = av->value;
4123 res.safe_push (item);
4126 return res;
/* Intersect all values in INTER with those that we have already scheduled to
   be replaced in parameter number INDEX of NODE, which is an IPA-CP clone
   (while subtracting OFFSET).  Items without an equal scheduled replacement
   get their value cleared; if NODE has no replacements at all, INTER is
   released.  */

static void
intersect_with_agg_replacements (struct cgraph_node *node, int index,
				 vec<ipa_agg_jf_item> *inter,
				 HOST_WIDE_INT offset)
{
  struct ipa_agg_replacement_value *srcvals;
  struct ipa_agg_jf_item *item;
  int i;

  srcvals = ipa_get_agg_replacements_for_node (node);
  if (!srcvals)
    {
      inter->release ();
      return;
    }

  FOR_EACH_VEC_ELT (*inter, i, item)
    {
      struct ipa_agg_replacement_value *av;
      bool found = false;
      if (!item->value)
	continue;
      for (av = srcvals; av; av = av->next)
	{
	  gcc_checking_assert (av->value);
	  if (av->index == index
	      && av->offset - offset == item->offset)
	    {
	      /* Offsets are unique per index, so the first positional match
		 decides; stop looking either way.  */
	      if (values_equal_for_ipcp_p (item->value, av->value))
		found = true;
	      break;
	    }
	}
      if (!found)
	item->value = NULL_TREE;
    }
}
/* Intersect values in INTER with aggregate values that come along edge CS to
   parameter number INDEX and return it.  If INTER does not actually exist yet,
   copy all incoming values to it.  If we determine we ended up with no values
   whatsoever, return a released vector.  */

static vec<ipa_agg_jf_item>
intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
				vec<ipa_agg_jf_item> inter)
{
  struct ipa_jump_func *jfunc;
  jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), index);
  if (jfunc->type == IPA_JF_PASS_THROUGH
      && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
    {
      /* The argument is a plain copy of a caller parameter: intersect with
	 what is known about that parameter in the caller.  */
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);

      if (caller_info->ipcp_orig_node)
	{
	  /* The caller is an IPA-CP clone; its aggregate knowledge lives in
	     its replacement list, but permissibility is decided on the
	     lattices of the node it was cloned from.  */
	  struct cgraph_node *orig_node = caller_info->ipcp_orig_node;
	  struct ipcp_param_lattices *orig_plats;
	  orig_plats = ipa_get_parm_lattices (IPA_NODE_REF (orig_node),
					      src_idx);
	  if (agg_pass_through_permissible_p (orig_plats, jfunc))
	    {
	      if (!inter.exists ())
		inter = agg_replacements_to_vector (cs->caller, src_idx, 0);
	      else
		intersect_with_agg_replacements (cs->caller, src_idx,
						 &inter, 0);
	    }
	  else
	    {
	      inter.release ();
	      return vNULL;
	    }
	}
      else
	{
	  struct ipcp_param_lattices *src_plats;
	  src_plats = ipa_get_parm_lattices (caller_info, src_idx);
	  if (agg_pass_through_permissible_p (src_plats, jfunc))
	    {
	      /* Currently we do not produce clobber aggregate jump
		 functions, adjust when we do.  */
	      gcc_checking_assert (!jfunc->agg.items);
	      if (!inter.exists ())
		inter = copy_plats_to_inter (src_plats, 0);
	      else
		intersect_with_plats (src_plats, &inter, 0);
	    }
	  else
	    {
	      inter.release ();
	      return vNULL;
	    }
	}
    }
  else if (jfunc->type == IPA_JF_ANCESTOR
	   && ipa_get_jf_ancestor_agg_preserved (jfunc))
    {
      /* The argument points into an aggregate passed to the caller; the
	 ancestor offset DELTA shifts all known offsets accordingly.  */
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
      struct ipcp_param_lattices *src_plats;
      HOST_WIDE_INT delta = ipa_get_jf_ancestor_offset (jfunc);

      if (caller_info->ipcp_orig_node)
	{
	  if (!inter.exists ())
	    inter = agg_replacements_to_vector (cs->caller, src_idx, delta);
	  else
	    intersect_with_agg_replacements (cs->caller, src_idx, &inter,
					     delta);
	}
      else
	{
	  src_plats = ipa_get_parm_lattices (caller_info, src_idx);
	  /* Currently we do not produce clobber aggregate jump
	     functions, adjust when we do.  */
	  gcc_checking_assert (!src_plats->aggs || !jfunc->agg.items);
	  if (!inter.exists ())
	    inter = copy_plats_to_inter (src_plats, delta);
	  else
	    intersect_with_plats (src_plats, &inter, delta);
	}
    }
  else if (jfunc->agg.items)
    {
      /* The jump function itself carries known aggregate parts; intersect
	 INTER with them (both lists are ordered by offset).  */
      struct ipa_agg_jf_item *item;
      int k;

      if (!inter.exists ())
	for (unsigned i = 0; i < jfunc->agg.items->length (); i++)
	  inter.safe_push ((*jfunc->agg.items)[i]);
      else
	FOR_EACH_VEC_ELT (inter, k, item)
	  {
	    int l = 0;
	    bool found = false;

	    if (!item->value)
	      continue;

	    while ((unsigned) l < jfunc->agg.items->length ())
	      {
		struct ipa_agg_jf_item *ti;
		ti = &(*jfunc->agg.items)[l];
		if (ti->offset > item->offset)
		  break;
		if (ti->offset == item->offset)
		  {
		    gcc_checking_assert (ti->value);
		    if (values_equal_for_ipcp_p (item->value,
						 ti->value))
		      found = true;
		    break;
		  }
		l++;
	      }
	    if (!found)
	      item->value = NULL;
	  }
    }
  else
    {
      /* The edge brings no aggregate information at all.  */
      inter.release ();
      return vec<ipa_agg_jf_item>();
    }
  return inter;
}
/* Look at edges in CALLERS and collect all known aggregate values that arrive
   from all of them.  Returns a GC-allocated chain of replacement values, one
   for each aggregate part agreed upon by every caller.  */

static struct ipa_agg_replacement_value *
find_aggregate_values_for_callers_subset (struct cgraph_node *node,
					  vec<cgraph_edge *> callers)
{
  struct ipa_node_params *dest_info = IPA_NODE_REF (node);
  struct ipa_agg_replacement_value *res;
  struct ipa_agg_replacement_value **tail = &res;
  struct cgraph_edge *cs;
  int i, j, count = ipa_get_param_count (dest_info);

  /* Only parameters that every caller actually passes can be examined.  */
  FOR_EACH_VEC_ELT (callers, j, cs)
    {
      int c = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
      if (c < count)
	count = c;
    }

  for (i = 0; i < count; i++)
    {
      struct cgraph_edge *cs;
      vec<ipa_agg_jf_item> inter = vNULL;
      struct ipa_agg_jf_item *item;
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (dest_info, i);
      int j;

      /* Among other things, the following check should deal with all by_ref
	 mismatches.  */
      if (plats->aggs_bottom)
	continue;

      FOR_EACH_VEC_ELT (callers, j, cs)
	{
	  struct ipa_jump_func *jfunc
	    = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
	  /* Self-recursive pass-through edges carry the same aggregate back
	     unchanged, so they do not constrain the intersection.  */
	  if (self_recursive_pass_through_p (cs, jfunc, i)
	      && (!plats->aggs_by_ref
		  || ipa_get_jf_pass_through_agg_preserved (jfunc)))
	    continue;
	  inter = intersect_aggregates_with_edge (cs, i, inter);

	  /* An empty intersection means nothing can be agreed upon for this
	     parameter.  */
	  if (!inter.exists ())
	    goto next_param;
	}

      /* Turn the surviving items into replacement values chained via TAIL.  */
      FOR_EACH_VEC_ELT (inter, j, item)
	{
	  struct ipa_agg_replacement_value *v;

	  if (!item->value)
	    continue;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  v->index = i;
	  v->offset = item->offset;
	  v->value = item->value;
	  v->by_ref = plats->aggs_by_ref;
	  *tail = v;
	  tail = &v->next;
	}

    next_param:
      if (inter.exists ())
	inter.release ();
    }
  *tail = NULL;
  return res;
}
4373 /* Determine whether CS also brings all scalar values that the NODE is
4374 specialized for. */
4376 static bool
4377 cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
4378 struct cgraph_node *node)
4380 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
4381 int count = ipa_get_param_count (dest_info);
4382 struct ipa_node_params *caller_info;
4383 struct ipa_edge_args *args;
4384 int i;
4386 caller_info = IPA_NODE_REF (cs->caller);
4387 args = IPA_EDGE_REF (cs);
4388 for (i = 0; i < count; i++)
4390 struct ipa_jump_func *jump_func;
4391 tree val, t;
4393 val = dest_info->known_csts[i];
4394 if (!val)
4395 continue;
4397 if (i >= ipa_get_cs_argument_count (args))
4398 return false;
4399 jump_func = ipa_get_ith_jump_func (args, i);
4400 t = ipa_value_from_jfunc (caller_info, jump_func,
4401 ipa_get_type (dest_info, i));
4402 if (!t || !values_equal_for_ipcp_p (val, t))
4403 return false;
4405 return true;
4408 /* Determine whether CS also brings all aggregate values that NODE is
4409 specialized for. */
4410 static bool
4411 cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
4412 struct cgraph_node *node)
4414 struct ipa_node_params *orig_caller_info = IPA_NODE_REF (cs->caller);
4415 struct ipa_node_params *orig_node_info;
4416 struct ipa_agg_replacement_value *aggval;
4417 int i, ec, count;
4419 aggval = ipa_get_agg_replacements_for_node (node);
4420 if (!aggval)
4421 return true;
4423 count = ipa_get_param_count (IPA_NODE_REF (node));
4424 ec = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
4425 if (ec < count)
4426 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
4427 if (aggval->index >= ec)
4428 return false;
4430 orig_node_info = IPA_NODE_REF (IPA_NODE_REF (node)->ipcp_orig_node);
4431 if (orig_caller_info->ipcp_orig_node)
4432 orig_caller_info = IPA_NODE_REF (orig_caller_info->ipcp_orig_node);
4434 for (i = 0; i < count; i++)
4436 static vec<ipa_agg_jf_item> values = vec<ipa_agg_jf_item>();
4437 struct ipcp_param_lattices *plats;
4438 bool interesting = false;
4439 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
4440 if (aggval->index == i)
4442 interesting = true;
4443 break;
4445 if (!interesting)
4446 continue;
4448 plats = ipa_get_parm_lattices (orig_node_info, aggval->index);
4449 if (plats->aggs_bottom)
4450 return false;
4452 values = intersect_aggregates_with_edge (cs, i, values);
4453 if (!values.exists ())
4454 return false;
4456 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
4457 if (aggval->index == i)
4459 struct ipa_agg_jf_item *item;
4460 int j;
4461 bool found = false;
4462 FOR_EACH_VEC_ELT (values, j, item)
4463 if (item->value
4464 && item->offset == av->offset
4465 && values_equal_for_ipcp_p (item->value, av->value))
4467 found = true;
4468 break;
4470 if (!found)
4472 values.release ();
4473 return false;
4477 return true;
4480 /* Given an original NODE and a VAL for which we have already created a
4481 specialized clone, look whether there are incoming edges that still lead
4482 into the old node but now also bring the requested value and also conform to
4483 all other criteria such that they can be redirected the special node.
4484 This function can therefore redirect the final edge in a SCC. */
4486 template <typename valtype>
4487 static void
4488 perhaps_add_new_callers (cgraph_node *node, ipcp_value<valtype> *val)
4490 ipcp_value_source<valtype> *src;
4491 profile_count redirected_sum = profile_count::zero ();
4493 for (src = val->sources; src; src = src->next)
4495 struct cgraph_edge *cs = src->cs;
4496 while (cs)
4498 if (cgraph_edge_brings_value_p (cs, src, node, val)
4499 && cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
4500 && cgraph_edge_brings_all_agg_vals_for_node (cs, val->spec_node))
4502 if (dump_file)
4503 fprintf (dump_file, " - adding an extra caller %s of %s\n",
4504 cs->caller->dump_name (),
4505 val->spec_node->dump_name ());
4507 cs->redirect_callee_duplicating_thunks (val->spec_node);
4508 val->spec_node->expand_all_artificial_thunks ();
4509 if (cs->count.ipa ().initialized_p ())
4510 redirected_sum = redirected_sum + cs->count.ipa ();
4512 cs = get_next_cgraph_edge_clone (cs);
4516 if (redirected_sum.nonzero_p ())
4517 update_specialized_profile (val->spec_node, node, redirected_sum);
4520 /* Return true if KNOWN_CONTEXTS contain at least one useful context. */
4522 static bool
4523 known_contexts_useful_p (vec<ipa_polymorphic_call_context> known_contexts)
4525 ipa_polymorphic_call_context *ctx;
4526 int i;
4528 FOR_EACH_VEC_ELT (known_contexts, i, ctx)
4529 if (!ctx->useless_p ())
4530 return true;
4531 return false;
4534 /* Return a copy of KNOWN_CSTS if it is not empty, otherwise return vNULL. */
4536 static vec<ipa_polymorphic_call_context>
4537 copy_useful_known_contexts (vec<ipa_polymorphic_call_context> known_contexts)
4539 if (known_contexts_useful_p (known_contexts))
4540 return known_contexts.copy ();
4541 else
4542 return vNULL;
4545 /* Copy KNOWN_CSTS and modify the copy according to VAL and INDEX. If
4546 non-empty, replace KNOWN_CONTEXTS with its copy too. */
4548 static void
4549 modify_known_vectors_with_val (vec<tree> *known_csts,
4550 vec<ipa_polymorphic_call_context> *known_contexts,
4551 ipcp_value<tree> *val,
4552 int index)
4554 *known_csts = known_csts->copy ();
4555 *known_contexts = copy_useful_known_contexts (*known_contexts);
4556 (*known_csts)[index] = val->value;
4559 /* Replace KNOWN_CSTS with its copy. Also copy KNOWN_CONTEXTS and modify the
4560 copy according to VAL and INDEX. */
4562 static void
4563 modify_known_vectors_with_val (vec<tree> *known_csts,
4564 vec<ipa_polymorphic_call_context> *known_contexts,
4565 ipcp_value<ipa_polymorphic_call_context> *val,
4566 int index)
4568 *known_csts = known_csts->copy ();
4569 *known_contexts = known_contexts->copy ();
4570 (*known_contexts)[index] = val->value;
4573 /* Return true if OFFSET indicates this was not an aggregate value or there is
4574 a replacement equivalent to VALUE, INDEX and OFFSET among those in the
4575 AGGVALS list. */
4577 DEBUG_FUNCTION bool
4578 ipcp_val_agg_replacement_ok_p (ipa_agg_replacement_value *aggvals,
4579 int index, HOST_WIDE_INT offset, tree value)
4581 if (offset == -1)
4582 return true;
4584 while (aggvals)
4586 if (aggvals->index == index
4587 && aggvals->offset == offset
4588 && values_equal_for_ipcp_p (aggvals->value, value))
4589 return true;
4590 aggvals = aggvals->next;
4592 return false;
4595 /* Return true if offset is minus one because source of a polymorphic contect
4596 cannot be an aggregate value. */
4598 DEBUG_FUNCTION bool
4599 ipcp_val_agg_replacement_ok_p (ipa_agg_replacement_value *,
4600 int , HOST_WIDE_INT offset,
4601 ipa_polymorphic_call_context)
4603 return offset == -1;
4606 /* Decide wheter to create a special version of NODE for value VAL of parameter
4607 at the given INDEX. If OFFSET is -1, the value is for the parameter itself,
4608 otherwise it is stored at the given OFFSET of the parameter. KNOWN_CSTS,
4609 KNOWN_CONTEXTS and KNOWN_AGGS describe the other already known values. */
4611 template <typename valtype>
4612 static bool
4613 decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
4614 ipcp_value<valtype> *val, vec<tree> known_csts,
4615 vec<ipa_polymorphic_call_context> known_contexts)
4617 struct ipa_agg_replacement_value *aggvals;
4618 int freq_sum, caller_count;
4619 profile_count count_sum;
4620 vec<cgraph_edge *> callers;
4622 if (val->spec_node)
4624 perhaps_add_new_callers (node, val);
4625 return false;
4627 else if (val->local_size_cost + overall_size > max_new_size)
4629 if (dump_file && (dump_flags & TDF_DETAILS))
4630 fprintf (dump_file, " Ignoring candidate value because "
4631 "max_new_size would be reached with %li.\n",
4632 val->local_size_cost + overall_size);
4633 return false;
4635 else if (!get_info_about_necessary_edges (val, node, &freq_sum, &count_sum,
4636 &caller_count))
4637 return false;
4639 if (dump_file && (dump_flags & TDF_DETAILS))
4641 fprintf (dump_file, " - considering value ");
4642 print_ipcp_constant_value (dump_file, val->value);
4643 fprintf (dump_file, " for ");
4644 ipa_dump_param (dump_file, IPA_NODE_REF (node), index);
4645 if (offset != -1)
4646 fprintf (dump_file, ", offset: " HOST_WIDE_INT_PRINT_DEC, offset);
4647 fprintf (dump_file, " (caller_count: %i)\n", caller_count);
4650 if (!good_cloning_opportunity_p (node, val->local_time_benefit,
4651 freq_sum, count_sum,
4652 val->local_size_cost)
4653 && !good_cloning_opportunity_p (node,
4654 val->local_time_benefit
4655 + val->prop_time_benefit,
4656 freq_sum, count_sum,
4657 val->local_size_cost
4658 + val->prop_size_cost))
4659 return false;
4661 if (dump_file)
4662 fprintf (dump_file, " Creating a specialized node of %s.\n",
4663 node->dump_name ());
4665 callers = gather_edges_for_value (val, node, caller_count);
4666 if (offset == -1)
4667 modify_known_vectors_with_val (&known_csts, &known_contexts, val, index);
4668 else
4670 known_csts = known_csts.copy ();
4671 known_contexts = copy_useful_known_contexts (known_contexts);
4673 find_more_scalar_values_for_callers_subset (node, known_csts, callers);
4674 find_more_contexts_for_caller_subset (node, &known_contexts, callers);
4675 aggvals = find_aggregate_values_for_callers_subset (node, callers);
4676 gcc_checking_assert (ipcp_val_agg_replacement_ok_p (aggvals, index,
4677 offset, val->value));
4678 val->spec_node = create_specialized_node (node, known_csts, known_contexts,
4679 aggvals, callers);
4680 overall_size += val->local_size_cost;
4682 /* TODO: If for some lattice there is only one other known value
4683 left, make a special node for it too. */
4685 return true;
4688 /* Decide whether and what specialized clones of NODE should be created. */
4690 static bool
4691 decide_whether_version_node (struct cgraph_node *node)
4693 struct ipa_node_params *info = IPA_NODE_REF (node);
4694 int i, count = ipa_get_param_count (info);
4695 vec<tree> known_csts;
4696 vec<ipa_polymorphic_call_context> known_contexts;
4697 vec<ipa_agg_jump_function> known_aggs = vNULL;
4698 bool ret = false;
4700 if (count == 0)
4701 return false;
4703 if (dump_file && (dump_flags & TDF_DETAILS))
4704 fprintf (dump_file, "\nEvaluating opportunities for %s.\n",
4705 node->dump_name ());
4707 gather_context_independent_values (info, &known_csts, &known_contexts,
4708 info->do_clone_for_all_contexts ? &known_aggs
4709 : NULL, NULL);
4711 for (i = 0; i < count;i++)
4713 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
4714 ipcp_lattice<tree> *lat = &plats->itself;
4715 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
4717 if (!lat->bottom
4718 && !known_csts[i])
4720 ipcp_value<tree> *val;
4721 for (val = lat->values; val; val = val->next)
4722 ret |= decide_about_value (node, i, -1, val, known_csts,
4723 known_contexts);
4726 if (!plats->aggs_bottom)
4728 struct ipcp_agg_lattice *aglat;
4729 ipcp_value<tree> *val;
4730 for (aglat = plats->aggs; aglat; aglat = aglat->next)
4731 if (!aglat->bottom && aglat->values
4732 /* If the following is false, the one value is in
4733 known_aggs. */
4734 && (plats->aggs_contain_variable
4735 || !aglat->is_single_const ()))
4736 for (val = aglat->values; val; val = val->next)
4737 ret |= decide_about_value (node, i, aglat->offset, val,
4738 known_csts, known_contexts);
4741 if (!ctxlat->bottom
4742 && known_contexts[i].useless_p ())
4744 ipcp_value<ipa_polymorphic_call_context> *val;
4745 for (val = ctxlat->values; val; val = val->next)
4746 ret |= decide_about_value (node, i, -1, val, known_csts,
4747 known_contexts);
4750 info = IPA_NODE_REF (node);
4753 if (info->do_clone_for_all_contexts)
4755 struct cgraph_node *clone;
4756 vec<cgraph_edge *> callers;
4758 if (dump_file)
4759 fprintf (dump_file, " - Creating a specialized node of %s "
4760 "for all known contexts.\n", node->dump_name ());
4762 callers = node->collect_callers ();
4763 find_more_scalar_values_for_callers_subset (node, known_csts, callers);
4764 find_more_contexts_for_caller_subset (node, &known_contexts, callers);
4765 ipa_agg_replacement_value *aggvals
4766 = find_aggregate_values_for_callers_subset (node, callers);
4768 if (!known_contexts_useful_p (known_contexts))
4770 known_contexts.release ();
4771 known_contexts = vNULL;
4773 clone = create_specialized_node (node, known_csts, known_contexts,
4774 aggvals, callers);
4775 info = IPA_NODE_REF (node);
4776 info->do_clone_for_all_contexts = false;
4777 IPA_NODE_REF (clone)->is_all_contexts_clone = true;
4778 for (i = 0; i < count; i++)
4779 vec_free (known_aggs[i].items);
4780 known_aggs.release ();
4781 ret = true;
4783 else
4785 known_csts.release ();
4786 known_contexts.release ();
4789 return ret;
4792 /* Transitively mark all callees of NODE within the same SCC as not dead. */
4794 static void
4795 spread_undeadness (struct cgraph_node *node)
4797 struct cgraph_edge *cs;
4799 for (cs = node->callees; cs; cs = cs->next_callee)
4800 if (ipa_edge_within_scc (cs))
4802 struct cgraph_node *callee;
4803 struct ipa_node_params *info;
4805 callee = cs->callee->function_symbol (NULL);
4806 info = IPA_NODE_REF (callee);
4808 if (info->node_dead)
4810 info->node_dead = 0;
4811 spread_undeadness (callee);
4816 /* Return true if NODE has a caller from outside of its SCC that is not
4817 dead. Worker callback for cgraph_for_node_and_aliases. */
4819 static bool
4820 has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
4821 void *data ATTRIBUTE_UNUSED)
4823 struct cgraph_edge *cs;
4825 for (cs = node->callers; cs; cs = cs->next_caller)
4826 if (cs->caller->thunk.thunk_p
4827 && cs->caller->call_for_symbol_thunks_and_aliases
4828 (has_undead_caller_from_outside_scc_p, NULL, true))
4829 return true;
4830 else if (!ipa_edge_within_scc (cs)
4831 && !IPA_NODE_REF (cs->caller)->node_dead)
4832 return true;
4833 return false;
4837 /* Identify nodes within the same SCC as NODE which are no longer needed
4838 because of new clones and will be removed as unreachable. */
4840 static void
4841 identify_dead_nodes (struct cgraph_node *node)
4843 struct cgraph_node *v;
4844 for (v = node; v; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4845 if (v->local.local
4846 && !v->call_for_symbol_thunks_and_aliases
4847 (has_undead_caller_from_outside_scc_p, NULL, true))
4848 IPA_NODE_REF (v)->node_dead = 1;
4850 for (v = node; v; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4851 if (!IPA_NODE_REF (v)->node_dead)
4852 spread_undeadness (v);
4854 if (dump_file && (dump_flags & TDF_DETAILS))
4856 for (v = node; v; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4857 if (IPA_NODE_REF (v)->node_dead)
4858 fprintf (dump_file, " Marking node as dead: %s.\n", v->dump_name ());
4862 /* The decision stage. Iterate over the topological order of call graph nodes
4863 TOPO and make specialized clones if deemed beneficial. */
4865 static void
4866 ipcp_decision_stage (struct ipa_topo_info *topo)
4868 int i;
4870 if (dump_file)
4871 fprintf (dump_file, "\nIPA decision stage:\n\n");
4873 for (i = topo->nnodes - 1; i >= 0; i--)
4875 struct cgraph_node *node = topo->order[i];
4876 bool change = false, iterate = true;
4878 while (iterate)
4880 struct cgraph_node *v;
4881 iterate = false;
4882 for (v = node; v; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4883 if (v->has_gimple_body_p ()
4884 && ipcp_versionable_function_p (v))
4885 iterate |= decide_whether_version_node (v);
4887 change |= iterate;
4889 if (change)
4890 identify_dead_nodes (node);
4894 /* Look up all the bits information that we have discovered and copy it over
4895 to the transformation summary. */
4897 static void
4898 ipcp_store_bits_results (void)
4900 cgraph_node *node;
4902 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
4904 ipa_node_params *info = IPA_NODE_REF (node);
4905 bool dumped_sth = false;
4906 bool found_useful_result = false;
4908 if (!opt_for_fn (node->decl, flag_ipa_bit_cp))
4910 if (dump_file)
4911 fprintf (dump_file, "Not considering %s for ipa bitwise propagation "
4912 "; -fipa-bit-cp: disabled.\n",
4913 node->name ());
4914 continue;
4917 if (info->ipcp_orig_node)
4918 info = IPA_NODE_REF (info->ipcp_orig_node);
4920 unsigned count = ipa_get_param_count (info);
4921 for (unsigned i = 0; i < count; i++)
4923 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
4924 if (plats->bits_lattice.constant_p ())
4926 found_useful_result = true;
4927 break;
4931 if (!found_useful_result)
4932 continue;
4934 ipcp_transformation_initialize ();
4935 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4936 vec_safe_reserve_exact (ts->bits, count);
4938 for (unsigned i = 0; i < count; i++)
4940 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
4941 ipa_bits *jfbits;
4943 if (plats->bits_lattice.constant_p ())
4944 jfbits
4945 = ipa_get_ipa_bits_for_value (plats->bits_lattice.get_value (),
4946 plats->bits_lattice.get_mask ());
4947 else
4948 jfbits = NULL;
4950 ts->bits->quick_push (jfbits);
4951 if (!dump_file || !jfbits)
4952 continue;
4953 if (!dumped_sth)
4955 fprintf (dump_file, "Propagated bits info for function %s:\n",
4956 node->dump_name ());
4957 dumped_sth = true;
4959 fprintf (dump_file, " param %i: value = ", i);
4960 print_hex (jfbits->value, dump_file);
4961 fprintf (dump_file, ", mask = ");
4962 print_hex (jfbits->mask, dump_file);
4963 fprintf (dump_file, "\n");
4968 /* Look up all VR information that we have discovered and copy it over
4969 to the transformation summary. */
4971 static void
4972 ipcp_store_vr_results (void)
4974 cgraph_node *node;
4976 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
4978 ipa_node_params *info = IPA_NODE_REF (node);
4979 bool found_useful_result = false;
4981 if (!opt_for_fn (node->decl, flag_ipa_vrp))
4983 if (dump_file)
4984 fprintf (dump_file, "Not considering %s for VR discovery "
4985 "and propagate; -fipa-ipa-vrp: disabled.\n",
4986 node->name ());
4987 continue;
4990 if (info->ipcp_orig_node)
4991 info = IPA_NODE_REF (info->ipcp_orig_node);
4993 unsigned count = ipa_get_param_count (info);
4994 for (unsigned i = 0; i < count; i++)
4996 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
4997 if (!plats->m_value_range.bottom_p ()
4998 && !plats->m_value_range.top_p ())
5000 found_useful_result = true;
5001 break;
5004 if (!found_useful_result)
5005 continue;
5007 ipcp_transformation_initialize ();
5008 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5009 vec_safe_reserve_exact (ts->m_vr, count);
5011 for (unsigned i = 0; i < count; i++)
5013 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
5014 ipa_vr vr;
5016 if (!plats->m_value_range.bottom_p ()
5017 && !plats->m_value_range.top_p ())
5019 vr.known = true;
5020 vr.type = plats->m_value_range.m_vr.kind ();
5021 vr.min = wi::to_wide (plats->m_value_range.m_vr.min ());
5022 vr.max = wi::to_wide (plats->m_value_range.m_vr.max ());
5024 else
5026 vr.known = false;
5027 vr.type = VR_VARYING;
5028 vr.min = vr.max = wi::zero (INT_TYPE_SIZE);
5030 ts->m_vr->quick_push (vr);
5035 /* The IPCP driver. */
5037 static unsigned int
5038 ipcp_driver (void)
5040 struct ipa_topo_info topo;
5042 if (edge_clone_summaries == NULL)
5043 edge_clone_summaries = new edge_clone_summary_t (symtab);
5045 ipa_check_create_node_params ();
5046 ipa_check_create_edge_args ();
5048 if (dump_file)
5050 fprintf (dump_file, "\nIPA structures before propagation:\n");
5051 if (dump_flags & TDF_DETAILS)
5052 ipa_print_all_params (dump_file);
5053 ipa_print_all_jump_functions (dump_file);
5056 /* Topological sort. */
5057 build_toporder_info (&topo);
5058 /* Do the interprocedural propagation. */
5059 ipcp_propagate_stage (&topo);
5060 /* Decide what constant propagation and cloning should be performed. */
5061 ipcp_decision_stage (&topo);
5062 /* Store results of bits propagation. */
5063 ipcp_store_bits_results ();
5064 /* Store results of value range propagation. */
5065 ipcp_store_vr_results ();
5067 /* Free all IPCP structures. */
5068 free_toporder_info (&topo);
5069 delete edge_clone_summaries;
5070 edge_clone_summaries = NULL;
5071 ipa_free_all_structures_after_ipa_cp ();
5072 if (dump_file)
5073 fprintf (dump_file, "\nIPA constant propagation end\n");
5074 return 0;
5077 /* Initialization and computation of IPCP data structures. This is the initial
5078 intraprocedural analysis of functions, which gathers information to be
5079 propagated later on. */
5081 static void
5082 ipcp_generate_summary (void)
5084 struct cgraph_node *node;
5086 if (dump_file)
5087 fprintf (dump_file, "\nIPA constant propagation start:\n");
5088 ipa_register_cgraph_hooks ();
5090 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
5091 ipa_analyze_node (node);
5094 /* Write ipcp summary for nodes in SET. */
5096 static void
5097 ipcp_write_summary (void)
5099 ipa_prop_write_jump_functions ();
5102 /* Read ipcp summary. */
5104 static void
5105 ipcp_read_summary (void)
5107 ipa_prop_read_jump_functions ();
5110 namespace {
5112 const pass_data pass_data_ipa_cp =
5114 IPA_PASS, /* type */
5115 "cp", /* name */
5116 OPTGROUP_NONE, /* optinfo_flags */
5117 TV_IPA_CONSTANT_PROP, /* tv_id */
5118 0, /* properties_required */
5119 0, /* properties_provided */
5120 0, /* properties_destroyed */
5121 0, /* todo_flags_start */
5122 ( TODO_dump_symtab | TODO_remove_functions ), /* todo_flags_finish */
5125 class pass_ipa_cp : public ipa_opt_pass_d
5127 public:
5128 pass_ipa_cp (gcc::context *ctxt)
5129 : ipa_opt_pass_d (pass_data_ipa_cp, ctxt,
5130 ipcp_generate_summary, /* generate_summary */
5131 ipcp_write_summary, /* write_summary */
5132 ipcp_read_summary, /* read_summary */
5133 ipcp_write_transformation_summaries, /*
5134 write_optimization_summary */
5135 ipcp_read_transformation_summaries, /*
5136 read_optimization_summary */
5137 NULL, /* stmt_fixup */
5138 0, /* function_transform_todo_flags_start */
5139 ipcp_transform_function, /* function_transform */
5140 NULL) /* variable_transform */
5143 /* opt_pass methods: */
5144 virtual bool gate (function *)
5146 /* FIXME: We should remove the optimize check after we ensure we never run
5147 IPA passes when not optimizing. */
5148 return (flag_ipa_cp && optimize) || in_lto_p;
5151 virtual unsigned int execute (function *) { return ipcp_driver (); }
5153 }; // class pass_ipa_cp
5155 } // anon namespace
5157 ipa_opt_pass_d *
5158 make_pass_ipa_cp (gcc::context *ctxt)
5160 return new pass_ipa_cp (ctxt);
5163 /* Reset all state within ipa-cp.c so that we can rerun the compiler
5164 within the same process. For use by toplev::finalize. */
5166 void
5167 ipa_cp_c_finalize (void)
5169 max_count = profile_count::uninitialized ();
5170 overall_size = 0;
5171 max_new_size = 0;