Relocation (= move+destroy)
[official-gcc.git] / gcc / ipa-cp.c
blob4471bae11c7c118f035105071da332d02e9d6f93
1 /* Interprocedural constant propagation
2 Copyright (C) 2005-2018 Free Software Foundation, Inc.
4 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
5 <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Interprocedural constant propagation (IPA-CP).
25 The goal of this transformation is to
27 1) discover functions which are always invoked with some arguments with the
28 same known constant values and modify the functions so that the
29 subsequent optimizations can take advantage of the knowledge, and
31 2) partial specialization - create specialized versions of functions
32 transformed in this way if some parameters are known constants only in
33 certain contexts but the estimated tradeoff between speedup and cost size
34 is deemed good.
36 The algorithm also propagates types and attempts to perform type based
37 devirtualization. Types are propagated much like constants.
39 The algorithm basically consists of three stages. In the first, functions
40 are analyzed one at a time and jump functions are constructed for all known
41 call-sites. In the second phase, the pass propagates information from the
42 jump functions across the call to reveal what values are available at what
43 call sites, performs estimations of effects of known values on functions and
44 their callees, and finally decides what specialized extra versions should be
45 created. In the third, the special versions materialize and appropriate
46 calls are redirected.
48 The algorithm used is to a certain extent based on "Interprocedural Constant
49 Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
50 Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
51 Cooper, Mary W. Hall, and Ken Kennedy.
54 First stage - intraprocedural analysis
55 =======================================
57 This phase computes jump_function and modification flags.
59 A jump function for a call-site represents the values passed as an actual
60 arguments of a given call-site. In principle, there are three types of
61 values:
63 Pass through - the caller's formal parameter is passed as an actual
64 argument, plus an operation on it can be performed.
65 Constant - a constant is passed as an actual argument.
66 Unknown - neither of the above.
68 All jump function types are described in detail in ipa-prop.h, together with
69 the data structures that represent them and methods of accessing them.
71 ipcp_generate_summary() is the main function of the first stage.
73 Second stage - interprocedural analysis
74 ========================================
76 This stage is itself divided into two phases. In the first, we propagate
77 known values over the call graph, in the second, we make cloning decisions.
78 It uses a different algorithm than the original Callahan's paper.
80 First, we traverse the functions topologically from callers to callees and,
81 for each strongly connected component (SCC), we propagate constants
82 according to previously computed jump functions. We also record what known
83 values depend on other known values and estimate local effects. Finally, we
84 propagate cumulative information about these effects from dependent values
85 to those on which they depend.
87 Second, we again traverse the call graph in the same topological order and
88 make clones for functions which we know are called with the same values in
89 all contexts and decide about extra specialized clones of functions just for
90 some contexts - these decisions are based on both local estimates and
91 cumulative estimates propagated from callees.
93 ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
94 third stage.
96 Third phase - materialization of clones, call statement updates.
97 ============================================
99 This stage is currently performed by call graph code (mainly in cgraphunit.c
100 and tree-inline.c) according to instructions inserted to the call graph by
101 the second stage. */
103 #include "config.h"
104 #include "system.h"
105 #include "coretypes.h"
106 #include "backend.h"
107 #include "tree.h"
108 #include "gimple-expr.h"
109 #include "predict.h"
110 #include "alloc-pool.h"
111 #include "tree-pass.h"
112 #include "cgraph.h"
113 #include "diagnostic.h"
114 #include "fold-const.h"
115 #include "gimple-fold.h"
116 #include "symbol-summary.h"
117 #include "tree-vrp.h"
118 #include "ipa-prop.h"
119 #include "tree-pretty-print.h"
120 #include "tree-inline.h"
121 #include "params.h"
122 #include "ipa-fnsummary.h"
123 #include "ipa-utils.h"
124 #include "tree-ssa-ccp.h"
125 #include "stringpool.h"
126 #include "attribs.h"
128 template <typename valtype> class ipcp_value;
130 /* Describes a particular source for an IPA-CP value. */
132 template <typename valtype>
133 class ipcp_value_source
135 public:
136 /* Aggregate offset of the source, negative if the source is scalar value of
137 the argument itself. */
138 HOST_WIDE_INT offset;
139 /* The incoming edge that brought the value. */
140 cgraph_edge *cs;
141 /* If the jump function that resulted into his value was a pass-through or an
142 ancestor, this is the ipcp_value of the caller from which the described
143 value has been derived. Otherwise it is NULL. */
144 ipcp_value<valtype> *val;
145 /* Next pointer in a linked list of sources of a value. */
146 ipcp_value_source *next;
147 /* If the jump function that resulted into his value was a pass-through or an
148 ancestor, this is the index of the parameter of the caller the jump
149 function references. */
150 int index;
/* Common ancestor of all ipcp_value instantiations; holds the cost/benefit
   bookkeeping shared by all value kinds.  */

class ipcp_value_base
{
public:
  /* Time benefit and size cost that specializing the function for this value
     would bring about in this function alone.  */
  int local_time_benefit, local_size_cost;
  /* Time benefit and size cost that specializing the function for this value
     can bring about in its callees (transitively).  */
  int prop_time_benefit, prop_size_cost;

  ipcp_value_base ()
    : local_time_benefit (0), local_size_cost (0),
      prop_time_benefit (0), prop_size_cost (0) {}
};
170 /* Describes one particular value stored in struct ipcp_lattice. */
172 template <typename valtype>
173 class ipcp_value : public ipcp_value_base
175 public:
176 /* The actual value for the given parameter. */
177 valtype value;
178 /* The list of sources from which this value originates. */
179 ipcp_value_source <valtype> *sources;
180 /* Next pointers in a linked list of all values in a lattice. */
181 ipcp_value *next;
182 /* Next pointers in a linked list of values in a strongly connected component
183 of values. */
184 ipcp_value *scc_next;
185 /* Next pointers in a linked list of SCCs of values sorted topologically
186 according their sources. */
187 ipcp_value *topo_next;
188 /* A specialized node created for this value, NULL if none has been (so far)
189 created. */
190 cgraph_node *spec_node;
191 /* Depth first search number and low link for topological sorting of
192 values. */
193 int dfs, low_link;
194 /* True if this valye is currently on the topo-sort stack. */
195 bool on_stack;
197 ipcp_value()
198 : sources (0), next (0), scc_next (0), topo_next (0),
199 spec_node (0), dfs (0), low_link (0), on_stack (false) {}
201 void add_source (cgraph_edge *cs, ipcp_value *src_val, int src_idx,
202 HOST_WIDE_INT offset);
205 /* Lattice describing potential values of a formal parameter of a function, or
206 a part of an aggregate. TOP is represented by a lattice with zero values
207 and with contains_variable and bottom flags cleared. BOTTOM is represented
208 by a lattice with the bottom flag set. In that case, values and
209 contains_variable flag should be disregarded. */
211 template <typename valtype>
212 class ipcp_lattice
214 public:
215 /* The list of known values and types in this lattice. Note that values are
216 not deallocated if a lattice is set to bottom because there may be value
217 sources referencing them. */
218 ipcp_value<valtype> *values;
219 /* Number of known values and types in this lattice. */
220 int values_count;
221 /* The lattice contains a variable component (in addition to values). */
222 bool contains_variable;
223 /* The value of the lattice is bottom (i.e. variable and unusable for any
224 propagation). */
225 bool bottom;
227 inline bool is_single_const ();
228 inline bool set_to_bottom ();
229 inline bool set_contains_variable ();
230 bool add_value (valtype newval, cgraph_edge *cs,
231 ipcp_value<valtype> *src_val = NULL,
232 int src_idx = 0, HOST_WIDE_INT offset = -1);
233 void print (FILE * f, bool dump_sources, bool dump_benefits);
236 /* Lattice of tree values with an offset to describe a part of an
237 aggregate. */
239 class ipcp_agg_lattice : public ipcp_lattice<tree>
241 public:
242 /* Offset that is being described by this lattice. */
243 HOST_WIDE_INT offset;
244 /* Size so that we don't have to re-compute it every time we traverse the
245 list. Must correspond to TYPE_SIZE of all lat values. */
246 HOST_WIDE_INT size;
247 /* Next element of the linked list. */
248 struct ipcp_agg_lattice *next;
251 /* Lattice of known bits, only capable of holding one value.
252 Bitwise constant propagation propagates which bits of a
253 value are constant.
254 For eg:
255 int f(int x)
257 return some_op (x);
260 int f1(int y)
262 if (cond)
263 return f (y & 0xff);
264 else
265 return f (y & 0xf);
268 In the above case, the param 'x' will always have all
269 the bits (except the bits in lsb) set to 0.
270 Hence the mask of 'x' would be 0xff. The mask
271 reflects that the bits in lsb are unknown.
272 The actual propagated value is given by m_value & ~m_mask. */
274 class ipcp_bits_lattice
276 public:
277 bool bottom_p () { return m_lattice_val == IPA_BITS_VARYING; }
278 bool top_p () { return m_lattice_val == IPA_BITS_UNDEFINED; }
279 bool constant_p () { return m_lattice_val == IPA_BITS_CONSTANT; }
280 bool set_to_bottom ();
281 bool set_to_constant (widest_int, widest_int);
283 widest_int get_value () { return m_value; }
284 widest_int get_mask () { return m_mask; }
286 bool meet_with (ipcp_bits_lattice& other, unsigned, signop,
287 enum tree_code, tree);
289 bool meet_with (widest_int, widest_int, unsigned);
291 void print (FILE *);
293 private:
294 enum { IPA_BITS_UNDEFINED, IPA_BITS_CONSTANT, IPA_BITS_VARYING } m_lattice_val;
296 /* Similar to ccp_lattice_t, mask represents which bits of value are constant.
297 If a bit in mask is set to 0, then the corresponding bit in
298 value is known to be constant. */
299 widest_int m_value, m_mask;
301 bool meet_with_1 (widest_int, widest_int, unsigned);
302 void get_value_and_mask (tree, widest_int *, widest_int *);
305 /* Lattice of value ranges. */
307 class ipcp_vr_lattice
309 public:
310 value_range m_vr;
312 inline bool bottom_p () const;
313 inline bool top_p () const;
314 inline bool set_to_bottom ();
315 bool meet_with (const value_range *p_vr);
316 bool meet_with (const ipcp_vr_lattice &other);
317 void init () { gcc_assert (m_vr.undefined_p ()); }
318 void print (FILE * f);
320 private:
321 bool meet_with_1 (const value_range *other_vr);
324 /* Structure containing lattices for a parameter itself and for pieces of
325 aggregates that are passed in the parameter or by a reference in a parameter
326 plus some other useful flags. */
328 class ipcp_param_lattices
330 public:
331 /* Lattice describing the value of the parameter itself. */
332 ipcp_lattice<tree> itself;
333 /* Lattice describing the polymorphic contexts of a parameter. */
334 ipcp_lattice<ipa_polymorphic_call_context> ctxlat;
335 /* Lattices describing aggregate parts. */
336 ipcp_agg_lattice *aggs;
337 /* Lattice describing known bits. */
338 ipcp_bits_lattice bits_lattice;
339 /* Lattice describing value range. */
340 ipcp_vr_lattice m_value_range;
341 /* Number of aggregate lattices */
342 int aggs_count;
343 /* True if aggregate data were passed by reference (as opposed to by
344 value). */
345 bool aggs_by_ref;
346 /* All aggregate lattices contain a variable component (in addition to
347 values). */
348 bool aggs_contain_variable;
349 /* The value of all aggregate lattices is bottom (i.e. variable and unusable
350 for any propagation). */
351 bool aggs_bottom;
353 /* There is a virtual call based on this parameter. */
354 bool virt_call;
357 /* Allocation pools for values and their sources in ipa-cp. */
359 object_allocator<ipcp_value<tree> > ipcp_cst_values_pool
360 ("IPA-CP constant values");
362 object_allocator<ipcp_value<ipa_polymorphic_call_context> >
363 ipcp_poly_ctx_values_pool ("IPA-CP polymorphic contexts");
365 object_allocator<ipcp_value_source<tree> > ipcp_sources_pool
366 ("IPA-CP value sources");
368 object_allocator<ipcp_agg_lattice> ipcp_agg_lattice_pool
369 ("IPA_CP aggregate lattices");
371 /* Maximal count found in program. */
373 static profile_count max_count;
375 /* Original overall size of the program. */
377 static long overall_size, max_new_size;
379 /* Return the param lattices structure corresponding to the Ith formal
380 parameter of the function described by INFO. */
381 static inline struct ipcp_param_lattices *
382 ipa_get_parm_lattices (struct ipa_node_params *info, int i)
384 gcc_assert (i >= 0 && i < ipa_get_param_count (info));
385 gcc_checking_assert (!info->ipcp_orig_node);
386 gcc_checking_assert (info->lattices);
387 return &(info->lattices[i]);
390 /* Return the lattice corresponding to the scalar value of the Ith formal
391 parameter of the function described by INFO. */
392 static inline ipcp_lattice<tree> *
393 ipa_get_scalar_lat (struct ipa_node_params *info, int i)
395 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
396 return &plats->itself;
399 /* Return the lattice corresponding to the scalar value of the Ith formal
400 parameter of the function described by INFO. */
401 static inline ipcp_lattice<ipa_polymorphic_call_context> *
402 ipa_get_poly_ctx_lat (struct ipa_node_params *info, int i)
404 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
405 return &plats->ctxlat;
408 /* Return whether LAT is a lattice with a single constant and without an
409 undefined value. */
411 template <typename valtype>
412 inline bool
413 ipcp_lattice<valtype>::is_single_const ()
415 if (bottom || contains_variable || values_count != 1)
416 return false;
417 else
418 return true;
421 /* Print V which is extracted from a value in a lattice to F. */
423 static void
424 print_ipcp_constant_value (FILE * f, tree v)
426 if (TREE_CODE (v) == ADDR_EXPR
427 && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
429 fprintf (f, "& ");
430 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)));
432 else
433 print_generic_expr (f, v);
436 /* Print V which is extracted from a value in a lattice to F. */
438 static void
439 print_ipcp_constant_value (FILE * f, ipa_polymorphic_call_context v)
441 v.dump(f, false);
444 /* Print a lattice LAT to F. */
446 template <typename valtype>
447 void
448 ipcp_lattice<valtype>::print (FILE * f, bool dump_sources, bool dump_benefits)
450 ipcp_value<valtype> *val;
451 bool prev = false;
453 if (bottom)
455 fprintf (f, "BOTTOM\n");
456 return;
459 if (!values_count && !contains_variable)
461 fprintf (f, "TOP\n");
462 return;
465 if (contains_variable)
467 fprintf (f, "VARIABLE");
468 prev = true;
469 if (dump_benefits)
470 fprintf (f, "\n");
473 for (val = values; val; val = val->next)
475 if (dump_benefits && prev)
476 fprintf (f, " ");
477 else if (!dump_benefits && prev)
478 fprintf (f, ", ");
479 else
480 prev = true;
482 print_ipcp_constant_value (f, val->value);
484 if (dump_sources)
486 ipcp_value_source<valtype> *s;
488 fprintf (f, " [from:");
489 for (s = val->sources; s; s = s->next)
490 fprintf (f, " %i(%f)", s->cs->caller->order,
491 s->cs->sreal_frequency ().to_double ());
492 fprintf (f, "]");
495 if (dump_benefits)
496 fprintf (f, " [loc_time: %i, loc_size: %i, "
497 "prop_time: %i, prop_size: %i]\n",
498 val->local_time_benefit, val->local_size_cost,
499 val->prop_time_benefit, val->prop_size_cost);
501 if (!dump_benefits)
502 fprintf (f, "\n");
505 void
506 ipcp_bits_lattice::print (FILE *f)
508 if (top_p ())
509 fprintf (f, " Bits unknown (TOP)\n");
510 else if (bottom_p ())
511 fprintf (f, " Bits unusable (BOTTOM)\n");
512 else
514 fprintf (f, " Bits: value = "); print_hex (get_value (), f);
515 fprintf (f, ", mask = "); print_hex (get_mask (), f);
516 fprintf (f, "\n");
520 /* Print value range lattice to F. */
522 void
523 ipcp_vr_lattice::print (FILE * f)
525 dump_value_range (f, &m_vr);
528 /* Print all ipcp_lattices of all functions to F. */
530 static void
531 print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
533 struct cgraph_node *node;
534 int i, count;
536 fprintf (f, "\nLattices:\n");
537 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
539 struct ipa_node_params *info;
541 info = IPA_NODE_REF (node);
542 fprintf (f, " Node: %s:\n", node->dump_name ());
543 count = ipa_get_param_count (info);
544 for (i = 0; i < count; i++)
546 struct ipcp_agg_lattice *aglat;
547 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
548 fprintf (f, " param [%d]: ", i);
549 plats->itself.print (f, dump_sources, dump_benefits);
550 fprintf (f, " ctxs: ");
551 plats->ctxlat.print (f, dump_sources, dump_benefits);
552 plats->bits_lattice.print (f);
553 fprintf (f, " ");
554 plats->m_value_range.print (f);
555 fprintf (f, "\n");
556 if (plats->virt_call)
557 fprintf (f, " virt_call flag set\n");
559 if (plats->aggs_bottom)
561 fprintf (f, " AGGS BOTTOM\n");
562 continue;
564 if (plats->aggs_contain_variable)
565 fprintf (f, " AGGS VARIABLE\n");
566 for (aglat = plats->aggs; aglat; aglat = aglat->next)
568 fprintf (f, " %soffset " HOST_WIDE_INT_PRINT_DEC ": ",
569 plats->aggs_by_ref ? "ref " : "", aglat->offset);
570 aglat->print (f, dump_sources, dump_benefits);
576 /* Determine whether it is at all technically possible to create clones of NODE
577 and store this information in the ipa_node_params structure associated
578 with NODE. */
580 static void
581 determine_versionability (struct cgraph_node *node,
582 struct ipa_node_params *info)
584 const char *reason = NULL;
586 /* There are a number of generic reasons functions cannot be versioned. We
587 also cannot remove parameters if there are type attributes such as fnspec
588 present. */
589 if (node->alias || node->thunk.thunk_p)
590 reason = "alias or thunk";
591 else if (!node->local.versionable)
592 reason = "not a tree_versionable_function";
593 else if (node->get_availability () <= AVAIL_INTERPOSABLE)
594 reason = "insufficient body availability";
595 else if (!opt_for_fn (node->decl, optimize)
596 || !opt_for_fn (node->decl, flag_ipa_cp))
597 reason = "non-optimized function";
598 else if (lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (node->decl)))
600 /* Ideally we should clone the SIMD clones themselves and create
601 vector copies of them, so IPA-cp and SIMD clones can happily
602 coexist, but that may not be worth the effort. */
603 reason = "function has SIMD clones";
605 else if (lookup_attribute ("target_clones", DECL_ATTRIBUTES (node->decl)))
607 /* Ideally we should clone the target clones themselves and create
608 copies of them, so IPA-cp and target clones can happily
609 coexist, but that may not be worth the effort. */
610 reason = "function target_clones attribute";
612 /* Don't clone decls local to a comdat group; it breaks and for C++
613 decloned constructors, inlining is always better anyway. */
614 else if (node->comdat_local_p ())
615 reason = "comdat-local function";
616 else if (node->calls_comdat_local)
618 /* TODO: call is versionable if we make sure that all
619 callers are inside of a comdat group. */
620 reason = "calls comdat-local function";
623 /* Functions calling BUILT_IN_VA_ARG_PACK and BUILT_IN_VA_ARG_PACK_LEN
624 work only when inlined. Cloning them may still lead to better code
625 because ipa-cp will not give up on cloning further. If the function is
626 external this however leads to wrong code because we may end up producing
627 offline copy of the function. */
628 if (DECL_EXTERNAL (node->decl))
629 for (cgraph_edge *edge = node->callees; !reason && edge;
630 edge = edge->next_callee)
631 if (fndecl_built_in_p (edge->callee->decl, BUILT_IN_NORMAL))
633 if (DECL_FUNCTION_CODE (edge->callee->decl) == BUILT_IN_VA_ARG_PACK)
634 reason = "external function which calls va_arg_pack";
635 if (DECL_FUNCTION_CODE (edge->callee->decl)
636 == BUILT_IN_VA_ARG_PACK_LEN)
637 reason = "external function which calls va_arg_pack_len";
640 if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
641 fprintf (dump_file, "Function %s is not versionable, reason: %s.\n",
642 node->dump_name (), reason);
644 info->versionable = (reason == NULL);
647 /* Return true if it is at all technically possible to create clones of a
648 NODE. */
650 static bool
651 ipcp_versionable_function_p (struct cgraph_node *node)
653 return IPA_NODE_REF (node)->versionable;
656 /* Structure holding accumulated information about callers of a node. */
658 struct caller_statistics
660 profile_count count_sum;
661 int n_calls, n_hot_calls, freq_sum;
664 /* Initialize fields of STAT to zeroes. */
666 static inline void
667 init_caller_stats (struct caller_statistics *stats)
669 stats->count_sum = profile_count::zero ();
670 stats->n_calls = 0;
671 stats->n_hot_calls = 0;
672 stats->freq_sum = 0;
675 /* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
676 non-thunk incoming edges to NODE. */
678 static bool
679 gather_caller_stats (struct cgraph_node *node, void *data)
681 struct caller_statistics *stats = (struct caller_statistics *) data;
682 struct cgraph_edge *cs;
684 for (cs = node->callers; cs; cs = cs->next_caller)
685 if (!cs->caller->thunk.thunk_p)
687 if (cs->count.ipa ().initialized_p ())
688 stats->count_sum += cs->count.ipa ();
689 stats->freq_sum += cs->frequency ();
690 stats->n_calls++;
691 if (cs->maybe_hot_p ())
692 stats->n_hot_calls ++;
694 return false;
698 /* Return true if this NODE is viable candidate for cloning. */
700 static bool
701 ipcp_cloning_candidate_p (struct cgraph_node *node)
703 struct caller_statistics stats;
705 gcc_checking_assert (node->has_gimple_body_p ());
707 if (!opt_for_fn (node->decl, flag_ipa_cp_clone))
709 if (dump_file)
710 fprintf (dump_file, "Not considering %s for cloning; "
711 "-fipa-cp-clone disabled.\n",
712 node->name ());
713 return false;
716 if (node->optimize_for_size_p ())
718 if (dump_file)
719 fprintf (dump_file, "Not considering %s for cloning; "
720 "optimizing it for size.\n",
721 node->name ());
722 return false;
725 init_caller_stats (&stats);
726 node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats, false);
728 if (ipa_fn_summaries->get (node)->self_size < stats.n_calls)
730 if (dump_file)
731 fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
732 node->name ());
733 return true;
736 /* When profile is available and function is hot, propagate into it even if
737 calls seems cold; constant propagation can improve function's speed
738 significantly. */
739 if (max_count > profile_count::zero ())
741 if (stats.count_sum > node->count.ipa ().apply_scale (90, 100))
743 if (dump_file)
744 fprintf (dump_file, "Considering %s for cloning; "
745 "usually called directly.\n",
746 node->name ());
747 return true;
750 if (!stats.n_hot_calls)
752 if (dump_file)
753 fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
754 node->name ());
755 return false;
757 if (dump_file)
758 fprintf (dump_file, "Considering %s for cloning.\n",
759 node->name ());
760 return true;
763 template <typename valtype>
764 class value_topo_info
766 public:
767 /* Head of the linked list of topologically sorted values. */
768 ipcp_value<valtype> *values_topo;
769 /* Stack for creating SCCs, represented by a linked list too. */
770 ipcp_value<valtype> *stack;
771 /* Counter driving the algorithm in add_val_to_toposort. */
772 int dfs_counter;
774 value_topo_info () : values_topo (NULL), stack (NULL), dfs_counter (0)
776 void add_val (ipcp_value<valtype> *cur_val);
777 void propagate_effects ();
780 /* Arrays representing a topological ordering of call graph nodes and a stack
781 of nodes used during constant propagation and also data required to perform
782 topological sort of values and propagation of benefits in the determined
783 order. */
785 class ipa_topo_info
787 public:
788 /* Array with obtained topological order of cgraph nodes. */
789 struct cgraph_node **order;
790 /* Stack of cgraph nodes used during propagation within SCC until all values
791 in the SCC stabilize. */
792 struct cgraph_node **stack;
793 int nnodes, stack_top;
795 value_topo_info<tree> constants;
796 value_topo_info<ipa_polymorphic_call_context> contexts;
798 ipa_topo_info () : order(NULL), stack(NULL), nnodes(0), stack_top(0),
799 constants ()
803 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
805 static void
806 build_toporder_info (struct ipa_topo_info *topo)
808 topo->order = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
809 topo->stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
811 gcc_checking_assert (topo->stack_top == 0);
812 topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
815 /* Free information about strongly connected components and the arrays in
816 TOPO. */
818 static void
819 free_toporder_info (struct ipa_topo_info *topo)
821 ipa_free_postorder_info ();
822 free (topo->order);
823 free (topo->stack);
826 /* Add NODE to the stack in TOPO, unless it is already there. */
828 static inline void
829 push_node_to_stack (struct ipa_topo_info *topo, struct cgraph_node *node)
831 struct ipa_node_params *info = IPA_NODE_REF (node);
832 if (info->node_enqueued)
833 return;
834 info->node_enqueued = 1;
835 topo->stack[topo->stack_top++] = node;
838 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
839 is empty. */
841 static struct cgraph_node *
842 pop_node_from_stack (struct ipa_topo_info *topo)
844 if (topo->stack_top)
846 struct cgraph_node *node;
847 topo->stack_top--;
848 node = topo->stack[topo->stack_top];
849 IPA_NODE_REF (node)->node_enqueued = 0;
850 return node;
852 else
853 return NULL;
856 /* Set lattice LAT to bottom and return true if it previously was not set as
857 such. */
859 template <typename valtype>
860 inline bool
861 ipcp_lattice<valtype>::set_to_bottom ()
863 bool ret = !bottom;
864 bottom = true;
865 return ret;
868 /* Mark lattice as containing an unknown value and return true if it previously
869 was not marked as such. */
871 template <typename valtype>
872 inline bool
873 ipcp_lattice<valtype>::set_contains_variable ()
875 bool ret = !contains_variable;
876 contains_variable = true;
877 return ret;
880 /* Set all aggegate lattices in PLATS to bottom and return true if they were
881 not previously set as such. */
883 static inline bool
884 set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
886 bool ret = !plats->aggs_bottom;
887 plats->aggs_bottom = true;
888 return ret;
891 /* Mark all aggegate lattices in PLATS as containing an unknown value and
892 return true if they were not previously marked as such. */
894 static inline bool
895 set_agg_lats_contain_variable (struct ipcp_param_lattices *plats)
897 bool ret = !plats->aggs_contain_variable;
898 plats->aggs_contain_variable = true;
899 return ret;
902 bool
903 ipcp_vr_lattice::meet_with (const ipcp_vr_lattice &other)
905 return meet_with_1 (&other.m_vr);
908 /* Meet the current value of the lattice with value ranfge described by VR
909 lattice. */
911 bool
912 ipcp_vr_lattice::meet_with (const value_range *p_vr)
914 return meet_with_1 (p_vr);
917 /* Meet the current value of the lattice with value range described by
918 OTHER_VR lattice. Return TRUE if anything changed. */
920 bool
921 ipcp_vr_lattice::meet_with_1 (const value_range *other_vr)
923 if (bottom_p ())
924 return false;
926 if (other_vr->varying_p ())
927 return set_to_bottom ();
929 value_range save (m_vr);
930 m_vr.union_ (other_vr);
931 return !m_vr.ignore_equivs_equal_p (save);
934 /* Return true if value range information in the lattice is yet unknown. */
936 bool
937 ipcp_vr_lattice::top_p () const
939 return m_vr.undefined_p ();
942 /* Return true if value range information in the lattice is known to be
943 unusable. */
945 bool
946 ipcp_vr_lattice::bottom_p () const
948 return m_vr.varying_p ();
951 /* Set value range information in the lattice to bottom. Return true if it
952 previously was in a different state. */
954 bool
955 ipcp_vr_lattice::set_to_bottom ()
957 if (m_vr.varying_p ())
958 return false;
959 m_vr.set_varying ();
960 return true;
963 /* Set lattice value to bottom, if it already isn't the case. */
965 bool
966 ipcp_bits_lattice::set_to_bottom ()
968 if (bottom_p ())
969 return false;
970 m_lattice_val = IPA_BITS_VARYING;
971 m_value = 0;
972 m_mask = -1;
973 return true;
976 /* Set to constant if it isn't already. Only meant to be called
977 when switching state from TOP. */
979 bool
980 ipcp_bits_lattice::set_to_constant (widest_int value, widest_int mask)
982 gcc_assert (top_p ());
983 m_lattice_val = IPA_BITS_CONSTANT;
984 m_value = value;
985 m_mask = mask;
986 return true;
989 /* Convert operand to value, mask form. */
991 void
992 ipcp_bits_lattice::get_value_and_mask (tree operand, widest_int *valuep, widest_int *maskp)
994 wide_int get_nonzero_bits (const_tree);
996 if (TREE_CODE (operand) == INTEGER_CST)
998 *valuep = wi::to_widest (operand);
999 *maskp = 0;
1001 else
1003 *valuep = 0;
1004 *maskp = -1;
1008 /* Meet operation, similar to ccp_lattice_meet, we xor values
1009 if this->value, value have different values at same bit positions, we want
1010 to drop that bit to varying. Return true if mask is changed.
1011 This function assumes that the lattice value is in CONSTANT state */
1013 bool
1014 ipcp_bits_lattice::meet_with_1 (widest_int value, widest_int mask,
1015 unsigned precision)
1017 gcc_assert (constant_p ());
1019 widest_int old_mask = m_mask;
1020 m_mask = (m_mask | mask) | (m_value ^ value);
1022 if (wi::sext (m_mask, precision) == -1)
1023 return set_to_bottom ();
1025 return m_mask != old_mask;
1028 /* Meet the bits lattice with operand
1029 described by <value, mask, sgn, precision. */
1031 bool
1032 ipcp_bits_lattice::meet_with (widest_int value, widest_int mask,
1033 unsigned precision)
1035 if (bottom_p ())
1036 return false;
1038 if (top_p ())
1040 if (wi::sext (mask, precision) == -1)
1041 return set_to_bottom ();
1042 return set_to_constant (value, mask);
1045 return meet_with_1 (value, mask, precision);
1048 /* Meet bits lattice with the result of bit_value_binop (other, operand)
1049 if code is binary operation or bit_value_unop (other) if code is unary op.
1050 In the case when code is nop_expr, no adjustment is required. */
bool
ipcp_bits_lattice::meet_with (ipcp_bits_lattice& other, unsigned precision,
			      signop sgn, enum tree_code code, tree operand)
{
  /* If the source lattice is BOTTOM, so are we.  */
  if (other.bottom_p ())
    return set_to_bottom ();

  /* Our BOTTOM cannot change; a TOP source carries no information yet.  */
  if (bottom_p () || other.top_p ())
    return false;

  widest_int adjusted_value, adjusted_mask;

  if (TREE_CODE_CLASS (code) == tcc_binary)
    {
      tree type = TREE_TYPE (operand);
      gcc_assert (INTEGRAL_TYPE_P (type));
      widest_int o_value, o_mask;
      /* Convert OPERAND into <value, mask> form for bit_value_binop.  */
      get_value_and_mask (operand, &o_value, &o_mask);

      bit_value_binop (code, sgn, precision, &adjusted_value, &adjusted_mask,
		       sgn, precision, other.get_value (), other.get_mask (),
		       TYPE_SIGN (type), TYPE_PRECISION (type), o_value, o_mask);

      /* All bits unknown after the operation means BOTTOM.  */
      if (wi::sext (adjusted_mask, precision) == -1)
	return set_to_bottom ();
    }

  else if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      bit_value_unop (code, sgn, precision, &adjusted_value,
		      &adjusted_mask, sgn, precision, other.get_value (),
		      other.get_mask ());

      if (wi::sext (adjusted_mask, precision) == -1)
	return set_to_bottom ();
    }

  else
    /* Any other operation class cannot be modelled bitwise.  */
    return set_to_bottom ();

  if (top_p ())
    {
      /* First meaningful value for this lattice.  */
      if (wi::sext (adjusted_mask, precision) == -1)
	return set_to_bottom ();
      return set_to_constant (adjusted_value, adjusted_mask);
    }
  else
    /* Otherwise merge with the value we already track.  */
    return meet_with_1 (adjusted_value, adjusted_mask, precision);
}
1102 /* Mark bot aggregate and scalar lattices as containing an unknown variable,
1103 return true is any of them has not been marked as such so far. */
1105 static inline bool
1106 set_all_contains_variable (struct ipcp_param_lattices *plats)
1108 bool ret;
1109 ret = plats->itself.set_contains_variable ();
1110 ret |= plats->ctxlat.set_contains_variable ();
1111 ret |= set_agg_lats_contain_variable (plats);
1112 ret |= plats->bits_lattice.set_to_bottom ();
1113 ret |= plats->m_value_range.set_to_bottom ();
1114 return ret;
1117 /* Worker of call_for_symbol_thunks_and_aliases, increment the integer DATA
1118 points to by the number of callers to NODE. */
1120 static bool
1121 count_callers (cgraph_node *node, void *data)
1123 int *caller_count = (int *) data;
1125 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
1126 /* Local thunks can be handled transparently, but if the thunk can not
1127 be optimized out, count it as a real use. */
1128 if (!cs->caller->thunk.thunk_p || !cs->caller->local.local)
1129 ++*caller_count;
1130 return false;
1133 /* Worker of call_for_symbol_thunks_and_aliases, it is supposed to be called on
1134 the one caller of some other node. Set the caller's corresponding flag. */
1136 static bool
1137 set_single_call_flag (cgraph_node *node, void *)
1139 cgraph_edge *cs = node->callers;
1140 /* Local thunks can be handled transparently, skip them. */
1141 while (cs && cs->caller->thunk.thunk_p && cs->caller->local.local)
1142 cs = cs->next_caller;
1143 if (cs)
1145 IPA_NODE_REF (cs->caller)->node_calling_single_call = true;
1146 return true;
1148 return false;
1151 /* Initialize ipcp_lattices. */
static void
initialize_node_lattices (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  struct cgraph_edge *ie;
  bool disable = false, variable = false;
  int i;

  gcc_checking_assert (node->has_gimple_body_p ());
  if (node->local.local)
    {
      /* A local node: all callers are visible, so count them and mark the
	 single-call situation, which later heuristics exploit.  */
      int caller_count = 0;
      node->call_for_symbol_thunks_and_aliases (count_callers, &caller_count,
						true);
      gcc_checking_assert (caller_count > 0);
      if (caller_count == 1)
	node->call_for_symbol_thunks_and_aliases (set_single_call_flag,
						  NULL, true);
    }
  else
    {
      /* When cloning is allowed, we can assume that externally visible
	 functions are not called.  We will compensate this by cloning
	 later.  */
      if (ipcp_versionable_function_p (node)
	  && ipcp_cloning_candidate_p (node))
	variable = true;
      else
	disable = true;
    }

  /* Value-range lattices need explicit initialization in all cases.  */
  for (i = 0; i < ipa_get_param_count (info); i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      plats->m_value_range.init ();
    }

  if (disable || variable)
    {
      for (i = 0; i < ipa_get_param_count (info); i++)
	{
	  struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
	  if (disable)
	    {
	      /* Disable propagation entirely: everything to BOTTOM.  */
	      plats->itself.set_to_bottom ();
	      plats->ctxlat.set_to_bottom ();
	      set_agg_lats_to_bottom (plats);
	      plats->bits_lattice.set_to_bottom ();
	      plats->m_value_range.set_to_bottom ();
	    }
	  else
	    set_all_contains_variable (plats);
	}
      if (dump_file && (dump_flags & TDF_DETAILS)
	  && !node->alias && !node->thunk.thunk_p)
	fprintf (dump_file, "Marking all lattices of %s as %s\n",
		 node->dump_name (), disable ? "BOTTOM" : "VARIABLE");
    }

  /* Flag parameters that are used in polymorphic indirect calls so
     devirtualization code knows which lattices matter.  */
  for (ie = node->indirect_calls; ie; ie = ie->next_callee)
    if (ie->indirect_info->polymorphic
	&& ie->indirect_info->param_index >= 0)
      {
	/* NOTE(review): this assert is redundant — the enclosing condition
	   already checks param_index >= 0.  Harmless, kept as-is.  */
	gcc_checking_assert (ie->indirect_info->param_index >= 0);
	ipa_get_parm_lattices (info,
			       ie->indirect_info->param_index)->virt_call = 1;
      }
}
1222 /* Return the result of a (possibly arithmetic) pass through jump function
1223 JFUNC on the constant value INPUT. RES_TYPE is the type of the parameter
1224 to which the result is passed. Return NULL_TREE if that cannot be
1225 determined or be considered an interprocedural invariant. */
static tree
ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input,
				tree res_type)
{
  tree res;

  /* A plain pass-through (no arithmetic) forwards the value unchanged.  */
  if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
    return input;
  /* Only interprocedural invariants can be folded safely.  */
  if (!is_gimple_ip_invariant (input))
    return NULL_TREE;

  tree_code opcode = ipa_get_jf_pass_through_operation (jfunc);
  if (!res_type)
    {
      /* Without a known parameter type, infer a result type where the
	 opcode makes that unambiguous; otherwise give up.  */
      if (TREE_CODE_CLASS (opcode) == tcc_comparison)
	res_type = boolean_type_node;
      else if (expr_type_first_operand_type_p (opcode))
	res_type = TREE_TYPE (input);
      else
	return NULL_TREE;
    }

  if (TREE_CODE_CLASS (opcode) == tcc_unary)
    res = fold_unary (opcode, res_type, input);
  else
    res = fold_binary (opcode, res_type, input,
		       ipa_get_jf_pass_through_operand (jfunc));

  /* Folding may yield something that is not an IP invariant (or NULL).  */
  if (res && !is_gimple_ip_invariant (res))
    return NULL_TREE;

  return res;
}
1261 /* Return the result of an ancestor jump function JFUNC on the constant value
1262 INPUT. Return NULL_TREE if that cannot be determined. */
1264 static tree
1265 ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
1267 gcc_checking_assert (TREE_CODE (input) != TREE_BINFO);
1268 if (TREE_CODE (input) == ADDR_EXPR)
1270 tree t = TREE_OPERAND (input, 0);
1271 t = build_ref_for_offset (EXPR_LOCATION (t), t,
1272 ipa_get_jf_ancestor_offset (jfunc), false,
1273 ptr_type_node, NULL, false);
1274 return build_fold_addr_expr (t);
1276 else
1277 return NULL_TREE;
1280 /* Determine whether JFUNC evaluates to a single known constant value and if
1281 so, return it. Otherwise return NULL. INFO describes the caller node or
1282 the one it is inlined to, so that pass-through jump functions can be
1283 evaluated. PARM_TYPE is the type of the parameter to which the result is
1284 passed. */
tree
ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc,
		      tree parm_type)
{
  if (jfunc->type == IPA_JF_CONST)
    return ipa_get_jf_constant (jfunc);
  else if (jfunc->type == IPA_JF_PASS_THROUGH
	   || jfunc->type == IPA_JF_ANCESTOR)
    {
      tree input;
      int idx;

      /* Index of the caller parameter the jump function reads from.  */
      if (jfunc->type == IPA_JF_PASS_THROUGH)
	idx = ipa_get_jf_pass_through_formal_id (jfunc);
      else
	idx = ipa_get_jf_ancestor_formal_id (jfunc);

      if (info->ipcp_orig_node)
	/* A specialized clone: the constants are already recorded.  */
	input = info->known_csts[idx];
      else
	{
	  /* Otherwise consult the propagation lattices; they must exist,
	     the index must be in range and hold exactly one constant.  */
	  ipcp_lattice<tree> *lat;

	  if (!info->lattices
	      || idx >= ipa_get_param_count (info))
	    return NULL_TREE;
	  lat = ipa_get_scalar_lat (info, idx);
	  if (!lat->is_single_const ())
	    return NULL_TREE;
	  input = lat->values->value;
	}

      if (!input)
	return NULL_TREE;

      if (jfunc->type == IPA_JF_PASS_THROUGH)
	return ipa_get_jf_pass_through_result (jfunc, input, parm_type);
      else
	return ipa_get_jf_ancestor_result (jfunc, input);
    }
  else
    return NULL_TREE;
}
1330 /* Determie whether JFUNC evaluates to single known polymorphic context, given
1331 that INFO describes the caller node or the one it is inlined to, CS is the
1332 call graph edge corresponding to JFUNC and CSIDX index of the described
1333 parameter. */
ipa_polymorphic_call_context
ipa_context_from_jfunc (ipa_node_params *info, cgraph_edge *cs, int csidx,
			ipa_jump_func *jfunc)
{
  ipa_edge_args *args = IPA_EDGE_REF (cs);
  ipa_polymorphic_call_context ctx;
  /* Context recorded directly on the call edge, if any.  */
  ipa_polymorphic_call_context *edge_ctx
    = cs ? ipa_get_ith_polymorhic_call_context (args, csidx) : NULL;

  if (edge_ctx && !edge_ctx->useless_p ())
    ctx = *edge_ctx;

  if (jfunc->type == IPA_JF_PASS_THROUGH
      || jfunc->type == IPA_JF_ANCESTOR)
    {
      ipa_polymorphic_call_context srcctx;
      int srcidx;
      bool type_preserved = true;
      if (jfunc->type == IPA_JF_PASS_THROUGH)
	{
	  /* Arithmetic pass-throughs destroy the pointer identity, so
	     only a plain copy can carry a context.  */
	  if (ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
	    return ctx;
	  type_preserved = ipa_get_jf_pass_through_type_preserved (jfunc);
	  srcidx = ipa_get_jf_pass_through_formal_id (jfunc);
	}
      else
	{
	  type_preserved = ipa_get_jf_ancestor_type_preserved (jfunc);
	  srcidx = ipa_get_jf_ancestor_formal_id (jfunc);
	}
      if (info->ipcp_orig_node)
	{
	  /* Specialized clone: use the context recorded at clone time.  */
	  if (info->known_contexts.exists ())
	    srcctx = info->known_contexts[srcidx];
	}
      else
	{
	  /* Otherwise the lattice must hold exactly one context.  */
	  if (!info->lattices
	      || srcidx >= ipa_get_param_count (info))
	    return ctx;
	  ipcp_lattice<ipa_polymorphic_call_context> *lat;
	  lat = ipa_get_poly_ctx_lat (info, srcidx);
	  if (!lat->is_single_const ())
	    return ctx;
	  srcctx = lat->values->value;
	}
      if (srcctx.useless_p ())
	return ctx;
      /* Adjust the inherited context per the jump function's semantics.  */
      if (jfunc->type == IPA_JF_ANCESTOR)
	srcctx.offset_by (ipa_get_jf_ancestor_offset (jfunc));
      if (!type_preserved)
	srcctx.possible_dynamic_type_change (cs->in_polymorphic_cdtor);
      srcctx.combine_with (ctx);
      return srcctx;
    }

  return ctx;
}
1394 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
1395 bottom, not containing a variable component and without any known value at
1396 the same time. */
DEBUG_FUNCTION void
ipcp_verify_propagated_values (void)
{
  struct cgraph_node *node;

  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    {
      struct ipa_node_params *info = IPA_NODE_REF (node);
      int i, count = ipa_get_param_count (info);

      for (i = 0; i < count; i++)
	{
	  ipcp_lattice<tree> *lat = ipa_get_scalar_lat (info, i);

	  /* A lattice that is neither BOTTOM nor VARIABLE must carry at
	     least one value — TOP after propagation is a pass bug.  */
	  if (!lat->bottom
	      && !lat->contains_variable
	      && lat->values_count == 0)
	    {
	      /* Dump the full state before aborting to aid debugging.  */
	      if (dump_file)
		{
		  symtab->dump (dump_file);
		  fprintf (dump_file, "\nIPA lattices after constant "
			   "propagation, before gcc_unreachable:\n");
		  print_all_lattices (dump_file, true, false);
		}

	      gcc_unreachable ();
	    }
	}
    }
}
1430 /* Return true iff X and Y should be considered equal values by IPA-CP. */
1432 static bool
1433 values_equal_for_ipcp_p (tree x, tree y)
1435 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
1437 if (x == y)
1438 return true;
1440 if (TREE_CODE (x) == ADDR_EXPR
1441 && TREE_CODE (y) == ADDR_EXPR
1442 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
1443 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
1444 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
1445 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
1446 else
1447 return operand_equal_p (x, y, 0);
1450 /* Return true iff X and Y should be considered equal contexts by IPA-CP. */
static bool
values_equal_for_ipcp_p (ipa_polymorphic_call_context x,
			 ipa_polymorphic_call_context y)
{
  /* Contexts provide their own equality; delegate to it.  */
  return x.equal_to (y);
}
1460 /* Add a new value source to the value represented by THIS, marking that a
1461 value comes from edge CS and (if the underlying jump function is a
1462 pass-through or an ancestor one) from a caller value SRC_VAL of a caller
1463 parameter described by SRC_INDEX. OFFSET is negative if the source was the
1464 scalar value of the parameter itself or the offset within an aggregate. */
1466 template <typename valtype>
1467 void
1468 ipcp_value<valtype>::add_source (cgraph_edge *cs, ipcp_value *src_val,
1469 int src_idx, HOST_WIDE_INT offset)
1471 ipcp_value_source<valtype> *src;
1473 src = new (ipcp_sources_pool.allocate ()) ipcp_value_source<valtype>;
1474 src->offset = offset;
1475 src->cs = cs;
1476 src->val = src_val;
1477 src->index = src_idx;
1479 src->next = sources;
1480 sources = src;
1483 /* Allocate a new ipcp_value holding a tree constant, initialize its value to
1484 SOURCE and clear all other fields. */
1486 static ipcp_value<tree> *
1487 allocate_and_init_ipcp_value (tree source)
1489 ipcp_value<tree> *val;
1491 val = new (ipcp_cst_values_pool.allocate ()) ipcp_value<tree>();
1492 val->value = source;
1493 return val;
1496 /* Allocate a new ipcp_value holding a polymorphic context, initialize its
1497 value to SOURCE and clear all other fields. */
1499 static ipcp_value<ipa_polymorphic_call_context> *
1500 allocate_and_init_ipcp_value (ipa_polymorphic_call_context source)
1502 ipcp_value<ipa_polymorphic_call_context> *val;
1504 // TODO
1505 val = new (ipcp_poly_ctx_values_pool.allocate ())
1506 ipcp_value<ipa_polymorphic_call_context>();
1507 val->value = source;
1508 return val;
1511 /* Try to add NEWVAL to LAT, potentially creating a new ipcp_value for it. CS,
1512 SRC_VAL SRC_INDEX and OFFSET are meant for add_source and have the same
1513 meaning. OFFSET -1 means the source is scalar and not a part of an
1514 aggregate. */
template <typename valtype>
bool
ipcp_lattice<valtype>::add_value (valtype newval, cgraph_edge *cs,
				  ipcp_value<valtype> *src_val,
				  int src_idx, HOST_WIDE_INT offset)
{
  ipcp_value<valtype> *val;

  if (bottom)
    return false;

  /* If an equal value is already present, just record the new source —
     but within an SCC avoid adding the same edge twice, which would
     otherwise loop forever during propagation.  */
  for (val = values; val; val = val->next)
    if (values_equal_for_ipcp_p (val->value, newval))
      {
	if (ipa_edge_within_scc (cs))
	  {
	    ipcp_value_source<valtype> *s;
	    for (s = val->sources; s; s = s->next)
	      if (s->cs == cs)
		break;
	    if (s)
	      return false;
	  }

	val->add_source (cs, src_val, src_idx, offset);
	return false;
      }

  /* Too many distinct values: give up and drop the lattice to BOTTOM.  */
  if (values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
    {
      /* We can only free sources, not the values themselves, because sources
	 of other values in this SCC might point to them.  */
      for (val = values; val; val = val->next)
	{
	  while (val->sources)
	    {
	      ipcp_value_source<valtype> *src = val->sources;
	      val->sources = src->next;
	      /* The pool is declared for <tree>; the cast is safe because
		 all ipcp_value_source instantiations share one layout.  */
	      ipcp_sources_pool.remove ((ipcp_value_source<tree>*)src);
	    }
	}

      values = NULL;
      return set_to_bottom ();
    }

  /* Genuinely new value: allocate it and push onto the list head.  */
  values_count++;
  val = allocate_and_init_ipcp_value (newval);
  val->add_source (cs, src_val, src_idx, offset);
  val->next = values;
  values = val;
  return true;
}
1570 /* Propagate values through a pass-through jump function JFUNC associated with
1571 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1572 is the index of the source parameter. PARM_TYPE is the type of the
1573 parameter to which the result is passed. */
static bool
propagate_vals_across_pass_through (cgraph_edge *cs, ipa_jump_func *jfunc,
				    ipcp_lattice<tree> *src_lat,
				    ipcp_lattice<tree> *dest_lat, int src_idx,
				    tree parm_type)
{
  ipcp_value<tree> *src_val;
  bool ret = false;

  /* Do not create new values when propagating within an SCC because if there
     are arithmetic functions with circular dependencies, there is infinite
     number of them and we would just make lattices bottom.  If this condition
     is ever relaxed we have to detect self-feeding recursive calls in
     cgraph_edge_brings_value_p in a smarter way.  */
  if ((ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
      && ipa_edge_within_scc (cs))
    ret = dest_lat->set_contains_variable ();
  else
    /* Fold the jump function over each known source value; values that do
       not fold to an invariant degrade the destination to VARIABLE.  */
    for (src_val = src_lat->values; src_val; src_val = src_val->next)
      {
	tree cstval = ipa_get_jf_pass_through_result (jfunc, src_val->value,
						      parm_type);

	if (cstval)
	  ret |= dest_lat->add_value (cstval, cs, src_val, src_idx);
	else
	  ret |= dest_lat->set_contains_variable ();
      }

  return ret;
}
1607 /* Propagate values through an ancestor jump function JFUNC associated with
1608 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1609 is the index of the source parameter. */
1611 static bool
1612 propagate_vals_across_ancestor (struct cgraph_edge *cs,
1613 struct ipa_jump_func *jfunc,
1614 ipcp_lattice<tree> *src_lat,
1615 ipcp_lattice<tree> *dest_lat, int src_idx)
1617 ipcp_value<tree> *src_val;
1618 bool ret = false;
1620 if (ipa_edge_within_scc (cs))
1621 return dest_lat->set_contains_variable ();
1623 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1625 tree t = ipa_get_jf_ancestor_result (jfunc, src_val->value);
1627 if (t)
1628 ret |= dest_lat->add_value (t, cs, src_val, src_idx);
1629 else
1630 ret |= dest_lat->set_contains_variable ();
1633 return ret;
1636 /* Propagate scalar values across jump function JFUNC that is associated with
1637 edge CS and put the values into DEST_LAT. PARM_TYPE is the type of the
1638 parameter to which the result is passed. */
static bool
propagate_scalar_across_jump_function (struct cgraph_edge *cs,
				       struct ipa_jump_func *jfunc,
				       ipcp_lattice<tree> *dest_lat,
				       tree param_type)
{
  if (dest_lat->bottom)
    return false;

  if (jfunc->type == IPA_JF_CONST)
    {
      /* A constant jump function feeds the value in directly.  */
      tree val = ipa_get_jf_constant (jfunc);
      return dest_lat->add_value (val, cs, NULL, 0);
    }
  else if (jfunc->type == IPA_JF_PASS_THROUGH
	   || jfunc->type == IPA_JF_ANCESTOR)
    {
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      ipcp_lattice<tree> *src_lat;
      int src_idx;
      bool ret;

      if (jfunc->type == IPA_JF_PASS_THROUGH)
	src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
      else
	src_idx = ipa_get_jf_ancestor_formal_id (jfunc);

      src_lat = ipa_get_scalar_lat (caller_info, src_idx);
      if (src_lat->bottom)
	return dest_lat->set_contains_variable ();

      /* If we would need to clone the caller and cannot, do not propagate.  */
      if (!ipcp_versionable_function_p (cs->caller)
	  && (src_lat->contains_variable
	      || (src_lat->values_count > 1)))
	return dest_lat->set_contains_variable ();

      if (jfunc->type == IPA_JF_PASS_THROUGH)
	ret = propagate_vals_across_pass_through (cs, jfunc, src_lat,
						  dest_lat, src_idx, param_type);
      else
	ret = propagate_vals_across_ancestor (cs, jfunc, src_lat, dest_lat,
					      src_idx);

      /* Variability of the source infects the destination.  */
      if (src_lat->contains_variable)
	ret |= dest_lat->set_contains_variable ();

      return ret;
    }

  /* TODO: We currently do not handle member method pointers in IPA-CP (we only
     use it for indirect inlining), we should propagate them too.  */
  return dest_lat->set_contains_variable ();
}
1695 /* Propagate scalar values across jump function JFUNC that is associated with
1696 edge CS and describes argument IDX and put the values into DEST_LAT. */
static bool
propagate_context_across_jump_function (cgraph_edge *cs,
					ipa_jump_func *jfunc, int idx,
					ipcp_lattice<ipa_polymorphic_call_context> *dest_lat)
{
  ipa_edge_args *args = IPA_EDGE_REF (cs);
  if (dest_lat->bottom)
    return false;
  bool ret = false;
  bool added_sth = false;
  bool type_preserved = true;

  /* Context recorded directly on the call edge, if any.  */
  ipa_polymorphic_call_context edge_ctx, *edge_ctx_ptr
    = ipa_get_ith_polymorhic_call_context (args, idx);

  if (edge_ctx_ptr)
    edge_ctx = *edge_ctx_ptr;

  if (jfunc->type == IPA_JF_PASS_THROUGH
      || jfunc->type == IPA_JF_ANCESTOR)
    {
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx;
      ipcp_lattice<ipa_polymorphic_call_context> *src_lat;

      /* TODO: Once we figure out how to propagate speculations, it will
	 probably be a good idea to switch to speculation if type_preserved is
	 not set instead of punting.  */
      if (jfunc->type == IPA_JF_PASS_THROUGH)
	{
	  /* Arithmetic pass-throughs cannot carry a context.  */
	  if (ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
	    goto prop_fail;
	  type_preserved = ipa_get_jf_pass_through_type_preserved (jfunc);
	  src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
	}
      else
	{
	  type_preserved = ipa_get_jf_ancestor_type_preserved (jfunc);
	  src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
	}

      src_lat = ipa_get_poly_ctx_lat (caller_info, src_idx);
      /* If we would need to clone the caller and cannot, do not propagate.  */
      if (!ipcp_versionable_function_p (cs->caller)
	  && (src_lat->contains_variable
	      || (src_lat->values_count > 1)))
	goto prop_fail;

      ipcp_value<ipa_polymorphic_call_context> *src_val;
      for (src_val = src_lat->values; src_val; src_val = src_val->next)
	{
	  ipa_polymorphic_call_context cur = src_val->value;

	  /* Adjust each caller context per the jump function before
	     combining it with what the edge itself tells us.  */
	  if (!type_preserved)
	    cur.possible_dynamic_type_change (cs->in_polymorphic_cdtor);
	  if (jfunc->type == IPA_JF_ANCESTOR)
	    cur.offset_by (ipa_get_jf_ancestor_offset (jfunc));
	  /* TODO: In cases we know how the context is going to be used,
	     we can improve the result by passing proper OTR_TYPE.  */
	  cur.combine_with (edge_ctx);
	  if (!cur.useless_p ())
	    {
	      if (src_lat->contains_variable
		  && !edge_ctx.equal_to (cur))
		ret |= dest_lat->set_contains_variable ();
	      ret |= dest_lat->add_value (cur, cs, src_val, src_idx);
	      added_sth = true;
	    }
	}
    }

 prop_fail:
  /* When nothing was propagated from the caller, fall back to whatever
     the edge itself knows, or mark the destination variable.  */
  if (!added_sth)
    {
      if (!edge_ctx.useless_p ())
	ret |= dest_lat->add_value (edge_ctx, cs);
      else
	ret |= dest_lat->set_contains_variable ();
    }

  return ret;
}
1782 /* Propagate bits across jfunc that is associated with
1783 edge cs and update dest_lattice accordingly. */
bool
propagate_bits_across_jump_function (cgraph_edge *cs, int idx,
				     ipa_jump_func *jfunc,
				     ipcp_bits_lattice *dest_lattice)
{
  if (dest_lattice->bottom_p ())
    return false;

  enum availability availability;
  cgraph_node *callee = cs->callee->function_symbol (&availability);
  struct ipa_node_params *callee_info = IPA_NODE_REF (callee);
  tree parm_type = ipa_get_type (callee_info, idx);

  /* For K&R C programs, ipa_get_type() could return NULL_TREE.  Avoid the
     transform for these cases.  Similarly, we can have bad type mismatches
     with LTO, avoid doing anything with those too.  */
  if (!parm_type
      || (!INTEGRAL_TYPE_P (parm_type) && !POINTER_TYPE_P (parm_type)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Setting dest_lattice to bottom, because type of "
		 "param %i of %s is NULL or unsuitable for bits propagation\n",
		 idx, cs->callee->name ());

      return dest_lattice->set_to_bottom ();
    }

  unsigned precision = TYPE_PRECISION (parm_type);
  signop sgn = TYPE_SIGN (parm_type);

  if (jfunc->type == IPA_JF_PASS_THROUGH
      || jfunc->type == IPA_JF_ANCESTOR)
    {
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      tree operand = NULL_TREE;
      enum tree_code code;
      unsigned src_idx;

      if (jfunc->type == IPA_JF_PASS_THROUGH)
	{
	  code = ipa_get_jf_pass_through_operation (jfunc);
	  src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
	  if (code != NOP_EXPR)
	    operand = ipa_get_jf_pass_through_operand (jfunc);
	}
      else
	{
	  /* Model an ancestor jump function as pointer arithmetic: the
	     byte offset is added to the caller's pointer value.  */
	  code = POINTER_PLUS_EXPR;
	  src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
	  unsigned HOST_WIDE_INT offset = ipa_get_jf_ancestor_offset (jfunc) / BITS_PER_UNIT;
	  operand = build_int_cstu (size_type_node, offset);
	}

      struct ipcp_param_lattices *src_lats
	= ipa_get_parm_lattices (caller_info, src_idx);

      /* Try to propagate bits if src_lattice is bottom, but jfunc is known.
	 for eg consider:
	 int f(int x)
	 {
	   g (x & 0xff);
	 }
	 Assume lattice for x is bottom, however we can still propagate
	 result of x & 0xff == 0xff, which gets computed during ccp1 pass
	 and we store it in jump function during analysis stage.  */

      if (src_lats->bits_lattice.bottom_p ()
	  && jfunc->bits)
	return dest_lattice->meet_with (jfunc->bits->value, jfunc->bits->mask,
					precision);
      else
	return dest_lattice->meet_with (src_lats->bits_lattice, precision, sgn,
					code, operand);
    }
  /* NOTE(review): this branch is unreachable — IPA_JF_ANCESTOR is already
     handled by the condition above.  Harmless, kept as-is.  */
  else if (jfunc->type == IPA_JF_ANCESTOR)
    return dest_lattice->set_to_bottom ();
  else if (jfunc->bits)
    return dest_lattice->meet_with (jfunc->bits->value, jfunc->bits->mask,
				    precision);
  else
    return dest_lattice->set_to_bottom ();
}
1869 /* Emulate effects of unary OPERATION and/or conversion from SRC_TYPE to
1870 DST_TYPE on value range in SRC_VR and store it to DST_VR. Return true if
1871 the result is a range or an anti-range. */
1873 static bool
1874 ipa_vr_operation_and_type_effects (value_range *dst_vr, value_range *src_vr,
1875 enum tree_code operation,
1876 tree dst_type, tree src_type)
1878 *dst_vr = value_range ();
1879 extract_range_from_unary_expr (dst_vr, operation, dst_type, src_vr, src_type);
1880 if (dst_vr->varying_p () || dst_vr->undefined_p ())
1881 return false;
1882 return true;
1885 /* Propagate value range across jump function JFUNC that is associated with
1886 edge CS with param of callee of PARAM_TYPE and update DEST_PLATS
1887 accordingly. */
static bool
propagate_vr_across_jump_function (cgraph_edge *cs, ipa_jump_func *jfunc,
				   struct ipcp_param_lattices *dest_plats,
				   tree param_type)
{
  ipcp_vr_lattice *dest_lat = &dest_plats->m_value_range;

  if (dest_lat->bottom_p ())
    return false;

  /* Ranges only make sense for integral and pointer parameters.  */
  if (!param_type
      || (!INTEGRAL_TYPE_P (param_type)
	  && !POINTER_TYPE_P (param_type)))
    return dest_lat->set_to_bottom ();

  if (jfunc->type == IPA_JF_PASS_THROUGH)
    {
      enum tree_code operation = ipa_get_jf_pass_through_operation (jfunc);

      if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
	  int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
	  tree operand_type = ipa_get_type (caller_info, src_idx);
	  struct ipcp_param_lattices *src_lats
	    = ipa_get_parm_lattices (caller_info, src_idx);

	  if (src_lats->m_value_range.bottom_p ())
	    return dest_lat->set_to_bottom ();
	  value_range vr;
	  /* Push the caller's range through the unary operation and the
	     parameter-type conversion.  */
	  if (ipa_vr_operation_and_type_effects (&vr,
						 &src_lats->m_value_range.m_vr,
						 operation, param_type,
						 operand_type))
	    return dest_lat->meet_with (&vr);
	}
    }
  else if (jfunc->type == IPA_JF_CONST)
    {
      tree val = ipa_get_jf_constant (jfunc);
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  /* A constant yields a singleton range [val, val].  */
	  val = fold_convert (param_type, val);
	  if (TREE_OVERFLOW_P (val))
	    val = drop_tree_overflow (val);

	  value_range tmpvr (VR_RANGE, val, val);
	  return dest_lat->meet_with (&tmpvr);
	}
    }

  /* Fall back to the range recorded on the jump function itself,
     adjusted for the conversion to the parameter type.  */
  value_range vr;
  if (jfunc->m_vr
      && ipa_vr_operation_and_type_effects (&vr, jfunc->m_vr, NOP_EXPR,
					    param_type,
					    jfunc->m_vr->type ()))
    return dest_lat->meet_with (&vr);
  else
    return dest_lat->set_to_bottom ();
}
1950 /* If DEST_PLATS already has aggregate items, check that aggs_by_ref matches
1951 NEW_AGGS_BY_REF and if not, mark all aggs as bottoms and return true (in all
1952 other cases, return false). If there are no aggregate items, set
1953 aggs_by_ref to NEW_AGGS_BY_REF. */
1955 static bool
1956 set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
1957 bool new_aggs_by_ref)
1959 if (dest_plats->aggs)
1961 if (dest_plats->aggs_by_ref != new_aggs_by_ref)
1963 set_agg_lats_to_bottom (dest_plats);
1964 return true;
1967 else
1968 dest_plats->aggs_by_ref = new_aggs_by_ref;
1969 return false;
1972 /* Walk aggregate lattices in DEST_PLATS from ***AGLAT on, until ***aglat is an
1973 already existing lattice for the given OFFSET and SIZE, marking all skipped
1974 lattices as containing variable and checking for overlaps. If there is no
1975 already existing lattice for the OFFSET and VAL_SIZE, create one, initialize
1976 it with offset, size and contains_variable to PRE_EXISTING, and return true,
1977 unless there are too many already. If there are two many, return false. If
1978 there are overlaps turn whole DEST_PLATS to bottom and return false. If any
1979 skipped lattices were newly marked as containing variable, set *CHANGE to
1980 true. */
static bool
merge_agg_lats_step (struct ipcp_param_lattices *dest_plats,
		     HOST_WIDE_INT offset, HOST_WIDE_INT val_size,
		     struct ipcp_agg_lattice ***aglat,
		     bool pre_existing, bool *change)
{
  gcc_checking_assert (offset >= 0);

  /* Skip lattices that lie wholly before OFFSET, marking them variable;
     a partial overlap with OFFSET means conflicting layouts → bottom.  */
  while (**aglat && (**aglat)->offset < offset)
    {
      if ((**aglat)->offset + (**aglat)->size > offset)
	{
	  set_agg_lats_to_bottom (dest_plats);
	  return false;
	}
      *change |= (**aglat)->set_contains_variable ();
      *aglat = &(**aglat)->next;
    }

  if (**aglat && (**aglat)->offset == offset)
    {
      /* An item already exists at OFFSET — it must match in size and not
	 be overlapped by its successor, otherwise the layouts conflict.  */
      if ((**aglat)->size != val_size
	  || ((**aglat)->next
	      && (**aglat)->next->offset < offset + val_size))
	{
	  set_agg_lats_to_bottom (dest_plats);
	  return false;
	}
      gcc_checking_assert (!(**aglat)->next
			   || (**aglat)->next->offset >= offset + val_size);
      return true;
    }
  else
    {
      struct ipcp_agg_lattice *new_al;

      /* A following lattice overlapping [OFFSET, OFFSET+VAL_SIZE) also
	 means conflicting layouts.  */
      if (**aglat && (**aglat)->offset < offset + val_size)
	{
	  set_agg_lats_to_bottom (dest_plats);
	  return false;
	}
      /* Respect the limit on tracked aggregate items.  */
      if (dest_plats->aggs_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	return false;
      dest_plats->aggs_count++;
      new_al = ipcp_agg_lattice_pool.allocate ();
      memset (new_al, 0, sizeof (*new_al));

      new_al->offset = offset;
      new_al->size = val_size;
      /* In a pre-existing list, a lattice absent until now implicitly
	 contained a variable.  */
      new_al->contains_variable = pre_existing;

      /* Link the new lattice in before **aglat, keeping offsets sorted.  */
      new_al->next = **aglat;
      **aglat = new_al;
      return true;
    }
}
2039 /* Set all AGLAT and all other aggregate lattices reachable by next pointers as
2040 containing an unknown value. */
2042 static bool
2043 set_chain_of_aglats_contains_variable (struct ipcp_agg_lattice *aglat)
2045 bool ret = false;
2046 while (aglat)
2048 ret |= aglat->set_contains_variable ();
2049 aglat = aglat->next;
2051 return ret;
2054 /* Merge existing aggregate lattices in SRC_PLATS to DEST_PLATS, subtracting
2055 DELTA_OFFSET. CS is the call graph edge and SRC_IDX the index of the source
2056 parameter used for lattice value sources. Return true if DEST_PLATS changed
2057 in any way. */
static bool
merge_aggregate_lattices (struct cgraph_edge *cs,
			  struct ipcp_param_lattices *dest_plats,
			  struct ipcp_param_lattices *src_plats,
			  int src_idx, HOST_WIDE_INT offset_delta)
{
  bool pre_existing = dest_plats->aggs != NULL;
  struct ipcp_agg_lattice **dst_aglat;
  bool ret = false;

  /* By-reference modes must agree, otherwise everything went bottom.  */
  if (set_check_aggs_by_ref (dest_plats, src_plats->aggs_by_ref))
    return true;
  if (src_plats->aggs_bottom)
    return set_agg_lats_contain_variable (dest_plats);
  if (src_plats->aggs_contain_variable)
    ret |= set_agg_lats_contain_variable (dest_plats);
  dst_aglat = &dest_plats->aggs;

  for (struct ipcp_agg_lattice *src_aglat = src_plats->aggs;
       src_aglat;
       src_aglat = src_aglat->next)
    {
      HOST_WIDE_INT new_offset = src_aglat->offset - offset_delta;

      /* Items that fall before the start of the destination object
	 cannot be represented there.  */
      if (new_offset < 0)
	continue;
      if (merge_agg_lats_step (dest_plats, new_offset, src_aglat->size,
			       &dst_aglat, pre_existing, &ret))
	{
	  struct ipcp_agg_lattice *new_al = *dst_aglat;

	  dst_aglat = &(*dst_aglat)->next;
	  if (src_aglat->bottom)
	    {
	      ret |= new_al->set_contains_variable ();
	      continue;
	    }
	  if (src_aglat->contains_variable)
	    ret |= new_al->set_contains_variable ();
	  /* Copy every known value across, recording its source.  */
	  for (ipcp_value<tree> *val = src_aglat->values;
	       val;
	       val = val->next)
	    ret |= new_al->add_value (val->value, cs, val, src_idx,
				      src_aglat->offset);
	}
      else if (dest_plats->aggs_bottom)
	return true;
    }

  /* Destination lattices with no counterpart in the source become
     variable.  */
  ret |= set_chain_of_aglats_contains_variable (*dst_aglat);
  return ret;
}
2111 /* Determine whether there is anything to propagate FROM SRC_PLATS through a
2112 pass-through JFUNC and if so, whether it has conform and conforms to the
2113 rules about propagating values passed by reference. */
2115 static bool
2116 agg_pass_through_permissible_p (struct ipcp_param_lattices *src_plats,
2117 struct ipa_jump_func *jfunc)
2119 return src_plats->aggs
2120 && (!src_plats->aggs_by_ref
2121 || ipa_get_jf_pass_through_agg_preserved (jfunc));
2124 /* Propagate aggregate values across jump function JFUNC that is associated
2125 with edge CS and put the values into DEST_PLATS. */
2127 static bool
2128 propagate_aggs_across_jump_function (struct cgraph_edge *cs,
2129 struct ipa_jump_func *jfunc,
2130 struct ipcp_param_lattices *dest_plats)
2132 bool ret = false;
2134 if (dest_plats->aggs_bottom)
2135 return false;
/* Case 1: a simple pass-through of a caller parameter — merge the caller's
   aggregate lattices into the callee's, with no offset shift.  */
2137 if (jfunc->type == IPA_JF_PASS_THROUGH
2138 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2140 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2141 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
2142 struct ipcp_param_lattices *src_plats;
2144 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
2145 if (agg_pass_through_permissible_p (src_plats, jfunc))
2147 /* Currently we do not produce clobber aggregate jump
2148 functions, replace with merging when we do. */
2149 gcc_assert (!jfunc->agg.items);
2150 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats,
2151 src_idx, 0);
2153 else
2154 ret |= set_agg_lats_contain_variable (dest_plats);
/* Case 2: an ancestor jump function that preserves the aggregate —
   merge with the ancestor offset applied.  */
2156 else if (jfunc->type == IPA_JF_ANCESTOR
2157 && ipa_get_jf_ancestor_agg_preserved (jfunc))
2159 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2160 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
2161 struct ipcp_param_lattices *src_plats;
2163 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
2164 if (src_plats->aggs && src_plats->aggs_by_ref)
2166 /* Currently we do not produce clobber aggregate jump
2167 functions, replace with merging when we do. */
2168 gcc_assert (!jfunc->agg.items);
2169 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats, src_idx,
2170 ipa_get_jf_ancestor_offset (jfunc));
/* An ancestor adjustment of a by-value aggregate does not make sense;
   drop the destination all the way to bottom.  */
2172 else if (!src_plats->aggs_by_ref)
2173 ret |= set_agg_lats_to_bottom (dest_plats);
2174 else
2175 ret |= set_agg_lats_contain_variable (dest_plats);
/* Case 3: the jump function itself describes known aggregate contents —
   add each constant item into the matching destination lattice.  */
2177 else if (jfunc->agg.items)
2179 bool pre_existing = dest_plats->aggs != NULL;
2180 struct ipcp_agg_lattice **aglat = &dest_plats->aggs;
2181 struct ipa_agg_jf_item *item;
2182 int i;
2184 if (set_check_aggs_by_ref (dest_plats, jfunc->agg.by_ref))
2185 return true;
2187 FOR_EACH_VEC_ELT (*jfunc->agg.items, i, item)
2189 HOST_WIDE_INT val_size;
2191 if (item->offset < 0)
2192 continue;
2193 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2194 val_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (item->value)));
2196 if (merge_agg_lats_step (dest_plats, item->offset, val_size,
2197 &aglat, pre_existing, &ret))
2199 ret |= (*aglat)->add_value (item->value, cs, NULL, 0, 0);
2200 aglat = &(*aglat)->next;
2202 else if (dest_plats->aggs_bottom)
2203 return true;
/* Destination lattices for which the jump function listed no item
   have unknown contents.  */
2206 ret |= set_chain_of_aglats_contains_variable (*aglat);
/* Anything else tells us nothing about the aggregate.  */
2208 else
2209 ret |= set_agg_lats_contain_variable (dest_plats);
2211 return ret;
2214 /* Return true if on the way from CS->caller to the final (non-alias and
2215 non-thunk) destination, the call passes through a thunk. */
2217 static bool
2218 call_passes_through_thunk_p (cgraph_edge *cs)
2220 cgraph_node *alias_or_thunk = cs->callee;
2221 while (alias_or_thunk->alias)
2222 alias_or_thunk = alias_or_thunk->get_alias_target ();
2223 return alias_or_thunk->thunk.thunk_p;
2226 /* Propagate constants from the caller to the callee of CS.  Return true if
2227 any lattice of the callee changed. */
2229 static bool
2230 propagate_constants_across_call (struct cgraph_edge *cs)
2232 struct ipa_node_params *callee_info;
2233 enum availability availability;
2234 cgraph_node *callee;
2235 struct ipa_edge_args *args;
2236 bool ret = false;
2237 int i, args_count, parms_count;
2239 callee = cs->callee->function_symbol (&availability);
2240 if (!callee->definition)
2241 return false;
2242 gcc_checking_assert (callee->has_gimple_body_p ());
2243 callee_info = IPA_NODE_REF (callee);
2245 args = IPA_EDGE_REF (cs);
2246 args_count = ipa_get_cs_argument_count (args);
2247 parms_count = ipa_get_param_count (callee_info);
2248 if (parms_count == 0)
2249 return false;
2251 /* If this call goes through a thunk we must not propagate to the first (0th)
2252 parameter. However, we might need to uncover a thunk from below a series
2253 of aliases first. */
2254 if (call_passes_through_thunk_p (cs))
2256 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
2257 0));
2258 i = 1;
2260 else
2261 i = 0;
/* Propagate each argument's jump function into the corresponding callee
   parameter lattices: scalar, context, bits, aggregates and value range.  */
2263 for (; (i < args_count) && (i < parms_count); i++)
2265 struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
2266 struct ipcp_param_lattices *dest_plats;
2267 tree param_type = ipa_get_type (callee_info, i);
2269 dest_plats = ipa_get_parm_lattices (callee_info, i);
/* An interposable callee can be replaced at link time, so nothing can be
   assumed about incoming values.  */
2270 if (availability == AVAIL_INTERPOSABLE)
2271 ret |= set_all_contains_variable (dest_plats);
2272 else
2274 ret |= propagate_scalar_across_jump_function (cs, jump_func,
2275 &dest_plats->itself,
2276 param_type);
2277 ret |= propagate_context_across_jump_function (cs, jump_func, i,
2278 &dest_plats->ctxlat);
/* NOTE(review): the "ret" lvalue for the following |= appears to have
   been lost in extraction of this file; confirm against the original.  */
2280 |= propagate_bits_across_jump_function (cs, i, jump_func,
2281 &dest_plats->bits_lattice);
2282 ret |= propagate_aggs_across_jump_function (cs, jump_func,
2283 dest_plats);
2284 if (opt_for_fn (callee->decl, flag_ipa_vrp))
2285 ret |= propagate_vr_across_jump_function (cs, jump_func,
2286 dest_plats, param_type);
2287 else
2288 ret |= dest_plats->m_value_range.set_to_bottom ();
/* Parameters without a matching argument receive nothing useful.  */
2291 for (; i < parms_count; i++)
2292 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info, i));
2294 return ret;
2297 /* If an indirect edge IE can be turned into a direct one based on KNOWN_VALS
2298 KNOWN_CONTEXTS, KNOWN_AGGS or AGG_REPS return the destination. The latter
2299 three can be NULL. If AGG_REPS is not NULL, KNOWN_AGGS is ignored. */
2301 static tree
2302 ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
2303 vec<tree> known_csts,
2304 vec<ipa_polymorphic_call_context> known_contexts,
2305 vec<ipa_agg_jump_function_p> known_aggs,
2306 struct ipa_agg_replacement_value *agg_reps,
2307 bool *speculative)
2309 int param_index = ie->indirect_info->param_index;
2310 HOST_WIDE_INT anc_offset;
2311 tree t;
2312 tree target = NULL;
2314 *speculative = false;
2316 if (param_index == -1
2317 || known_csts.length () <= (unsigned int) param_index)
2318 return NULL_TREE;
/* Non-polymorphic case: the call target is either loaded from a known
   aggregate part of the parameter or is the parameter's own value.  */
2320 if (!ie->indirect_info->polymorphic)
2322 tree t;
2324 if (ie->indirect_info->agg_contents)
2326 t = NULL;
/* Prefer AGG_REPS, but only if the memory was provably not modified
   between the load of the parameter and the call.  */
2327 if (agg_reps && ie->indirect_info->guaranteed_unmodified)
2329 while (agg_reps)
2331 if (agg_reps->index == param_index
2332 && agg_reps->offset == ie->indirect_info->offset
2333 && agg_reps->by_ref == ie->indirect_info->by_ref)
2335 t = agg_reps->value;
2336 break;
2338 agg_reps = agg_reps->next;
2341 if (!t)
2343 struct ipa_agg_jump_function *agg;
2344 if (known_aggs.length () > (unsigned int) param_index)
2345 agg = known_aggs[param_index];
2346 else
2347 agg = NULL;
2348 bool from_global_constant;
2349 t = ipa_find_agg_cst_for_param (agg, known_csts[param_index],
2350 ie->indirect_info->offset,
2351 ie->indirect_info->by_ref,
2352 &from_global_constant);
/* A value found in mutable memory is only usable if nothing could
   have modified it before the call.  */
2353 if (t
2354 && !from_global_constant
2355 && !ie->indirect_info->guaranteed_unmodified)
2356 t = NULL_TREE;
2359 else
2360 t = known_csts[param_index];
/* Only a direct address of a function gives us a usable target.  */
2362 if (t
2363 && TREE_CODE (t) == ADDR_EXPR
2364 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
2365 return TREE_OPERAND (t, 0);
2366 else
2367 return NULL_TREE;
/* From here on we handle the polymorphic (virtual) call case.  */
2370 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
2371 return NULL_TREE;
2373 gcc_assert (!ie->indirect_info->agg_contents);
2374 anc_offset = ie->indirect_info->offset;
2376 t = NULL;
2378 /* Try to work out value of virtual table pointer value in replacements. */
2379 if (!t && agg_reps && !ie->indirect_info->by_ref)
2381 while (agg_reps)
2383 if (agg_reps->index == param_index
2384 && agg_reps->offset == ie->indirect_info->offset
2385 && agg_reps->by_ref)
2387 t = agg_reps->value;
2388 break;
2390 agg_reps = agg_reps->next;
2394 /* Try to work out value of virtual table pointer value in known
2395 aggregate values. */
2396 if (!t && known_aggs.length () > (unsigned int) param_index
2397 && !ie->indirect_info->by_ref)
2399 struct ipa_agg_jump_function *agg;
2400 agg = known_aggs[param_index];
2401 t = ipa_find_agg_cst_for_param (agg, known_csts[param_index],
2402 ie->indirect_info->offset, true);
2405 /* If we found the virtual table pointer, lookup the target. */
2406 if (t)
2408 tree vtable;
2409 unsigned HOST_WIDE_INT offset;
2410 if (vtable_pointer_value_to_vtable (t, &vtable, &offset))
2412 bool can_refer;
2413 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2414 vtable, offset, &can_refer);
2415 if (can_refer)
2417 if (!target
2418 || (TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
2419 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
2420 || !possible_polymorphic_call_target_p
2421 (ie, cgraph_node::get (target)))
2423 /* Do not speculate builtin_unreachable, it is stupid! */
2424 if (ie->indirect_info->vptr_changed)
2425 return NULL;
2426 target = ipa_impossible_devirt_target (ie, target);
/* If the vptr may have changed, the result is only a speculation;
   otherwise we are done.  */
2428 *speculative = ie->indirect_info->vptr_changed;
2429 if (!*speculative)
2430 return target;
2435 /* Do we know the constant value of pointer? */
2436 if (!t)
2437 t = known_csts[param_index];
2439 gcc_checking_assert (!t || TREE_CODE (t) != TREE_BINFO);
/* Build the best polymorphic context we can, combining a known context
   with a context derived from a known constant pointer value.  */
2441 ipa_polymorphic_call_context context;
2442 if (known_contexts.length () > (unsigned int) param_index)
2444 context = known_contexts[param_index];
2445 context.offset_by (anc_offset);
2446 if (ie->indirect_info->vptr_changed)
2447 context.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
2448 ie->indirect_info->otr_type);
2449 if (t)
2451 ipa_polymorphic_call_context ctx2 = ipa_polymorphic_call_context
2452 (t, ie->indirect_info->otr_type, anc_offset);
2453 if (!ctx2.useless_p ())
2454 context.combine_with (ctx2, ie->indirect_info->otr_type);
2457 else if (t)
2459 context = ipa_polymorphic_call_context (t, ie->indirect_info->otr_type,
2460 anc_offset);
2461 if (ie->indirect_info->vptr_changed)
2462 context.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
2463 ie->indirect_info->otr_type);
2465 else
2466 return NULL_TREE;
2468 vec <cgraph_node *>targets;
2469 bool final;
2471 targets = possible_polymorphic_call_targets
2472 (ie->indirect_info->otr_type,
2473 ie->indirect_info->otr_token,
2474 context, &final);
/* More than one (or an open set of) targets: fall back to speculative
   devirtualization if the edge is hot and speculation is allowed.  */
2475 if (!final || targets.length () > 1)
2477 struct cgraph_node *node;
2478 if (*speculative)
2479 return target;
2480 if (!opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
2481 || ie->speculative || !ie->maybe_hot_p ())
2482 return NULL;
2483 node = try_speculative_devirtualization (ie->indirect_info->otr_type,
2484 ie->indirect_info->otr_token,
2485 context);
2486 if (node)
2488 *speculative = true;
2489 target = node->decl;
2491 else
2492 return NULL;
2494 else
2496 *speculative = false;
2497 if (targets.length () == 1)
2498 target = targets[0]->decl;
2499 else
2500 target = ipa_impossible_devirt_target (ie, NULL_TREE);
/* Final sanity check that the chosen target is actually a possible one.  */
2503 if (target && !possible_polymorphic_call_target_p (ie,
2504 cgraph_node::get (target)))
2506 if (*speculative)
2507 return NULL;
2508 target = ipa_impossible_devirt_target (ie, target);
2511 return target;
2515 /* If an indirect edge IE can be turned into a direct one based on KNOWN_CSTS,
2516 KNOWN_CONTEXTS (which can be vNULL) or KNOWN_AGGS (which also can be vNULL)
2517 return the destination. */
2519 tree
2520 ipa_get_indirect_edge_target (struct cgraph_edge *ie,
2521 vec<tree> known_csts,
2522 vec<ipa_polymorphic_call_context> known_contexts,
2523 vec<ipa_agg_jump_function_p> known_aggs,
2524 bool *speculative)
2526 return ipa_get_indirect_edge_target_1 (ie, known_csts, known_contexts,
2527 known_aggs, NULL, speculative);
2530 /* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
2531 and KNOWN_CONTEXTS. */
2533 static int
2534 devirtualization_time_bonus (struct cgraph_node *node,
2535 vec<tree> known_csts,
2536 vec<ipa_polymorphic_call_context> known_contexts,
2537 vec<ipa_agg_jump_function_p> known_aggs)
2539 struct cgraph_edge *ie;
2540 int res = 0;
/* Score every indirect call of NODE that the known values would turn into
   a direct one; calls whose target could then be inlined score higher.  */
2542 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
2544 struct cgraph_node *callee;
2545 struct ipa_fn_summary *isummary;
2546 enum availability avail;
2547 tree target;
2548 bool speculative;
2550 target = ipa_get_indirect_edge_target (ie, known_csts, known_contexts,
2551 known_aggs, &speculative);
2552 if (!target)
2553 continue;
2555 /* Only bare minimum benefit for clearly un-inlineable targets. */
2556 res += 1;
2557 callee = cgraph_node::get (target);
2558 if (!callee || !callee->definition)
2559 continue;
2560 callee = callee->function_symbol (&avail);
2561 if (avail < AVAIL_AVAILABLE)
2562 continue;
2563 isummary = ipa_fn_summaries->get (callee);
2564 if (!isummary->inlinable)
2565 continue;
2567 /* FIXME: The values below need re-considering and perhaps also
2568 integrating into the cost metrics, at least in some very basic way. */
/* Smaller targets get a bigger bonus; a speculative (unproven)
   devirtualization only earns half of it.  */
2569 if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
2570 res += 31 / ((int)speculative + 1);
2571 else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
2572 res += 15 / ((int)speculative + 1);
2573 else if (isummary->size <= MAX_INLINE_INSNS_AUTO
2574 || DECL_DECLARED_INLINE_P (callee->decl))
2575 res += 7 / ((int)speculative + 1);
2578 return res;
2581 /* Return time bonus incurred because of HINTS. */
2583 static int
2584 hint_time_bonus (ipa_hints hints)
2586 int result = 0;
2587 if (hints & (INLINE_HINT_loop_iterations | INLINE_HINT_loop_stride))
2588 result += PARAM_VALUE (PARAM_IPA_CP_LOOP_HINT_BONUS);
2589 if (hints & INLINE_HINT_array_index)
2590 result += PARAM_VALUE (PARAM_IPA_CP_ARRAY_INDEX_HINT_BONUS);
2591 return result;
2594 /* If there is a reason to penalize the function described by INFO in the
2595 cloning goodness evaluation, do so. */
2597 static inline int64_t
2598 incorporate_penalties (ipa_node_params *info, int64_t evaluation)
2600 if (info->node_within_scc)
2601 evaluation = (evaluation
2602 * (100 - PARAM_VALUE (PARAM_IPA_CP_RECURSION_PENALTY))) / 100;
2604 if (info->node_calling_single_call)
2605 evaluation = (evaluation
2606 * (100 - PARAM_VALUE (PARAM_IPA_CP_SINGLE_CALL_PENALTY)))
2607 / 100;
2609 return evaluation;
2612 /* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
2613 and SIZE_COST and with the sum of frequencies of incoming edges to the
2614 potential new clone in FREQUENCIES. */
2616 static bool
2617 good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
2618 int freq_sum, profile_count count_sum, int size_cost)
2620 if (time_benefit == 0
2621 || !opt_for_fn (node->decl, flag_ipa_cp_clone)
2622 || node->optimize_for_size_p ())
2623 return false;
2625 gcc_assert (size_cost > 0);
2627 struct ipa_node_params *info = IPA_NODE_REF (node);
/* With real profile counts available, weight the benefit by how hot the
   incoming edges are relative to the hottest node in the unit.  */
2628 if (max_count > profile_count::zero ())
2630 int factor = RDIV (count_sum.probability_in
2631 (max_count).to_reg_br_prob_base ()
2632 * 1000, REG_BR_PROB_BASE);
2633 int64_t evaluation = (((int64_t) time_benefit * factor)
2634 / size_cost);
2635 evaluation = incorporate_penalties (info, evaluation);
2637 if (dump_file && (dump_flags & TDF_DETAILS))
2639 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
2640 "size: %i, count_sum: ", time_benefit, size_cost);
2641 count_sum.dump (dump_file);
2642 fprintf (dump_file, "%s%s) -> evaluation: " "%" PRId64
2643 ", threshold: %i\n",
2644 info->node_within_scc ? ", scc" : "",
2645 info->node_calling_single_call ? ", single_call" : "",
2646 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
2649 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
/* Without profile counts, use the sum of estimated edge frequencies.  */
2651 else
2653 int64_t evaluation = (((int64_t) time_benefit * freq_sum)
2654 / size_cost);
2655 evaluation = incorporate_penalties (info, evaluation);
2657 if (dump_file && (dump_flags & TDF_DETAILS))
2658 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
2659 "size: %i, freq_sum: %i%s%s) -> evaluation: "
2660 "%" PRId64 ", threshold: %i\n",
2661 time_benefit, size_cost, freq_sum,
2662 info->node_within_scc ? ", scc" : "",
2663 info->node_calling_single_call ? ", single_call" : "",
2664 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
2666 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
2670 /* Return all context independent values from aggregate lattices in PLATS in a
2671 vector. Return NULL if there are none. */
2673 static vec<ipa_agg_jf_item, va_gc> *
2674 context_independent_aggregate_values (struct ipcp_param_lattices *plats)
2676 vec<ipa_agg_jf_item, va_gc> *res = NULL;
/* Only aggregates whose every known part is fixed qualify as
   context independent.  */
2678 if (plats->aggs_bottom
2679 || plats->aggs_contain_variable
2680 || plats->aggs_count == 0)
2681 return NULL;
/* Collect an (offset, value) item for each lattice that has settled on a
   single constant.  */
2683 for (struct ipcp_agg_lattice *aglat = plats->aggs;
2684 aglat;
2685 aglat = aglat->next)
2686 if (aglat->is_single_const ())
2688 struct ipa_agg_jf_item item;
2689 item.offset = aglat->offset;
2690 item.value = aglat->values->value;
2691 vec_safe_push (res, item);
2693 return res;
2696 /* Allocate KNOWN_CSTS, KNOWN_CONTEXTS and, if non-NULL, KNOWN_AGGS and
2697 populate them with values of parameters that are known independent of the
2698 context. INFO describes the function. If REMOVABLE_PARAMS_COST is
2699 non-NULL, the movement cost of all removable parameters will be stored in
2700 it. */
2702 static bool
2703 gather_context_independent_values (struct ipa_node_params *info,
2704 vec<tree> *known_csts,
2705 vec<ipa_polymorphic_call_context>
2706 *known_contexts,
2707 vec<ipa_agg_jump_function> *known_aggs,
2708 int *removable_params_cost)
2710 int i, count = ipa_get_param_count (info);
2711 bool ret = false;
/* Size all output vectors to one slot per formal parameter.  */
2713 known_csts->create (0);
2714 known_contexts->create (0);
2715 known_csts->safe_grow_cleared (count);
2716 known_contexts->safe_grow_cleared (count);
2717 if (known_aggs)
2719 known_aggs->create (0);
2720 known_aggs->safe_grow_cleared (count);
2723 if (removable_params_cost)
2724 *removable_params_cost = 0;
2726 for (i = 0; i < count; i++)
2728 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2729 ipcp_lattice<tree> *lat = &plats->itself;
/* A parameter with a single known constant can be removed entirely,
   which saves its movement cost.  */
2731 if (lat->is_single_const ())
2733 ipcp_value<tree> *val = lat->values;
2734 gcc_checking_assert (TREE_CODE (val->value) != TREE_BINFO);
2735 (*known_csts)[i] = val->value;
2736 if (removable_params_cost)
2737 *removable_params_cost
2738 += estimate_move_cost (TREE_TYPE (val->value), false);
2739 ret = true;
/* An unused parameter can also be removed, regardless of its value.  */
2741 else if (removable_params_cost
2742 && !ipa_is_param_used (info, i))
2743 *removable_params_cost
2744 += ipa_get_param_move_cost (info, i);
2746 if (!ipa_is_param_used (info, i))
2747 continue;
2749 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
2750 /* Do not account known context as reason for cloning. We can see
2751 if it permits devirtualization. */
2752 if (ctxlat->is_single_const ())
2753 (*known_contexts)[i] = ctxlat->values->value;
2755 if (known_aggs)
2757 vec<ipa_agg_jf_item, va_gc> *agg_items;
2758 struct ipa_agg_jump_function *ajf;
2760 agg_items = context_independent_aggregate_values (plats);
2761 ajf = &(*known_aggs)[i];
2762 ajf->items = agg_items;
2763 ajf->by_ref = plats->aggs_by_ref;
2764 ret |= agg_items != NULL;
2768 return ret;
2771 /* The current interface in ipa-inline-analysis requires a pointer vector.
2772 Create it.
2774 FIXME: That interface should be re-worked, this is slightly silly. Still,
2775 I'd like to discuss how to change it first and this demonstrates the
2776 issue. */
2778 static vec<ipa_agg_jump_function_p>
2779 agg_jmp_p_vec_for_t_vec (vec<ipa_agg_jump_function> known_aggs)
2781 vec<ipa_agg_jump_function_p> ret;
2782 struct ipa_agg_jump_function *ajf;
2783 int i;
2785 ret.create (known_aggs.length ());
2786 FOR_EACH_VEC_ELT (known_aggs, i, ajf)
2787 ret.quick_push (ajf);
2788 return ret;
2791 /* Perform time and size measurement of NODE with the context given in
2792 KNOWN_CSTS, KNOWN_CONTEXTS and KNOWN_AGGS, calculate the benefit and cost
2793 given BASE_TIME of the node without specialization, REMOVABLE_PARAMS_COST of
2794 all context-independent removable parameters and EST_MOVE_COST of estimated
2795 movement of the considered parameter and store it into VAL. */
2797 static void
2798 perform_estimation_of_a_value (cgraph_node *node, vec<tree> known_csts,
2799 vec<ipa_polymorphic_call_context> known_contexts,
2800 vec<ipa_agg_jump_function_p> known_aggs_ptrs,
2801 int removable_params_cost,
2802 int est_move_cost, ipcp_value_base *val)
2804 int size, time_benefit;
2805 sreal time, base_time;
2806 ipa_hints hints;
2808 estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
2809 known_aggs_ptrs, &size, &time,
2810 &base_time, &hints);
/* The raw time benefit is the estimated time saved by specialization,
   capped so that it fits the int arithmetic below.  */
2811 base_time -= time;
2812 if (base_time > 65535)
2813 base_time = 65535;
2814 time_benefit = base_time.to_int ()
2815 + devirtualization_time_bonus (node, known_csts, known_contexts,
2816 known_aggs_ptrs)
2817 + hint_time_bonus (hints)
2818 + removable_params_cost + est_move_cost;
2820 gcc_checking_assert (size >=0);
2821 /* The inliner-heuristics based estimates may think that in certain
2822 contexts some functions do not have any size at all but we want
2823 all specializations to have at least a tiny cost, not least not to
2824 divide by zero. */
2825 if (size == 0)
2826 size = 1;
2828 val->local_time_benefit = time_benefit;
2829 val->local_size_cost = size;
2832 /* Iterate over known values of parameters of NODE and estimate the local
2833 effects in terms of time and size they have. */
2835 static void
2836 estimate_local_effects (struct cgraph_node *node)
2838 struct ipa_node_params *info = IPA_NODE_REF (node);
2839 int i, count = ipa_get_param_count (info);
2840 vec<tree> known_csts;
2841 vec<ipa_polymorphic_call_context> known_contexts;
2842 vec<ipa_agg_jump_function> known_aggs;
2843 vec<ipa_agg_jump_function_p> known_aggs_ptrs;
2844 bool always_const;
2845 int removable_params_cost;
2847 if (!count || !ipcp_versionable_function_p (node))
2848 return;
2850 if (dump_file && (dump_flags & TDF_DETAILS))
2851 fprintf (dump_file, "\nEstimating effects for %s.\n", node->dump_name ());
/* Step 1: if some values are known in every context, consider making a
   single clone specialized for all contexts.  */
2853 always_const = gather_context_independent_values (info, &known_csts,
2854 &known_contexts, &known_aggs,
2855 &removable_params_cost);
2856 known_aggs_ptrs = agg_jmp_p_vec_for_t_vec (known_aggs);
2857 int devirt_bonus = devirtualization_time_bonus (node, known_csts,
2858 known_contexts, known_aggs_ptrs);
2859 if (always_const || devirt_bonus
2860 || (removable_params_cost && node->local.can_change_signature))
2862 struct caller_statistics stats;
2863 ipa_hints hints;
2864 sreal time, base_time;
2865 int size;
2867 init_caller_stats (&stats);
2868 node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
2869 false);
2870 estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
2871 known_aggs_ptrs, &size, &time,
2872 &base_time, &hints);
2873 time -= devirt_bonus;
2874 time -= hint_time_bonus (hints);
2875 time -= removable_params_cost;
2876 size -= stats.n_calls * removable_params_cost;
2878 if (dump_file)
2879 fprintf (dump_file, " - context independent values, size: %i, "
2880 "time_benefit: %f\n", size, (base_time - time).to_double ());
/* A clone that does not grow the code (or a local function where the
   original then dies) is always worthwhile.  */
2882 if (size <= 0 || node->local.local)
2884 info->do_clone_for_all_contexts = true;
2886 if (dump_file)
2887 fprintf (dump_file, " Decided to specialize for all "
2888 "known contexts, code not going to grow.\n");
2890 else if (good_cloning_opportunity_p (node,
2891 MIN ((base_time - time).to_int (),
2892 65536),
2893 stats.freq_sum, stats.count_sum,
2894 size))
2896 if (size + overall_size <= max_new_size)
2898 info->do_clone_for_all_contexts = true;
2899 overall_size += size;
2901 if (dump_file)
2902 fprintf (dump_file, " Decided to specialize for all "
2903 "known contexts, growth deemed beneficial.\n");
2905 else if (dump_file && (dump_flags & TDF_DETAILS))
2906 fprintf (dump_file, " Not cloning for all contexts because "
2907 "max_new_size would be reached with %li.\n",
2908 size + overall_size);
2910 else if (dump_file && (dump_flags & TDF_DETAILS))
2911 fprintf (dump_file, " Not cloning for all contexts because "
2912 "!good_cloning_opportunity_p.\n");
/* Step 2: estimate the local effect of each individual scalar value by
   temporarily plugging it into known_csts.  */
2916 for (i = 0; i < count; i++)
2918 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2919 ipcp_lattice<tree> *lat = &plats->itself;
2920 ipcp_value<tree> *val;
2922 if (lat->bottom
2923 || !lat->values
2924 || known_csts[i])
2925 continue;
2927 for (val = lat->values; val; val = val->next)
2929 gcc_checking_assert (TREE_CODE (val->value) != TREE_BINFO);
2930 known_csts[i] = val->value;
2932 int emc = estimate_move_cost (TREE_TYPE (val->value), true);
2933 perform_estimation_of_a_value (node, known_csts, known_contexts,
2934 known_aggs_ptrs,
2935 removable_params_cost, emc, val);
2937 if (dump_file && (dump_flags & TDF_DETAILS))
2939 fprintf (dump_file, " - estimates for value ");
2940 print_ipcp_constant_value (dump_file, val->value);
2941 fprintf (dump_file, " for ");
2942 ipa_dump_param (dump_file, info, i);
2943 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
2944 val->local_time_benefit, val->local_size_cost);
/* Restore the slot for the next parameter's iteration.  */
2947 known_csts[i] = NULL_TREE;
/* Step 3: likewise for each polymorphic context value, but only where a
   virtual call actually depends on the parameter.  */
2950 for (i = 0; i < count; i++)
2952 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2954 if (!plats->virt_call)
2955 continue;
2957 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
2958 ipcp_value<ipa_polymorphic_call_context> *val;
2960 if (ctxlat->bottom
2961 || !ctxlat->values
2962 || !known_contexts[i].useless_p ())
2963 continue;
2965 for (val = ctxlat->values; val; val = val->next)
2967 known_contexts[i] = val->value;
2968 perform_estimation_of_a_value (node, known_csts, known_contexts,
2969 known_aggs_ptrs,
2970 removable_params_cost, 0, val);
2972 if (dump_file && (dump_flags & TDF_DETAILS))
2974 fprintf (dump_file, " - estimates for polymorphic context ");
2975 print_ipcp_constant_value (dump_file, val->value);
2976 fprintf (dump_file, " for ");
2977 ipa_dump_param (dump_file, info, i);
2978 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
2979 val->local_time_benefit, val->local_size_cost);
2982 known_contexts[i] = ipa_polymorphic_call_context ();
/* Step 4: and for each aggregate value, temporarily appending the item to
   the known aggregate contents and popping it afterwards.  */
2985 for (i = 0; i < count; i++)
2987 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2988 struct ipa_agg_jump_function *ajf;
2989 struct ipcp_agg_lattice *aglat;
2991 if (plats->aggs_bottom || !plats->aggs)
2992 continue;
2994 ajf = &known_aggs[i];
2995 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2997 ipcp_value<tree> *val;
2998 if (aglat->bottom || !aglat->values
2999 /* If the following is true, the one value is in known_aggs. */
3000 || (!plats->aggs_contain_variable
3001 && aglat->is_single_const ()))
3002 continue;
3004 for (val = aglat->values; val; val = val->next)
3006 struct ipa_agg_jf_item item;
3008 item.offset = aglat->offset;
3009 item.value = val->value;
3010 vec_safe_push (ajf->items, item);
3012 perform_estimation_of_a_value (node, known_csts, known_contexts,
3013 known_aggs_ptrs,
3014 removable_params_cost, 0, val);
3016 if (dump_file && (dump_flags & TDF_DETAILS))
3018 fprintf (dump_file, " - estimates for value ");
3019 print_ipcp_constant_value (dump_file, val->value);
3020 fprintf (dump_file, " for ");
3021 ipa_dump_param (dump_file, info, i);
3022 fprintf (dump_file, "[%soffset: " HOST_WIDE_INT_PRINT_DEC
3023 "]: time_benefit: %i, size: %i\n",
3024 plats->aggs_by_ref ? "ref " : "",
3025 aglat->offset,
3026 val->local_time_benefit, val->local_size_cost);
3029 ajf->items->pop ();
/* Release all the scratch vectors.  */
3034 for (i = 0; i < count; i++)
3035 vec_free (known_aggs[i].items);
3037 known_csts.release ();
3038 known_contexts.release ();
3039 known_aggs.release ();
3040 known_aggs_ptrs.release ();
3044 /* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
3045 topological sort of values. */
3047 template <typename valtype>
3048 void
3049 value_topo_info<valtype>::add_val (ipcp_value<valtype> *cur_val)
3051 ipcp_value_source<valtype> *src;
/* A non-zero dfs number means the value has already been visited.  */
3053 if (cur_val->dfs)
3054 return;
/* This is Tarjan's strongly-connected-components algorithm: assign dfs
   and low-link numbers, push the value on the traversal stack...  */
3056 dfs_counter++;
3057 cur_val->dfs = dfs_counter;
3058 cur_val->low_link = dfs_counter;
3060 cur_val->topo_next = stack;
3061 stack = cur_val;
3062 cur_val->on_stack = true;
/* ...recurse into the values this one was derived from, updating the
   low-link from both tree edges and back/cross edges to the stack...  */
3064 for (src = cur_val->sources; src; src = src->next)
3065 if (src->val)
3067 if (src->val->dfs == 0)
3069 add_val (src->val);
3070 if (src->val->low_link < cur_val->low_link)
3071 cur_val->low_link = src->val->low_link;
3073 else if (src->val->on_stack
3074 && src->val->dfs < cur_val->low_link)
3075 cur_val->low_link = src->val->dfs;
/* ...and when CUR_VAL turns out to be an SCC root, pop the whole
   component off the stack, link it via scc_next and prepend the root to
   the global topological order.  */
3078 if (cur_val->dfs == cur_val->low_link)
3080 ipcp_value<valtype> *v, *scc_list = NULL;
3084 v = stack;
3085 stack = v->topo_next;
3086 v->on_stack = false;
3088 v->scc_next = scc_list;
3089 scc_list = v;
3091 while (v != cur_val);
3093 cur_val->topo_next = values_topo;
3094 values_topo = cur_val;
3098 /* Add all values in lattices associated with NODE to the topological sort if
3099 they are not there yet. */
3101 static void
3102 add_all_node_vals_to_toposort (cgraph_node *node, ipa_topo_info *topo)
3104 struct ipa_node_params *info = IPA_NODE_REF (node);
3105 int i, count = ipa_get_param_count (info);
/* Visit every parameter and add values from all three kinds of lattices:
   the scalar lattice, the aggregate lattices and the context lattice.  */
3107 for (i = 0; i < count; i++)
3109 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
3110 ipcp_lattice<tree> *lat = &plats->itself;
3111 struct ipcp_agg_lattice *aglat;
3113 if (!lat->bottom)
3115 ipcp_value<tree> *val;
3116 for (val = lat->values; val; val = val->next)
3117 topo->constants.add_val (val);
3120 if (!plats->aggs_bottom)
3121 for (aglat = plats->aggs; aglat; aglat = aglat->next)
3122 if (!aglat->bottom)
3124 ipcp_value<tree> *val;
3125 for (val = aglat->values; val; val = val->next)
3126 topo->constants.add_val (val);
/* Polymorphic contexts go into their own, separate topological order.  */
3129 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
3130 if (!ctxlat->bottom)
3132 ipcp_value<ipa_polymorphic_call_context> *ctxval;
3133 for (ctxval = ctxlat->values; ctxval; ctxval = ctxval->next)
3134 topo->contexts.add_val (ctxval);
3139 /* One pass of constants propagation along the call graph edges, from callers
3140 to callees (requires topological ordering in TOPO), iterate over strongly
3141 connected components. */
3143 static void
3144 propagate_constants_topo (struct ipa_topo_info *topo)
3146 int i;
/* Process SCCs of the call graph in reverse topological order, i.e. from
   callers towards callees.  */
3148 for (i = topo->nnodes - 1; i >= 0; i--)
3150 unsigned j;
3151 struct cgraph_node *v, *node = topo->order[i];
3152 vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
3154 /* First, iteratively propagate within the strongly connected component
3155 until all lattices stabilize. */
3156 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
3157 if (v->has_gimple_body_p ())
3158 push_node_to_stack (topo, v);
3160 v = pop_node_from_stack (topo)
3161 while (v)
3163 struct cgraph_edge *cs;
/* Re-queue a callee whenever its lattices changed, so the fixed point
   within the SCC is reached by worklist iteration.  */
3165 for (cs = v->callees; cs; cs = cs->next_callee)
3166 if (ipa_edge_within_scc (cs))
3168 IPA_NODE_REF (v)->node_within_scc = true;
3169 if (propagate_constants_across_call (cs))
3170 push_node_to_stack (topo, cs->callee->function_symbol ());
3172 v = pop_node_from_stack (topo);
3175 /* Afterwards, propagate along edges leading out of the SCC, calculate
3176 the local effects of the discovered constants and add all valid values
3177 to their topological sort. */
3178 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
3179 if (v->has_gimple_body_p ())
3181 struct cgraph_edge *cs;
3183 estimate_local_effects (v);
3184 add_all_node_vals_to_toposort (v, topo);
3185 for (cs = v->callees; cs; cs = cs->next_callee)
3186 if (!ipa_edge_within_scc (cs))
3187 propagate_constants_across_call (cs);
3189 cycle_nodes.release ();
3194 /* Return the sum of A and B if none of them is bigger than INT_MAX/2,
3195 otherwise return the bigger one. */
/* Saturating addition: when either operand exceeds INT_MAX/2 (so the sum
   could overflow), fall back to the larger of the two operands; otherwise
   return the exact sum.  */
static int
safe_add (int a, int b)
{
  if (a <= INT_MAX / 2 && b <= INT_MAX / 2)
    return a + b;
  return a < b ? b : a;
}
3207 /* Propagate the estimated effects of individual values along the topological
3208 order from the dependent values to those they depend on. */
3210 template <typename valtype>
3211 void
3212 value_topo_info<valtype>::propagate_effects ()
3214 ipcp_value<valtype> *base;
/* Walk the SCCs in topological order (dependent values first).  */
3216 for (base = values_topo; base; base = base->topo_next)
3218 ipcp_value_source<valtype> *src;
3219 ipcp_value<valtype> *val;
3220 int time = 0, size = 0;
/* Sum the local and already-propagated effects over the whole SCC,
   saturating via safe_add to avoid overflow.  */
3222 for (val = base; val; val = val->scc_next)
3224 time = safe_add (time,
3225 val->local_time_benefit + val->prop_time_benefit);
3226 size = safe_add (size, val->local_size_cost + val->prop_size_cost);
/* Charge the SCC's totals to every source value reached over an edge
   that is likely to be executed.  */
3229 for (val = base; val; val = val->scc_next)
3230 for (src = val->sources; src; src = src->next)
3231 if (src->val
3232 && src->cs->maybe_hot_p ())
3234 src->val->prop_time_benefit = safe_add (time,
3235 src->val->prop_time_benefit);
3236 src->val->prop_size_cost = safe_add (size,
3237 src->val->prop_size_cost);
3243 /* Propagate constants, polymorphic contexts and their effects from the
3244 summaries interprocedurally. */
3246 static void
3247 ipcp_propagate_stage (struct ipa_topo_info *topo)
3249 struct cgraph_node *node;
3251 if (dump_file)
3252 fprintf (dump_file, "\n Propagating constants:\n\n");
3254 max_count = profile_count::uninitialized ();
3256 FOR_EACH_DEFINED_FUNCTION (node)
3258 struct ipa_node_params *info = IPA_NODE_REF (node);
3260 determine_versionability (node, info);
3261 if (node->has_gimple_body_p ())
3263 info->lattices = XCNEWVEC (struct ipcp_param_lattices,
3264 ipa_get_param_count (info));
3265 initialize_node_lattices (node);
3267 ipa_fn_summary *s = ipa_fn_summaries->get (node);
3268 if (node->definition && !node->alias && s != NULL)
3269 overall_size += s->self_size;
3270 max_count = max_count.max (node->count.ipa ());
3273 max_new_size = overall_size;
3274 if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
3275 max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
3276 max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;
3278 if (dump_file)
3279 fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
3280 overall_size, max_new_size);
3282 propagate_constants_topo (topo);
3283 if (flag_checking)
3284 ipcp_verify_propagated_values ();
3285 topo->constants.propagate_effects ();
3286 topo->contexts.propagate_effects ();
3288 if (dump_file)
3290 fprintf (dump_file, "\nIPA lattices after all propagation:\n");
3291 print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
3295 /* Discover newly direct outgoing edges from NODE which is a new clone with
3296 known KNOWN_CSTS and make them direct. */
3298 static void
3299 ipcp_discover_new_direct_edges (struct cgraph_node *node,
3300 vec<tree> known_csts,
3301 vec<ipa_polymorphic_call_context>
3302 known_contexts,
3303 struct ipa_agg_replacement_value *aggvals)
3305 struct cgraph_edge *ie, *next_ie;
3306 bool found = false;
3308 for (ie = node->indirect_calls; ie; ie = next_ie)
3310 tree target;
3311 bool speculative;
3313 next_ie = ie->next_callee;
3314 target = ipa_get_indirect_edge_target_1 (ie, known_csts, known_contexts,
3315 vNULL, aggvals, &speculative);
3316 if (target)
3318 bool agg_contents = ie->indirect_info->agg_contents;
3319 bool polymorphic = ie->indirect_info->polymorphic;
3320 int param_index = ie->indirect_info->param_index;
3321 struct cgraph_edge *cs = ipa_make_edge_direct_to_target (ie, target,
3322 speculative);
3323 found = true;
3325 if (cs && !agg_contents && !polymorphic)
3327 struct ipa_node_params *info = IPA_NODE_REF (node);
3328 int c = ipa_get_controlled_uses (info, param_index);
3329 if (c != IPA_UNDESCRIBED_USE)
3331 struct ipa_ref *to_del;
3333 c--;
3334 ipa_set_controlled_uses (info, param_index, c);
3335 if (dump_file && (dump_flags & TDF_DETAILS))
3336 fprintf (dump_file, " controlled uses count of param "
3337 "%i bumped down to %i\n", param_index, c);
3338 if (c == 0
3339 && (to_del = node->find_reference (cs->callee, NULL, 0)))
3341 if (dump_file && (dump_flags & TDF_DETAILS))
3342 fprintf (dump_file, " and even removing its "
3343 "cloning-created reference\n");
3344 to_del->remove_reference ();
3350 /* Turning calls to direct calls will improve overall summary. */
3351 if (found)
3352 ipa_update_overall_fn_summary (node);
3355 class edge_clone_summary;
3356 static call_summary <edge_clone_summary *> *edge_clone_summaries = NULL;
3358 /* Edge clone summary. */
3360 struct edge_clone_summary
3362 /* Default constructor. */
3363 edge_clone_summary (): prev_clone (NULL), next_clone (NULL) {}
3365 /* Default destructor. */
3366 ~edge_clone_summary ()
3368 if (prev_clone)
3369 edge_clone_summaries->get (prev_clone)->next_clone = next_clone;
3370 if (next_clone)
3371 edge_clone_summaries->get (next_clone)->prev_clone = prev_clone;
3374 cgraph_edge *prev_clone;
3375 cgraph_edge *next_clone;
3378 class edge_clone_summary_t:
3379 public call_summary <edge_clone_summary *>
3381 public:
3382 edge_clone_summary_t (symbol_table *symtab):
3383 call_summary <edge_clone_summary *> (symtab)
3385 m_initialize_when_cloning = true;
3388 virtual void duplicate (cgraph_edge *src_edge, cgraph_edge *dst_edge,
3389 edge_clone_summary *src_data,
3390 edge_clone_summary *dst_data);
3393 /* Edge duplication hook. */
3395 void
3396 edge_clone_summary_t::duplicate (cgraph_edge *src_edge, cgraph_edge *dst_edge,
3397 edge_clone_summary *src_data,
3398 edge_clone_summary *dst_data)
3400 if (src_data->next_clone)
3401 edge_clone_summaries->get (src_data->next_clone)->prev_clone = dst_edge;
3402 dst_data->prev_clone = src_edge;
3403 dst_data->next_clone = src_data->next_clone;
3404 src_data->next_clone = dst_edge;
3407 /* See if NODE is a clone with a known aggregate value at a given OFFSET of a
3408 parameter with the given INDEX. */
3410 static tree
3411 get_clone_agg_value (struct cgraph_node *node, HOST_WIDE_INT offset,
3412 int index)
3414 struct ipa_agg_replacement_value *aggval;
3416 aggval = ipa_get_agg_replacements_for_node (node);
3417 while (aggval)
3419 if (aggval->offset == offset
3420 && aggval->index == index)
3421 return aggval->value;
3422 aggval = aggval->next;
3424 return NULL_TREE;
3427 /* Return true is NODE is DEST or its clone for all contexts. */
3429 static bool
3430 same_node_or_its_all_contexts_clone_p (cgraph_node *node, cgraph_node *dest)
3432 if (node == dest)
3433 return true;
3435 struct ipa_node_params *info = IPA_NODE_REF (node);
3436 return info->is_all_contexts_clone && info->ipcp_orig_node == dest;
3439 /* Return true if edge CS does bring about the value described by SRC to
3440 DEST_VAL of node DEST or its clone for all contexts. */
3442 static bool
3443 cgraph_edge_brings_value_p (cgraph_edge *cs, ipcp_value_source<tree> *src,
3444 cgraph_node *dest, ipcp_value<tree> *dest_val)
3446 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
3447 enum availability availability;
3448 cgraph_node *real_dest = cs->callee->function_symbol (&availability);
3450 if (!same_node_or_its_all_contexts_clone_p (real_dest, dest)
3451 || availability <= AVAIL_INTERPOSABLE
3452 || caller_info->node_dead)
3453 return false;
3455 if (!src->val)
3456 return true;
3458 if (caller_info->ipcp_orig_node)
3460 tree t;
3461 if (src->offset == -1)
3462 t = caller_info->known_csts[src->index];
3463 else
3464 t = get_clone_agg_value (cs->caller, src->offset, src->index);
3465 return (t != NULL_TREE
3466 && values_equal_for_ipcp_p (src->val->value, t));
3468 else
3470 /* At the moment we do not propagate over arithmetic jump functions in
3471 SCCs, so it is safe to detect self-feeding recursive calls in this
3472 way. */
3473 if (src->val == dest_val)
3474 return true;
3476 struct ipcp_agg_lattice *aglat;
3477 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
3478 src->index);
3479 if (src->offset == -1)
3480 return (plats->itself.is_single_const ()
3481 && values_equal_for_ipcp_p (src->val->value,
3482 plats->itself.values->value));
3483 else
3485 if (plats->aggs_bottom || plats->aggs_contain_variable)
3486 return false;
3487 for (aglat = plats->aggs; aglat; aglat = aglat->next)
3488 if (aglat->offset == src->offset)
3489 return (aglat->is_single_const ()
3490 && values_equal_for_ipcp_p (src->val->value,
3491 aglat->values->value));
3493 return false;
3497 /* Return true if edge CS does bring about the value described by SRC to
3498 DST_VAL of node DEST or its clone for all contexts. */
3500 static bool
3501 cgraph_edge_brings_value_p (cgraph_edge *cs,
3502 ipcp_value_source<ipa_polymorphic_call_context> *src,
3503 cgraph_node *dest,
3504 ipcp_value<ipa_polymorphic_call_context> *)
3506 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
3507 cgraph_node *real_dest = cs->callee->function_symbol ();
3509 if (!same_node_or_its_all_contexts_clone_p (real_dest, dest)
3510 || caller_info->node_dead)
3511 return false;
3512 if (!src->val)
3513 return true;
3515 if (caller_info->ipcp_orig_node)
3516 return (caller_info->known_contexts.length () > (unsigned) src->index)
3517 && values_equal_for_ipcp_p (src->val->value,
3518 caller_info->known_contexts[src->index]);
3520 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
3521 src->index);
3522 return plats->ctxlat.is_single_const ()
3523 && values_equal_for_ipcp_p (src->val->value,
3524 plats->ctxlat.values->value);
3527 /* Get the next clone in the linked list of clones of an edge. */
3529 static inline struct cgraph_edge *
3530 get_next_cgraph_edge_clone (struct cgraph_edge *cs)
3532 edge_clone_summary *s = edge_clone_summaries->get (cs);
3533 return s != NULL ? s->next_clone : NULL;
3536 /* Given VAL that is intended for DEST, iterate over all its sources and if any
3537 of them is viable and hot, return true. In that case, for those that still
3538 hold, add their edge frequency and their number into *FREQUENCY and
3539 *CALLER_COUNT respectively. */
3541 template <typename valtype>
3542 static bool
3543 get_info_about_necessary_edges (ipcp_value<valtype> *val, cgraph_node *dest,
3544 int *freq_sum,
3545 profile_count *count_sum, int *caller_count)
3547 ipcp_value_source<valtype> *src;
3548 int freq = 0, count = 0;
3549 profile_count cnt = profile_count::zero ();
3550 bool hot = false;
3551 bool non_self_recursive = false;
3553 for (src = val->sources; src; src = src->next)
3555 struct cgraph_edge *cs = src->cs;
3556 while (cs)
3558 if (cgraph_edge_brings_value_p (cs, src, dest, val))
3560 count++;
3561 freq += cs->frequency ();
3562 if (cs->count.ipa ().initialized_p ())
3563 cnt += cs->count.ipa ();
3564 hot |= cs->maybe_hot_p ();
3565 if (cs->caller != dest)
3566 non_self_recursive = true;
3568 cs = get_next_cgraph_edge_clone (cs);
3572 /* If the only edges bringing a value are self-recursive ones, do not bother
3573 evaluating it. */
3574 if (!non_self_recursive)
3575 return false;
3577 *freq_sum = freq;
3578 *count_sum = cnt;
3579 *caller_count = count;
3580 return hot;
3583 /* Return a vector of incoming edges that do bring value VAL to node DEST. It
3584 is assumed their number is known and equal to CALLER_COUNT. */
3586 template <typename valtype>
3587 static vec<cgraph_edge *>
3588 gather_edges_for_value (ipcp_value<valtype> *val, cgraph_node *dest,
3589 int caller_count)
3591 ipcp_value_source<valtype> *src;
3592 vec<cgraph_edge *> ret;
3594 ret.create (caller_count);
3595 for (src = val->sources; src; src = src->next)
3597 struct cgraph_edge *cs = src->cs;
3598 while (cs)
3600 if (cgraph_edge_brings_value_p (cs, src, dest, val))
3601 ret.quick_push (cs);
3602 cs = get_next_cgraph_edge_clone (cs);
3606 return ret;
3609 /* Construct a replacement map for a know VALUE for a formal parameter PARAM.
3610 Return it or NULL if for some reason it cannot be created. */
3612 static struct ipa_replace_map *
3613 get_replacement_map (struct ipa_node_params *info, tree value, int parm_num)
3615 struct ipa_replace_map *replace_map;
3618 replace_map = ggc_alloc<ipa_replace_map> ();
3619 if (dump_file)
3621 fprintf (dump_file, " replacing ");
3622 ipa_dump_param (dump_file, info, parm_num);
3624 fprintf (dump_file, " with const ");
3625 print_generic_expr (dump_file, value);
3626 fprintf (dump_file, "\n");
3628 replace_map->old_tree = NULL;
3629 replace_map->parm_num = parm_num;
3630 replace_map->new_tree = value;
3631 replace_map->replace_p = true;
3632 replace_map->ref_p = false;
3634 return replace_map;
3637 /* Dump new profiling counts */
3639 static void
3640 dump_profile_updates (struct cgraph_node *orig_node,
3641 struct cgraph_node *new_node)
3643 struct cgraph_edge *cs;
3645 fprintf (dump_file, " setting count of the specialized node to ");
3646 new_node->count.dump (dump_file);
3647 fprintf (dump_file, "\n");
3648 for (cs = new_node->callees; cs; cs = cs->next_callee)
3650 fprintf (dump_file, " edge to %s has count ",
3651 cs->callee->name ());
3652 cs->count.dump (dump_file);
3653 fprintf (dump_file, "\n");
3656 fprintf (dump_file, " setting count of the original node to ");
3657 orig_node->count.dump (dump_file);
3658 fprintf (dump_file, "\n");
3659 for (cs = orig_node->callees; cs; cs = cs->next_callee)
3661 fprintf (dump_file, " edge to %s is left with ",
3662 cs->callee->name ());
3663 cs->count.dump (dump_file);
3664 fprintf (dump_file, "\n");
3668 /* After a specialized NEW_NODE version of ORIG_NODE has been created, update
3669 their profile information to reflect this. */
3671 static void
3672 update_profiling_info (struct cgraph_node *orig_node,
3673 struct cgraph_node *new_node)
3675 struct cgraph_edge *cs;
3676 struct caller_statistics stats;
3677 profile_count new_sum, orig_sum;
3678 profile_count remainder, orig_node_count = orig_node->count;
3680 if (!(orig_node_count.ipa () > profile_count::zero ()))
3681 return;
3683 init_caller_stats (&stats);
3684 orig_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
3685 false);
3686 orig_sum = stats.count_sum;
3687 init_caller_stats (&stats);
3688 new_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
3689 false);
3690 new_sum = stats.count_sum;
3692 if (orig_node_count < orig_sum + new_sum)
3694 if (dump_file)
3696 fprintf (dump_file, " Problem: node %s has too low count ",
3697 orig_node->dump_name ());
3698 orig_node_count.dump (dump_file);
3699 fprintf (dump_file, "while the sum of incoming count is ");
3700 (orig_sum + new_sum).dump (dump_file);
3701 fprintf (dump_file, "\n");
3704 orig_node_count = (orig_sum + new_sum).apply_scale (12, 10);
3705 if (dump_file)
3707 fprintf (dump_file, " proceeding by pretending it was ");
3708 orig_node_count.dump (dump_file);
3709 fprintf (dump_file, "\n");
3713 remainder = orig_node_count.combine_with_ipa_count (orig_node_count.ipa ()
3714 - new_sum.ipa ());
3715 new_sum = orig_node_count.combine_with_ipa_count (new_sum);
3716 orig_node->count = remainder;
3718 for (cs = new_node->callees; cs; cs = cs->next_callee)
3719 cs->count = cs->count.apply_scale (new_sum, orig_node_count);
3721 for (cs = orig_node->callees; cs; cs = cs->next_callee)
3722 cs->count = cs->count.apply_scale (remainder, orig_node_count);
3724 if (dump_file)
3725 dump_profile_updates (orig_node, new_node);
3728 /* Update the respective profile of specialized NEW_NODE and the original
3729 ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
3730 have been redirected to the specialized version. */
3732 static void
3733 update_specialized_profile (struct cgraph_node *new_node,
3734 struct cgraph_node *orig_node,
3735 profile_count redirected_sum)
3737 struct cgraph_edge *cs;
3738 profile_count new_node_count, orig_node_count = orig_node->count;
3740 if (dump_file)
3742 fprintf (dump_file, " the sum of counts of redirected edges is ");
3743 redirected_sum.dump (dump_file);
3744 fprintf (dump_file, "\n");
3746 if (!(orig_node_count > profile_count::zero ()))
3747 return;
3749 gcc_assert (orig_node_count >= redirected_sum);
3751 new_node_count = new_node->count;
3752 new_node->count += redirected_sum;
3753 orig_node->count -= redirected_sum;
3755 for (cs = new_node->callees; cs; cs = cs->next_callee)
3756 cs->count += cs->count.apply_scale (redirected_sum, new_node_count);
3758 for (cs = orig_node->callees; cs; cs = cs->next_callee)
3760 profile_count dec = cs->count.apply_scale (redirected_sum,
3761 orig_node_count);
3762 cs->count -= dec;
3765 if (dump_file)
3766 dump_profile_updates (orig_node, new_node);
3769 /* Create a specialized version of NODE with known constants in KNOWN_CSTS,
3770 known contexts in KNOWN_CONTEXTS and known aggregate values in AGGVALS and
3771 redirect all edges in CALLERS to it. */
3773 static struct cgraph_node *
3774 create_specialized_node (struct cgraph_node *node,
3775 vec<tree> known_csts,
3776 vec<ipa_polymorphic_call_context> known_contexts,
3777 struct ipa_agg_replacement_value *aggvals,
3778 vec<cgraph_edge *> callers)
3780 struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
3781 vec<ipa_replace_map *, va_gc> *replace_trees = NULL;
3782 struct ipa_agg_replacement_value *av;
3783 struct cgraph_node *new_node;
3784 int i, count = ipa_get_param_count (info);
3785 bitmap args_to_skip;
3787 gcc_assert (!info->ipcp_orig_node);
3789 if (node->local.can_change_signature)
3791 args_to_skip = BITMAP_GGC_ALLOC ();
3792 for (i = 0; i < count; i++)
3794 tree t = known_csts[i];
3796 if (t || !ipa_is_param_used (info, i))
3797 bitmap_set_bit (args_to_skip, i);
3800 else
3802 args_to_skip = NULL;
3803 if (dump_file && (dump_flags & TDF_DETAILS))
3804 fprintf (dump_file, " cannot change function signature\n");
3807 for (i = 0; i < count; i++)
3809 tree t = known_csts[i];
3810 if (t)
3812 struct ipa_replace_map *replace_map;
3814 gcc_checking_assert (TREE_CODE (t) != TREE_BINFO);
3815 replace_map = get_replacement_map (info, t, i);
3816 if (replace_map)
3817 vec_safe_push (replace_trees, replace_map);
3820 auto_vec<cgraph_edge *, 2> self_recursive_calls;
3821 for (i = callers.length () - 1; i >= 0; i--)
3823 cgraph_edge *cs = callers[i];
3824 if (cs->caller == node)
3826 self_recursive_calls.safe_push (cs);
3827 callers.unordered_remove (i);
3831 new_node = node->create_virtual_clone (callers, replace_trees,
3832 args_to_skip, "constprop");
3834 bool have_self_recursive_calls = !self_recursive_calls.is_empty ();
3835 for (unsigned j = 0; j < self_recursive_calls.length (); j++)
3837 cgraph_edge *cs = get_next_cgraph_edge_clone (self_recursive_calls[j]);
3838 /* Cloned edges can disappear during cloning as speculation can be
3839 resolved, check that we have one and that it comes from the last
3840 cloning. */
3841 if (cs && cs->caller == new_node)
3842 cs->redirect_callee_duplicating_thunks (new_node);
3843 /* Any future code that would make more than one clone of an outgoing
3844 edge would confuse this mechanism, so let's check that does not
3845 happen. */
3846 gcc_checking_assert (!cs
3847 || !get_next_cgraph_edge_clone (cs)
3848 || get_next_cgraph_edge_clone (cs)->caller != new_node);
3850 if (have_self_recursive_calls)
3851 new_node->expand_all_artificial_thunks ();
3853 ipa_set_node_agg_value_chain (new_node, aggvals);
3854 for (av = aggvals; av; av = av->next)
3855 new_node->maybe_create_reference (av->value, NULL);
3857 if (dump_file && (dump_flags & TDF_DETAILS))
3859 fprintf (dump_file, " the new node is %s.\n", new_node->dump_name ());
3860 if (known_contexts.exists ())
3862 for (i = 0; i < count; i++)
3863 if (!known_contexts[i].useless_p ())
3865 fprintf (dump_file, " known ctx %i is ", i);
3866 known_contexts[i].dump (dump_file);
3869 if (aggvals)
3870 ipa_dump_agg_replacement_values (dump_file, aggvals);
3872 ipa_check_create_node_params ();
3873 update_profiling_info (node, new_node);
3874 new_info = IPA_NODE_REF (new_node);
3875 new_info->ipcp_orig_node = node;
3876 new_info->known_csts = known_csts;
3877 new_info->known_contexts = known_contexts;
3879 ipcp_discover_new_direct_edges (new_node, known_csts, known_contexts, aggvals);
3881 callers.release ();
3882 return new_node;
3885 /* Return true, if JFUNC, which describes a i-th parameter of call CS, is a
3886 simple no-operation pass-through function to itself. */
3888 static bool
3889 self_recursive_pass_through_p (cgraph_edge *cs, ipa_jump_func *jfunc, int i)
3891 enum availability availability;
3892 if (cs->caller == cs->callee->function_symbol (&availability)
3893 && availability > AVAIL_INTERPOSABLE
3894 && jfunc->type == IPA_JF_PASS_THROUGH
3895 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR
3896 && ipa_get_jf_pass_through_formal_id (jfunc) == i)
3897 return true;
3898 return false;
3901 /* Given a NODE, and a subset of its CALLERS, try to populate blanks slots in
3902 KNOWN_CSTS with constants that are also known for all of the CALLERS. */
3904 static void
3905 find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
3906 vec<tree> known_csts,
3907 vec<cgraph_edge *> callers)
3909 struct ipa_node_params *info = IPA_NODE_REF (node);
3910 int i, count = ipa_get_param_count (info);
3912 for (i = 0; i < count; i++)
3914 struct cgraph_edge *cs;
3915 tree newval = NULL_TREE;
3916 int j;
3917 bool first = true;
3918 tree type = ipa_get_type (info, i);
3920 if (ipa_get_scalar_lat (info, i)->bottom || known_csts[i])
3921 continue;
3923 FOR_EACH_VEC_ELT (callers, j, cs)
3925 struct ipa_jump_func *jump_func;
3926 tree t;
3928 if (IPA_NODE_REF (cs->caller)->node_dead)
3929 continue;
3931 if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
3932 || (i == 0
3933 && call_passes_through_thunk_p (cs)))
3935 newval = NULL_TREE;
3936 break;
3938 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
3939 if (self_recursive_pass_through_p (cs, jump_func, i))
3940 continue;
3942 t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func, type);
3943 if (!t
3944 || (newval
3945 && !values_equal_for_ipcp_p (t, newval))
3946 || (!first && !newval))
3948 newval = NULL_TREE;
3949 break;
3951 else
3952 newval = t;
3953 first = false;
3956 if (newval)
3958 if (dump_file && (dump_flags & TDF_DETAILS))
3960 fprintf (dump_file, " adding an extra known scalar value ");
3961 print_ipcp_constant_value (dump_file, newval);
3962 fprintf (dump_file, " for ");
3963 ipa_dump_param (dump_file, info, i);
3964 fprintf (dump_file, "\n");
3967 known_csts[i] = newval;
3972 /* Given a NODE and a subset of its CALLERS, try to populate plank slots in
3973 KNOWN_CONTEXTS with polymorphic contexts that are also known for all of the
3974 CALLERS. */
3976 static void
3977 find_more_contexts_for_caller_subset (cgraph_node *node,
3978 vec<ipa_polymorphic_call_context>
3979 *known_contexts,
3980 vec<cgraph_edge *> callers)
3982 ipa_node_params *info = IPA_NODE_REF (node);
3983 int i, count = ipa_get_param_count (info);
3985 for (i = 0; i < count; i++)
3987 cgraph_edge *cs;
3989 if (ipa_get_poly_ctx_lat (info, i)->bottom
3990 || (known_contexts->exists ()
3991 && !(*known_contexts)[i].useless_p ()))
3992 continue;
3994 ipa_polymorphic_call_context newval;
3995 bool first = true;
3996 int j;
3998 FOR_EACH_VEC_ELT (callers, j, cs)
4000 if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
4001 return;
4002 ipa_jump_func *jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs),
4004 ipa_polymorphic_call_context ctx;
4005 ctx = ipa_context_from_jfunc (IPA_NODE_REF (cs->caller), cs, i,
4006 jfunc);
4007 if (first)
4009 newval = ctx;
4010 first = false;
4012 else
4013 newval.meet_with (ctx);
4014 if (newval.useless_p ())
4015 break;
4018 if (!newval.useless_p ())
4020 if (dump_file && (dump_flags & TDF_DETAILS))
4022 fprintf (dump_file, " adding an extra known polymorphic "
4023 "context ");
4024 print_ipcp_constant_value (dump_file, newval);
4025 fprintf (dump_file, " for ");
4026 ipa_dump_param (dump_file, info, i);
4027 fprintf (dump_file, "\n");
4030 if (!known_contexts->exists ())
4031 known_contexts->safe_grow_cleared (ipa_get_param_count (info));
4032 (*known_contexts)[i] = newval;
4038 /* Go through PLATS and create a vector of values consisting of values and
4039 offsets (minus OFFSET) of lattices that contain only a single value. */
4041 static vec<ipa_agg_jf_item>
4042 copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
4044 vec<ipa_agg_jf_item> res = vNULL;
4046 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
4047 return vNULL;
4049 for (struct ipcp_agg_lattice *aglat = plats->aggs; aglat; aglat = aglat->next)
4050 if (aglat->is_single_const ())
4052 struct ipa_agg_jf_item ti;
4053 ti.offset = aglat->offset - offset;
4054 ti.value = aglat->values->value;
4055 res.safe_push (ti);
4057 return res;
4060 /* Intersect all values in INTER with single value lattices in PLATS (while
4061 subtracting OFFSET). */
4063 static void
4064 intersect_with_plats (struct ipcp_param_lattices *plats,
4065 vec<ipa_agg_jf_item> *inter,
4066 HOST_WIDE_INT offset)
4068 struct ipcp_agg_lattice *aglat;
4069 struct ipa_agg_jf_item *item;
4070 int k;
4072 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
4074 inter->release ();
4075 return;
4078 aglat = plats->aggs;
4079 FOR_EACH_VEC_ELT (*inter, k, item)
4081 bool found = false;
4082 if (!item->value)
4083 continue;
4084 while (aglat)
4086 if (aglat->offset - offset > item->offset)
4087 break;
4088 if (aglat->offset - offset == item->offset)
4090 gcc_checking_assert (item->value);
4091 if (aglat->is_single_const ()
4092 && values_equal_for_ipcp_p (item->value,
4093 aglat->values->value))
4094 found = true;
4095 break;
4097 aglat = aglat->next;
4099 if (!found)
4100 item->value = NULL_TREE;
4104 /* Copy aggregate replacement values of NODE (which is an IPA-CP clone) to the
4105 vector result while subtracting OFFSET from the individual value offsets. */
4107 static vec<ipa_agg_jf_item>
4108 agg_replacements_to_vector (struct cgraph_node *node, int index,
4109 HOST_WIDE_INT offset)
4111 struct ipa_agg_replacement_value *av;
4112 vec<ipa_agg_jf_item> res = vNULL;
4114 for (av = ipa_get_agg_replacements_for_node (node); av; av = av->next)
4115 if (av->index == index
4116 && (av->offset - offset) >= 0)
4118 struct ipa_agg_jf_item item;
4119 gcc_checking_assert (av->value);
4120 item.offset = av->offset - offset;
4121 item.value = av->value;
4122 res.safe_push (item);
4125 return res;
4128 /* Intersect all values in INTER with those that we have already scheduled to
4129 be replaced in parameter number INDEX of NODE, which is an IPA-CP clone
4130 (while subtracting OFFSET). */
4132 static void
4133 intersect_with_agg_replacements (struct cgraph_node *node, int index,
4134 vec<ipa_agg_jf_item> *inter,
4135 HOST_WIDE_INT offset)
4137 struct ipa_agg_replacement_value *srcvals;
4138 struct ipa_agg_jf_item *item;
4139 int i;
4141 srcvals = ipa_get_agg_replacements_for_node (node);
4142 if (!srcvals)
4144 inter->release ();
4145 return;
4148 FOR_EACH_VEC_ELT (*inter, i, item)
4150 struct ipa_agg_replacement_value *av;
4151 bool found = false;
4152 if (!item->value)
4153 continue;
4154 for (av = srcvals; av; av = av->next)
4156 gcc_checking_assert (av->value);
4157 if (av->index == index
4158 && av->offset - offset == item->offset)
4160 if (values_equal_for_ipcp_p (item->value, av->value))
4161 found = true;
4162 break;
4165 if (!found)
4166 item->value = NULL_TREE;
4170 /* Intersect values in INTER with aggregate values that come along edge CS to
4171 parameter number INDEX and return it. If INTER does not actually exist yet,
4172 copy all incoming values to it. If we determine we ended up with no values
4173 whatsoever, return a released vector. */
4175 static vec<ipa_agg_jf_item>
4176 intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
4177 vec<ipa_agg_jf_item> inter)
4179 struct ipa_jump_func *jfunc;
4180 jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), index);
4181 if (jfunc->type == IPA_JF_PASS_THROUGH
4182 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
4184 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
4185 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
4187 if (caller_info->ipcp_orig_node)
4189 struct cgraph_node *orig_node = caller_info->ipcp_orig_node;
4190 struct ipcp_param_lattices *orig_plats;
4191 orig_plats = ipa_get_parm_lattices (IPA_NODE_REF (orig_node),
4192 src_idx);
4193 if (agg_pass_through_permissible_p (orig_plats, jfunc))
4195 if (!inter.exists ())
4196 inter = agg_replacements_to_vector (cs->caller, src_idx, 0);
4197 else
4198 intersect_with_agg_replacements (cs->caller, src_idx,
4199 &inter, 0);
4201 else
4203 inter.release ();
4204 return vNULL;
4207 else
4209 struct ipcp_param_lattices *src_plats;
4210 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
4211 if (agg_pass_through_permissible_p (src_plats, jfunc))
4213 /* Currently we do not produce clobber aggregate jump
4214 functions, adjust when we do. */
4215 gcc_checking_assert (!jfunc->agg.items);
4216 if (!inter.exists ())
4217 inter = copy_plats_to_inter (src_plats, 0);
4218 else
4219 intersect_with_plats (src_plats, &inter, 0);
4221 else
4223 inter.release ();
4224 return vNULL;
4228 else if (jfunc->type == IPA_JF_ANCESTOR
4229 && ipa_get_jf_ancestor_agg_preserved (jfunc))
4231 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
4232 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
4233 struct ipcp_param_lattices *src_plats;
4234 HOST_WIDE_INT delta = ipa_get_jf_ancestor_offset (jfunc);
4236 if (caller_info->ipcp_orig_node)
4238 if (!inter.exists ())
4239 inter = agg_replacements_to_vector (cs->caller, src_idx, delta);
4240 else
4241 intersect_with_agg_replacements (cs->caller, src_idx, &inter,
4242 delta);
4244 else
4246 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
4247 /* Currently we do not produce clobber aggregate jump
4248 functions, adjust when we do. */
4249 gcc_checking_assert (!src_plats->aggs || !jfunc->agg.items);
4250 if (!inter.exists ())
4251 inter = copy_plats_to_inter (src_plats, delta);
4252 else
4253 intersect_with_plats (src_plats, &inter, delta);
4256 else if (jfunc->agg.items)
4258 struct ipa_agg_jf_item *item;
4259 int k;
4261 if (!inter.exists ())
4262 for (unsigned i = 0; i < jfunc->agg.items->length (); i++)
4263 inter.safe_push ((*jfunc->agg.items)[i]);
4264 else
4265 FOR_EACH_VEC_ELT (inter, k, item)
4267 int l = 0;
4268 bool found = false;
4270 if (!item->value)
4271 continue;
4273 while ((unsigned) l < jfunc->agg.items->length ())
4275 struct ipa_agg_jf_item *ti;
4276 ti = &(*jfunc->agg.items)[l];
4277 if (ti->offset > item->offset)
4278 break;
4279 if (ti->offset == item->offset)
4281 gcc_checking_assert (ti->value);
4282 if (values_equal_for_ipcp_p (item->value,
4283 ti->value))
4284 found = true;
4285 break;
4287 l++;
4289 if (!found)
4290 item->value = NULL;
4293 else
4295 inter.release ();
4296 return vec<ipa_agg_jf_item>();
4298 return inter;
4301 /* Look at edges in CALLERS and collect all known aggregate values that arrive
4302 from all of them. */
4304 static struct ipa_agg_replacement_value *
4305 find_aggregate_values_for_callers_subset (struct cgraph_node *node,
4306 vec<cgraph_edge *> callers)
4308 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
4309 struct ipa_agg_replacement_value *res;
4310 struct ipa_agg_replacement_value **tail = &res;
4311 struct cgraph_edge *cs;
4312 int i, j, count = ipa_get_param_count (dest_info);
4314 FOR_EACH_VEC_ELT (callers, j, cs)
4316 int c = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
4317 if (c < count)
4318 count = c;
4321 for (i = 0; i < count; i++)
4323 struct cgraph_edge *cs;
4324 vec<ipa_agg_jf_item> inter = vNULL;
4325 struct ipa_agg_jf_item *item;
4326 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (dest_info, i);
4327 int j;
4329 /* Among other things, the following check should deal with all by_ref
4330 mismatches. */
4331 if (plats->aggs_bottom)
4332 continue;
4334 FOR_EACH_VEC_ELT (callers, j, cs)
4336 struct ipa_jump_func *jfunc
4337 = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
4338 if (self_recursive_pass_through_p (cs, jfunc, i)
4339 && (!plats->aggs_by_ref
4340 || ipa_get_jf_pass_through_agg_preserved (jfunc)))
4341 continue;
4342 inter = intersect_aggregates_with_edge (cs, i, inter);
4344 if (!inter.exists ())
4345 goto next_param;
4348 FOR_EACH_VEC_ELT (inter, j, item)
4350 struct ipa_agg_replacement_value *v;
4352 if (!item->value)
4353 continue;
4355 v = ggc_alloc<ipa_agg_replacement_value> ();
4356 v->index = i;
4357 v->offset = item->offset;
4358 v->value = item->value;
4359 v->by_ref = plats->aggs_by_ref;
4360 *tail = v;
4361 tail = &v->next;
4364 next_param:
4365 if (inter.exists ())
4366 inter.release ();
4368 *tail = NULL;
4369 return res;
4372 /* Determine whether CS also brings all scalar values that the NODE is
4373 specialized for. */
4375 static bool
4376 cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
4377 struct cgraph_node *node)
4379 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
4380 int count = ipa_get_param_count (dest_info);
4381 struct ipa_node_params *caller_info;
4382 struct ipa_edge_args *args;
4383 int i;
4385 caller_info = IPA_NODE_REF (cs->caller);
4386 args = IPA_EDGE_REF (cs);
4387 for (i = 0; i < count; i++)
4389 struct ipa_jump_func *jump_func;
4390 tree val, t;
4392 val = dest_info->known_csts[i];
4393 if (!val)
4394 continue;
4396 if (i >= ipa_get_cs_argument_count (args))
4397 return false;
4398 jump_func = ipa_get_ith_jump_func (args, i);
4399 t = ipa_value_from_jfunc (caller_info, jump_func,
4400 ipa_get_type (dest_info, i));
4401 if (!t || !values_equal_for_ipcp_p (val, t))
4402 return false;
4404 return true;
4407 /* Determine whether CS also brings all aggregate values that NODE is
4408 specialized for. */
4409 static bool
4410 cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
4411 struct cgraph_node *node)
4413 struct ipa_node_params *orig_caller_info = IPA_NODE_REF (cs->caller);
4414 struct ipa_node_params *orig_node_info;
4415 struct ipa_agg_replacement_value *aggval;
4416 int i, ec, count;
4418 aggval = ipa_get_agg_replacements_for_node (node);
4419 if (!aggval)
4420 return true;
4422 count = ipa_get_param_count (IPA_NODE_REF (node));
4423 ec = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
4424 if (ec < count)
4425 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
4426 if (aggval->index >= ec)
4427 return false;
4429 orig_node_info = IPA_NODE_REF (IPA_NODE_REF (node)->ipcp_orig_node);
4430 if (orig_caller_info->ipcp_orig_node)
4431 orig_caller_info = IPA_NODE_REF (orig_caller_info->ipcp_orig_node);
4433 for (i = 0; i < count; i++)
4435 static vec<ipa_agg_jf_item> values = vec<ipa_agg_jf_item>();
4436 struct ipcp_param_lattices *plats;
4437 bool interesting = false;
4438 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
4439 if (aggval->index == i)
4441 interesting = true;
4442 break;
4444 if (!interesting)
4445 continue;
4447 plats = ipa_get_parm_lattices (orig_node_info, aggval->index);
4448 if (plats->aggs_bottom)
4449 return false;
4451 values = intersect_aggregates_with_edge (cs, i, values);
4452 if (!values.exists ())
4453 return false;
4455 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
4456 if (aggval->index == i)
4458 struct ipa_agg_jf_item *item;
4459 int j;
4460 bool found = false;
4461 FOR_EACH_VEC_ELT (values, j, item)
4462 if (item->value
4463 && item->offset == av->offset
4464 && values_equal_for_ipcp_p (item->value, av->value))
4466 found = true;
4467 break;
4469 if (!found)
4471 values.release ();
4472 return false;
4476 return true;
4479 /* Given an original NODE and a VAL for which we have already created a
4480 specialized clone, look whether there are incoming edges that still lead
4481 into the old node but now also bring the requested value and also conform to
4482 all other criteria such that they can be redirected the special node.
4483 This function can therefore redirect the final edge in a SCC. */
4485 template <typename valtype>
4486 static void
4487 perhaps_add_new_callers (cgraph_node *node, ipcp_value<valtype> *val)
4489 ipcp_value_source<valtype> *src;
4490 profile_count redirected_sum = profile_count::zero ();
4492 for (src = val->sources; src; src = src->next)
4494 struct cgraph_edge *cs = src->cs;
4495 while (cs)
4497 if (cgraph_edge_brings_value_p (cs, src, node, val)
4498 && cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
4499 && cgraph_edge_brings_all_agg_vals_for_node (cs, val->spec_node))
4501 if (dump_file)
4502 fprintf (dump_file, " - adding an extra caller %s of %s\n",
4503 cs->caller->dump_name (),
4504 val->spec_node->dump_name ());
4506 cs->redirect_callee_duplicating_thunks (val->spec_node);
4507 val->spec_node->expand_all_artificial_thunks ();
4508 if (cs->count.ipa ().initialized_p ())
4509 redirected_sum = redirected_sum + cs->count.ipa ();
4511 cs = get_next_cgraph_edge_clone (cs);
4515 if (redirected_sum.nonzero_p ())
4516 update_specialized_profile (val->spec_node, node, redirected_sum);
4519 /* Return true if KNOWN_CONTEXTS contain at least one useful context. */
4521 static bool
4522 known_contexts_useful_p (vec<ipa_polymorphic_call_context> known_contexts)
4524 ipa_polymorphic_call_context *ctx;
4525 int i;
4527 FOR_EACH_VEC_ELT (known_contexts, i, ctx)
4528 if (!ctx->useless_p ())
4529 return true;
4530 return false;
4533 /* Return a copy of KNOWN_CSTS if it is not empty, otherwise return vNULL. */
4535 static vec<ipa_polymorphic_call_context>
4536 copy_useful_known_contexts (vec<ipa_polymorphic_call_context> known_contexts)
4538 if (known_contexts_useful_p (known_contexts))
4539 return known_contexts.copy ();
4540 else
4541 return vNULL;
4544 /* Copy KNOWN_CSTS and modify the copy according to VAL and INDEX. If
4545 non-empty, replace KNOWN_CONTEXTS with its copy too. */
4547 static void
4548 modify_known_vectors_with_val (vec<tree> *known_csts,
4549 vec<ipa_polymorphic_call_context> *known_contexts,
4550 ipcp_value<tree> *val,
4551 int index)
4553 *known_csts = known_csts->copy ();
4554 *known_contexts = copy_useful_known_contexts (*known_contexts);
4555 (*known_csts)[index] = val->value;
4558 /* Replace KNOWN_CSTS with its copy. Also copy KNOWN_CONTEXTS and modify the
4559 copy according to VAL and INDEX. */
4561 static void
4562 modify_known_vectors_with_val (vec<tree> *known_csts,
4563 vec<ipa_polymorphic_call_context> *known_contexts,
4564 ipcp_value<ipa_polymorphic_call_context> *val,
4565 int index)
4567 *known_csts = known_csts->copy ();
4568 *known_contexts = known_contexts->copy ();
4569 (*known_contexts)[index] = val->value;
4572 /* Return true if OFFSET indicates this was not an aggregate value or there is
4573 a replacement equivalent to VALUE, INDEX and OFFSET among those in the
4574 AGGVALS list. */
4576 DEBUG_FUNCTION bool
4577 ipcp_val_agg_replacement_ok_p (ipa_agg_replacement_value *aggvals,
4578 int index, HOST_WIDE_INT offset, tree value)
4580 if (offset == -1)
4581 return true;
4583 while (aggvals)
4585 if (aggvals->index == index
4586 && aggvals->offset == offset
4587 && values_equal_for_ipcp_p (aggvals->value, value))
4588 return true;
4589 aggvals = aggvals->next;
4591 return false;
4594 /* Return true if offset is minus one because source of a polymorphic contect
4595 cannot be an aggregate value. */
4597 DEBUG_FUNCTION bool
4598 ipcp_val_agg_replacement_ok_p (ipa_agg_replacement_value *,
4599 int , HOST_WIDE_INT offset,
4600 ipa_polymorphic_call_context)
4602 return offset == -1;
4605 /* Decide wheter to create a special version of NODE for value VAL of parameter
4606 at the given INDEX. If OFFSET is -1, the value is for the parameter itself,
4607 otherwise it is stored at the given OFFSET of the parameter. KNOWN_CSTS,
4608 KNOWN_CONTEXTS and KNOWN_AGGS describe the other already known values. */
4610 template <typename valtype>
4611 static bool
4612 decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
4613 ipcp_value<valtype> *val, vec<tree> known_csts,
4614 vec<ipa_polymorphic_call_context> known_contexts)
4616 struct ipa_agg_replacement_value *aggvals;
4617 int freq_sum, caller_count;
4618 profile_count count_sum;
4619 vec<cgraph_edge *> callers;
4621 if (val->spec_node)
4623 perhaps_add_new_callers (node, val);
4624 return false;
4626 else if (val->local_size_cost + overall_size > max_new_size)
4628 if (dump_file && (dump_flags & TDF_DETAILS))
4629 fprintf (dump_file, " Ignoring candidate value because "
4630 "max_new_size would be reached with %li.\n",
4631 val->local_size_cost + overall_size);
4632 return false;
4634 else if (!get_info_about_necessary_edges (val, node, &freq_sum, &count_sum,
4635 &caller_count))
4636 return false;
4638 if (dump_file && (dump_flags & TDF_DETAILS))
4640 fprintf (dump_file, " - considering value ");
4641 print_ipcp_constant_value (dump_file, val->value);
4642 fprintf (dump_file, " for ");
4643 ipa_dump_param (dump_file, IPA_NODE_REF (node), index);
4644 if (offset != -1)
4645 fprintf (dump_file, ", offset: " HOST_WIDE_INT_PRINT_DEC, offset);
4646 fprintf (dump_file, " (caller_count: %i)\n", caller_count);
4649 if (!good_cloning_opportunity_p (node, val->local_time_benefit,
4650 freq_sum, count_sum,
4651 val->local_size_cost)
4652 && !good_cloning_opportunity_p (node,
4653 val->local_time_benefit
4654 + val->prop_time_benefit,
4655 freq_sum, count_sum,
4656 val->local_size_cost
4657 + val->prop_size_cost))
4658 return false;
4660 if (dump_file)
4661 fprintf (dump_file, " Creating a specialized node of %s.\n",
4662 node->dump_name ());
4664 callers = gather_edges_for_value (val, node, caller_count);
4665 if (offset == -1)
4666 modify_known_vectors_with_val (&known_csts, &known_contexts, val, index);
4667 else
4669 known_csts = known_csts.copy ();
4670 known_contexts = copy_useful_known_contexts (known_contexts);
4672 find_more_scalar_values_for_callers_subset (node, known_csts, callers);
4673 find_more_contexts_for_caller_subset (node, &known_contexts, callers);
4674 aggvals = find_aggregate_values_for_callers_subset (node, callers);
4675 gcc_checking_assert (ipcp_val_agg_replacement_ok_p (aggvals, index,
4676 offset, val->value));
4677 val->spec_node = create_specialized_node (node, known_csts, known_contexts,
4678 aggvals, callers);
4679 overall_size += val->local_size_cost;
4681 /* TODO: If for some lattice there is only one other known value
4682 left, make a special node for it too. */
4684 return true;
4687 /* Decide whether and what specialized clones of NODE should be created. */
4689 static bool
4690 decide_whether_version_node (struct cgraph_node *node)
4692 struct ipa_node_params *info = IPA_NODE_REF (node);
4693 int i, count = ipa_get_param_count (info);
4694 vec<tree> known_csts;
4695 vec<ipa_polymorphic_call_context> known_contexts;
4696 vec<ipa_agg_jump_function> known_aggs = vNULL;
4697 bool ret = false;
4699 if (count == 0)
4700 return false;
4702 if (dump_file && (dump_flags & TDF_DETAILS))
4703 fprintf (dump_file, "\nEvaluating opportunities for %s.\n",
4704 node->dump_name ());
4706 gather_context_independent_values (info, &known_csts, &known_contexts,
4707 info->do_clone_for_all_contexts ? &known_aggs
4708 : NULL, NULL);
4710 for (i = 0; i < count;i++)
4712 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
4713 ipcp_lattice<tree> *lat = &plats->itself;
4714 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
4716 if (!lat->bottom
4717 && !known_csts[i])
4719 ipcp_value<tree> *val;
4720 for (val = lat->values; val; val = val->next)
4721 ret |= decide_about_value (node, i, -1, val, known_csts,
4722 known_contexts);
4725 if (!plats->aggs_bottom)
4727 struct ipcp_agg_lattice *aglat;
4728 ipcp_value<tree> *val;
4729 for (aglat = plats->aggs; aglat; aglat = aglat->next)
4730 if (!aglat->bottom && aglat->values
4731 /* If the following is false, the one value is in
4732 known_aggs. */
4733 && (plats->aggs_contain_variable
4734 || !aglat->is_single_const ()))
4735 for (val = aglat->values; val; val = val->next)
4736 ret |= decide_about_value (node, i, aglat->offset, val,
4737 known_csts, known_contexts);
4740 if (!ctxlat->bottom
4741 && known_contexts[i].useless_p ())
4743 ipcp_value<ipa_polymorphic_call_context> *val;
4744 for (val = ctxlat->values; val; val = val->next)
4745 ret |= decide_about_value (node, i, -1, val, known_csts,
4746 known_contexts);
4749 info = IPA_NODE_REF (node);
4752 if (info->do_clone_for_all_contexts)
4754 struct cgraph_node *clone;
4755 vec<cgraph_edge *> callers;
4757 if (dump_file)
4758 fprintf (dump_file, " - Creating a specialized node of %s "
4759 "for all known contexts.\n", node->dump_name ());
4761 callers = node->collect_callers ();
4762 find_more_scalar_values_for_callers_subset (node, known_csts, callers);
4763 find_more_contexts_for_caller_subset (node, &known_contexts, callers);
4764 ipa_agg_replacement_value *aggvals
4765 = find_aggregate_values_for_callers_subset (node, callers);
4767 if (!known_contexts_useful_p (known_contexts))
4769 known_contexts.release ();
4770 known_contexts = vNULL;
4772 clone = create_specialized_node (node, known_csts, known_contexts,
4773 aggvals, callers);
4774 info = IPA_NODE_REF (node);
4775 info->do_clone_for_all_contexts = false;
4776 IPA_NODE_REF (clone)->is_all_contexts_clone = true;
4777 for (i = 0; i < count; i++)
4778 vec_free (known_aggs[i].items);
4779 known_aggs.release ();
4780 ret = true;
4782 else
4784 known_csts.release ();
4785 known_contexts.release ();
4788 return ret;
4791 /* Transitively mark all callees of NODE within the same SCC as not dead. */
4793 static void
4794 spread_undeadness (struct cgraph_node *node)
4796 struct cgraph_edge *cs;
4798 for (cs = node->callees; cs; cs = cs->next_callee)
4799 if (ipa_edge_within_scc (cs))
4801 struct cgraph_node *callee;
4802 struct ipa_node_params *info;
4804 callee = cs->callee->function_symbol (NULL);
4805 info = IPA_NODE_REF (callee);
4807 if (info->node_dead)
4809 info->node_dead = 0;
4810 spread_undeadness (callee);
4815 /* Return true if NODE has a caller from outside of its SCC that is not
4816 dead. Worker callback for cgraph_for_node_and_aliases. */
4818 static bool
4819 has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
4820 void *data ATTRIBUTE_UNUSED)
4822 struct cgraph_edge *cs;
4824 for (cs = node->callers; cs; cs = cs->next_caller)
4825 if (cs->caller->thunk.thunk_p
4826 && cs->caller->call_for_symbol_thunks_and_aliases
4827 (has_undead_caller_from_outside_scc_p, NULL, true))
4828 return true;
4829 else if (!ipa_edge_within_scc (cs)
4830 && !IPA_NODE_REF (cs->caller)->node_dead)
4831 return true;
4832 return false;
4836 /* Identify nodes within the same SCC as NODE which are no longer needed
4837 because of new clones and will be removed as unreachable. */
4839 static void
4840 identify_dead_nodes (struct cgraph_node *node)
4842 struct cgraph_node *v;
4843 for (v = node; v; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4844 if (v->local.local
4845 && !v->call_for_symbol_thunks_and_aliases
4846 (has_undead_caller_from_outside_scc_p, NULL, true))
4847 IPA_NODE_REF (v)->node_dead = 1;
4849 for (v = node; v; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4850 if (!IPA_NODE_REF (v)->node_dead)
4851 spread_undeadness (v);
4853 if (dump_file && (dump_flags & TDF_DETAILS))
4855 for (v = node; v; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4856 if (IPA_NODE_REF (v)->node_dead)
4857 fprintf (dump_file, " Marking node as dead: %s.\n", v->dump_name ());
4861 /* The decision stage. Iterate over the topological order of call graph nodes
4862 TOPO and make specialized clones if deemed beneficial. */
4864 static void
4865 ipcp_decision_stage (struct ipa_topo_info *topo)
4867 int i;
4869 if (dump_file)
4870 fprintf (dump_file, "\nIPA decision stage:\n\n");
4872 for (i = topo->nnodes - 1; i >= 0; i--)
4874 struct cgraph_node *node = topo->order[i];
4875 bool change = false, iterate = true;
4877 while (iterate)
4879 struct cgraph_node *v;
4880 iterate = false;
4881 for (v = node; v; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4882 if (v->has_gimple_body_p ()
4883 && ipcp_versionable_function_p (v))
4884 iterate |= decide_whether_version_node (v);
4886 change |= iterate;
4888 if (change)
4889 identify_dead_nodes (node);
4893 /* Look up all the bits information that we have discovered and copy it over
4894 to the transformation summary. */
4896 static void
4897 ipcp_store_bits_results (void)
4899 cgraph_node *node;
4901 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
4903 ipa_node_params *info = IPA_NODE_REF (node);
4904 bool dumped_sth = false;
4905 bool found_useful_result = false;
4907 if (!opt_for_fn (node->decl, flag_ipa_bit_cp))
4909 if (dump_file)
4910 fprintf (dump_file, "Not considering %s for ipa bitwise propagation "
4911 "; -fipa-bit-cp: disabled.\n",
4912 node->name ());
4913 continue;
4916 if (info->ipcp_orig_node)
4917 info = IPA_NODE_REF (info->ipcp_orig_node);
4919 unsigned count = ipa_get_param_count (info);
4920 for (unsigned i = 0; i < count; i++)
4922 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
4923 if (plats->bits_lattice.constant_p ())
4925 found_useful_result = true;
4926 break;
4930 if (!found_useful_result)
4931 continue;
4933 ipcp_transformation_initialize ();
4934 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4935 vec_safe_reserve_exact (ts->bits, count);
4937 for (unsigned i = 0; i < count; i++)
4939 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
4940 ipa_bits *jfbits;
4942 if (plats->bits_lattice.constant_p ())
4943 jfbits
4944 = ipa_get_ipa_bits_for_value (plats->bits_lattice.get_value (),
4945 plats->bits_lattice.get_mask ());
4946 else
4947 jfbits = NULL;
4949 ts->bits->quick_push (jfbits);
4950 if (!dump_file || !jfbits)
4951 continue;
4952 if (!dumped_sth)
4954 fprintf (dump_file, "Propagated bits info for function %s:\n",
4955 node->dump_name ());
4956 dumped_sth = true;
4958 fprintf (dump_file, " param %i: value = ", i);
4959 print_hex (jfbits->value, dump_file);
4960 fprintf (dump_file, ", mask = ");
4961 print_hex (jfbits->mask, dump_file);
4962 fprintf (dump_file, "\n");
4967 /* Look up all VR information that we have discovered and copy it over
4968 to the transformation summary. */
4970 static void
4971 ipcp_store_vr_results (void)
4973 cgraph_node *node;
4975 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
4977 ipa_node_params *info = IPA_NODE_REF (node);
4978 bool found_useful_result = false;
4980 if (!opt_for_fn (node->decl, flag_ipa_vrp))
4982 if (dump_file)
4983 fprintf (dump_file, "Not considering %s for VR discovery "
4984 "and propagate; -fipa-ipa-vrp: disabled.\n",
4985 node->name ());
4986 continue;
4989 if (info->ipcp_orig_node)
4990 info = IPA_NODE_REF (info->ipcp_orig_node);
4992 unsigned count = ipa_get_param_count (info);
4993 for (unsigned i = 0; i < count; i++)
4995 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
4996 if (!plats->m_value_range.bottom_p ()
4997 && !plats->m_value_range.top_p ())
4999 found_useful_result = true;
5000 break;
5003 if (!found_useful_result)
5004 continue;
5006 ipcp_transformation_initialize ();
5007 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5008 vec_safe_reserve_exact (ts->m_vr, count);
5010 for (unsigned i = 0; i < count; i++)
5012 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
5013 ipa_vr vr;
5015 if (!plats->m_value_range.bottom_p ()
5016 && !plats->m_value_range.top_p ())
5018 vr.known = true;
5019 vr.type = plats->m_value_range.m_vr.kind ();
5020 vr.min = wi::to_wide (plats->m_value_range.m_vr.min ());
5021 vr.max = wi::to_wide (plats->m_value_range.m_vr.max ());
5023 else
5025 vr.known = false;
5026 vr.type = VR_VARYING;
5027 vr.min = vr.max = wi::zero (INT_TYPE_SIZE);
5029 ts->m_vr->quick_push (vr);
5034 /* The IPCP driver. */
5036 static unsigned int
5037 ipcp_driver (void)
5039 struct ipa_topo_info topo;
5041 if (edge_clone_summaries == NULL)
5042 edge_clone_summaries = new edge_clone_summary_t (symtab);
5044 ipa_check_create_node_params ();
5045 ipa_check_create_edge_args ();
5047 if (dump_file)
5049 fprintf (dump_file, "\nIPA structures before propagation:\n");
5050 if (dump_flags & TDF_DETAILS)
5051 ipa_print_all_params (dump_file);
5052 ipa_print_all_jump_functions (dump_file);
5055 /* Topological sort. */
5056 build_toporder_info (&topo);
5057 /* Do the interprocedural propagation. */
5058 ipcp_propagate_stage (&topo);
5059 /* Decide what constant propagation and cloning should be performed. */
5060 ipcp_decision_stage (&topo);
5061 /* Store results of bits propagation. */
5062 ipcp_store_bits_results ();
5063 /* Store results of value range propagation. */
5064 ipcp_store_vr_results ();
5066 /* Free all IPCP structures. */
5067 free_toporder_info (&topo);
5068 delete edge_clone_summaries;
5069 edge_clone_summaries = NULL;
5070 ipa_free_all_structures_after_ipa_cp ();
5071 if (dump_file)
5072 fprintf (dump_file, "\nIPA constant propagation end\n");
5073 return 0;
5076 /* Initialization and computation of IPCP data structures. This is the initial
5077 intraprocedural analysis of functions, which gathers information to be
5078 propagated later on. */
5080 static void
5081 ipcp_generate_summary (void)
5083 struct cgraph_node *node;
5085 if (dump_file)
5086 fprintf (dump_file, "\nIPA constant propagation start:\n");
5087 ipa_register_cgraph_hooks ();
5089 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
5090 ipa_analyze_node (node);
5093 /* Write ipcp summary for nodes in SET. */
5095 static void
5096 ipcp_write_summary (void)
5098 ipa_prop_write_jump_functions ();
5101 /* Read ipcp summary. */
5103 static void
5104 ipcp_read_summary (void)
5106 ipa_prop_read_jump_functions ();
5109 namespace {
5111 const pass_data pass_data_ipa_cp =
5113 IPA_PASS, /* type */
5114 "cp", /* name */
5115 OPTGROUP_NONE, /* optinfo_flags */
5116 TV_IPA_CONSTANT_PROP, /* tv_id */
5117 0, /* properties_required */
5118 0, /* properties_provided */
5119 0, /* properties_destroyed */
5120 0, /* todo_flags_start */
5121 ( TODO_dump_symtab | TODO_remove_functions ), /* todo_flags_finish */
5124 class pass_ipa_cp : public ipa_opt_pass_d
5126 public:
5127 pass_ipa_cp (gcc::context *ctxt)
5128 : ipa_opt_pass_d (pass_data_ipa_cp, ctxt,
5129 ipcp_generate_summary, /* generate_summary */
5130 ipcp_write_summary, /* write_summary */
5131 ipcp_read_summary, /* read_summary */
5132 ipcp_write_transformation_summaries, /*
5133 write_optimization_summary */
5134 ipcp_read_transformation_summaries, /*
5135 read_optimization_summary */
5136 NULL, /* stmt_fixup */
5137 0, /* function_transform_todo_flags_start */
5138 ipcp_transform_function, /* function_transform */
5139 NULL) /* variable_transform */
5142 /* opt_pass methods: */
5143 virtual bool gate (function *)
5145 /* FIXME: We should remove the optimize check after we ensure we never run
5146 IPA passes when not optimizing. */
5147 return (flag_ipa_cp && optimize) || in_lto_p;
5150 virtual unsigned int execute (function *) { return ipcp_driver (); }
5152 }; // class pass_ipa_cp
5154 } // anon namespace
5156 ipa_opt_pass_d *
5157 make_pass_ipa_cp (gcc::context *ctxt)
5159 return new pass_ipa_cp (ctxt);
5162 /* Reset all state within ipa-cp.c so that we can rerun the compiler
5163 within the same process. For use by toplev::finalize. */
5165 void
5166 ipa_cp_c_finalize (void)
5168 max_count = profile_count::uninitialized ();
5169 overall_size = 0;
5170 max_new_size = 0;