* function.c (dump_stack_clash_frame_info): New function.
[official-gcc.git] / gcc / ipa-cp.c
blob6b3d8d7364ced456288f5fee9332875bd7133dd4
1 /* Interprocedural constant propagation
2 Copyright (C) 2005-2017 Free Software Foundation, Inc.
4 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
5 <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Interprocedural constant propagation (IPA-CP).
25 The goal of this transformation is to
27 1) discover functions which are always invoked with some arguments with the
28 same known constant values and modify the functions so that the
29 subsequent optimizations can take advantage of the knowledge, and
31 2) partial specialization - create specialized versions of functions
32 transformed in this way if some parameters are known constants only in
33 certain contexts but the estimated tradeoff between speedup and cost size
34 is deemed good.
36 The algorithm also propagates types and attempts to perform type based
37 devirtualization. Types are propagated much like constants.
39 The algorithm basically consists of three stages. In the first, functions
40 are analyzed one at a time and jump functions are constructed for all known
41 call-sites. In the second phase, the pass propagates information from the
42 jump functions across the call to reveal what values are available at what
43 call sites, performs estimations of effects of known values on functions and
44 their callees, and finally decides what specialized extra versions should be
45 created. In the third, the special versions materialize and appropriate
46 calls are redirected.
48 The algorithm used is to a certain extent based on "Interprocedural Constant
49 Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
50 Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
51 Cooper, Mary W. Hall, and Ken Kennedy.
54 First stage - intraprocedural analysis
55 =======================================
57 This phase computes jump_function and modification flags.
59 A jump function for a call-site represents the values passed as an actual
60 arguments of a given call-site. In principle, there are three types of
61 values:
63 Pass through - the caller's formal parameter is passed as an actual
64 argument, plus an operation on it can be performed.
65 Constant - a constant is passed as an actual argument.
66 Unknown - neither of the above.
68 All jump function types are described in detail in ipa-prop.h, together with
69 the data structures that represent them and methods of accessing them.
71 ipcp_generate_summary() is the main function of the first stage.
73 Second stage - interprocedural analysis
74 ========================================
76 This stage is itself divided into two phases. In the first, we propagate
77 known values over the call graph, in the second, we make cloning decisions.
78 It uses a different algorithm than the original Callahan's paper.
80 First, we traverse the functions topologically from callers to callees and,
81 for each strongly connected component (SCC), we propagate constants
82 according to previously computed jump functions. We also record what known
83 values depend on other known values and estimate local effects. Finally, we
84 propagate cumulative information about these effects from dependent values
85 to those on which they depend.
87 Second, we again traverse the call graph in the same topological order and
88 make clones for functions which we know are called with the same values in
89 all contexts and decide about extra specialized clones of functions just for
90 some contexts - these decisions are based on both local estimates and
91 cumulative estimates propagated from callees.
93    ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
94    second stage.
96 Third phase - materialization of clones, call statement updates.
97 ============================================
99 This stage is currently performed by call graph code (mainly in cgraphunit.c
100 and tree-inline.c) according to instructions inserted to the call graph by
101 the second stage. */
103 #include "config.h"
104 #include "system.h"
105 #include "coretypes.h"
106 #include "backend.h"
107 #include "tree.h"
108 #include "gimple-expr.h"
109 #include "predict.h"
110 #include "alloc-pool.h"
111 #include "tree-pass.h"
112 #include "cgraph.h"
113 #include "diagnostic.h"
114 #include "fold-const.h"
115 #include "gimple-fold.h"
116 #include "symbol-summary.h"
117 #include "tree-vrp.h"
118 #include "ipa-prop.h"
119 #include "tree-pretty-print.h"
120 #include "tree-inline.h"
121 #include "params.h"
122 #include "ipa-fnsummary.h"
123 #include "ipa-utils.h"
124 #include "tree-ssa-ccp.h"
125 #include "stringpool.h"
126 #include "attribs.h"
128 template <typename valtype> class ipcp_value;
130 /* Describes a particular source for an IPA-CP value. */
132 template <typename valtype>
133 class ipcp_value_source
135 public:
136 /* Aggregate offset of the source, negative if the source is scalar value of
137 the argument itself. */
138 HOST_WIDE_INT offset;
139 /* The incoming edge that brought the value. */
140 cgraph_edge *cs;
141 /* If the jump function that resulted into his value was a pass-through or an
142 ancestor, this is the ipcp_value of the caller from which the described
143 value has been derived. Otherwise it is NULL. */
144 ipcp_value<valtype> *val;
145 /* Next pointer in a linked list of sources of a value. */
146 ipcp_value_source *next;
147 /* If the jump function that resulted into his value was a pass-through or an
148 ancestor, this is the index of the parameter of the caller the jump
149 function references. */
150 int index;
/* Common ancestor for all ipcp_value instantiations.  */

class ipcp_value_base
{
public:
  /* Time benefit and size cost that specializing the function for this value
     would bring about in this function alone.  */
  int local_time_benefit, local_size_cost;
  /* Time benefit and size cost that specializing the function for this value
     can bring about in its callees (transitively).  */
  int prop_time_benefit, prop_size_cost;

  ipcp_value_base ()
    : local_time_benefit (0), local_size_cost (0),
      prop_time_benefit (0), prop_size_cost (0) {}
};
170 /* Describes one particular value stored in struct ipcp_lattice. */
172 template <typename valtype>
173 class ipcp_value : public ipcp_value_base
175 public:
176 /* The actual value for the given parameter. */
177 valtype value;
178 /* The list of sources from which this value originates. */
179 ipcp_value_source <valtype> *sources;
180 /* Next pointers in a linked list of all values in a lattice. */
181 ipcp_value *next;
182 /* Next pointers in a linked list of values in a strongly connected component
183 of values. */
184 ipcp_value *scc_next;
185 /* Next pointers in a linked list of SCCs of values sorted topologically
186 according their sources. */
187 ipcp_value *topo_next;
188 /* A specialized node created for this value, NULL if none has been (so far)
189 created. */
190 cgraph_node *spec_node;
191 /* Depth first search number and low link for topological sorting of
192 values. */
193 int dfs, low_link;
194 /* True if this valye is currently on the topo-sort stack. */
195 bool on_stack;
197 ipcp_value()
198 : sources (0), next (0), scc_next (0), topo_next (0),
199 spec_node (0), dfs (0), low_link (0), on_stack (false) {}
201 void add_source (cgraph_edge *cs, ipcp_value *src_val, int src_idx,
202 HOST_WIDE_INT offset);
205 /* Lattice describing potential values of a formal parameter of a function, or
206 a part of an aggregate. TOP is represented by a lattice with zero values
207 and with contains_variable and bottom flags cleared. BOTTOM is represented
208 by a lattice with the bottom flag set. In that case, values and
209 contains_variable flag should be disregarded. */
211 template <typename valtype>
212 class ipcp_lattice
214 public:
215 /* The list of known values and types in this lattice. Note that values are
216 not deallocated if a lattice is set to bottom because there may be value
217 sources referencing them. */
218 ipcp_value<valtype> *values;
219 /* Number of known values and types in this lattice. */
220 int values_count;
221 /* The lattice contains a variable component (in addition to values). */
222 bool contains_variable;
223 /* The value of the lattice is bottom (i.e. variable and unusable for any
224 propagation). */
225 bool bottom;
227 inline bool is_single_const ();
228 inline bool set_to_bottom ();
229 inline bool set_contains_variable ();
230 bool add_value (valtype newval, cgraph_edge *cs,
231 ipcp_value<valtype> *src_val = NULL,
232 int src_idx = 0, HOST_WIDE_INT offset = -1);
233 void print (FILE * f, bool dump_sources, bool dump_benefits);
236 /* Lattice of tree values with an offset to describe a part of an
237 aggregate. */
239 class ipcp_agg_lattice : public ipcp_lattice<tree>
241 public:
242 /* Offset that is being described by this lattice. */
243 HOST_WIDE_INT offset;
244 /* Size so that we don't have to re-compute it every time we traverse the
245 list. Must correspond to TYPE_SIZE of all lat values. */
246 HOST_WIDE_INT size;
247 /* Next element of the linked list. */
248 struct ipcp_agg_lattice *next;
251 /* Lattice of known bits, only capable of holding one value.
252 Bitwise constant propagation propagates which bits of a
253 value are constant.
254 For eg:
255 int f(int x)
257 return some_op (x);
260 int f1(int y)
262 if (cond)
263 return f (y & 0xff);
264 else
265 return f (y & 0xf);
268 In the above case, the param 'x' will always have all
269 the bits (except the bits in lsb) set to 0.
270 Hence the mask of 'x' would be 0xff. The mask
271 reflects that the bits in lsb are unknown.
272 The actual propagated value is given by m_value & ~m_mask. */
274 class ipcp_bits_lattice
276 public:
277 bool bottom_p () { return m_lattice_val == IPA_BITS_VARYING; }
278 bool top_p () { return m_lattice_val == IPA_BITS_UNDEFINED; }
279 bool constant_p () { return m_lattice_val == IPA_BITS_CONSTANT; }
280 bool set_to_bottom ();
281 bool set_to_constant (widest_int, widest_int);
283 widest_int get_value () { return m_value; }
284 widest_int get_mask () { return m_mask; }
286 bool meet_with (ipcp_bits_lattice& other, unsigned, signop,
287 enum tree_code, tree);
289 bool meet_with (widest_int, widest_int, unsigned);
291 void print (FILE *);
293 private:
294 enum { IPA_BITS_UNDEFINED, IPA_BITS_CONSTANT, IPA_BITS_VARYING } m_lattice_val;
296 /* Similar to ccp_lattice_t, mask represents which bits of value are constant.
297 If a bit in mask is set to 0, then the corresponding bit in
298 value is known to be constant. */
299 widest_int m_value, m_mask;
301 bool meet_with_1 (widest_int, widest_int, unsigned);
302 void get_value_and_mask (tree, widest_int *, widest_int *);
305 /* Lattice of value ranges. */
307 class ipcp_vr_lattice
309 public:
310 value_range m_vr;
312 inline bool bottom_p () const;
313 inline bool top_p () const;
314 inline bool set_to_bottom ();
315 bool meet_with (const value_range *p_vr);
316 bool meet_with (const ipcp_vr_lattice &other);
317 void init () { m_vr.type = VR_UNDEFINED; }
318 void print (FILE * f);
320 private:
321 bool meet_with_1 (const value_range *other_vr);
324 /* Structure containing lattices for a parameter itself and for pieces of
325 aggregates that are passed in the parameter or by a reference in a parameter
326 plus some other useful flags. */
328 class ipcp_param_lattices
330 public:
331 /* Lattice describing the value of the parameter itself. */
332 ipcp_lattice<tree> itself;
333 /* Lattice describing the polymorphic contexts of a parameter. */
334 ipcp_lattice<ipa_polymorphic_call_context> ctxlat;
335 /* Lattices describing aggregate parts. */
336 ipcp_agg_lattice *aggs;
337 /* Lattice describing known bits. */
338 ipcp_bits_lattice bits_lattice;
339 /* Lattice describing value range. */
340 ipcp_vr_lattice m_value_range;
341 /* Number of aggregate lattices */
342 int aggs_count;
343 /* True if aggregate data were passed by reference (as opposed to by
344 value). */
345 bool aggs_by_ref;
346 /* All aggregate lattices contain a variable component (in addition to
347 values). */
348 bool aggs_contain_variable;
349 /* The value of all aggregate lattices is bottom (i.e. variable and unusable
350 for any propagation). */
351 bool aggs_bottom;
353 /* There is a virtual call based on this parameter. */
354 bool virt_call;
357 /* Allocation pools for values and their sources in ipa-cp. */
359 object_allocator<ipcp_value<tree> > ipcp_cst_values_pool
360 ("IPA-CP constant values");
362 object_allocator<ipcp_value<ipa_polymorphic_call_context> >
363 ipcp_poly_ctx_values_pool ("IPA-CP polymorphic contexts");
365 object_allocator<ipcp_value_source<tree> > ipcp_sources_pool
366 ("IPA-CP value sources");
368 object_allocator<ipcp_agg_lattice> ipcp_agg_lattice_pool
369 ("IPA_CP aggregate lattices");
371 /* Maximal count found in program. */
373 static profile_count max_count;
375 /* Original overall size of the program. */
377 static long overall_size, max_new_size;
379 /* Return the param lattices structure corresponding to the Ith formal
380 parameter of the function described by INFO. */
381 static inline struct ipcp_param_lattices *
382 ipa_get_parm_lattices (struct ipa_node_params *info, int i)
384 gcc_assert (i >= 0 && i < ipa_get_param_count (info));
385 gcc_checking_assert (!info->ipcp_orig_node);
386 gcc_checking_assert (info->lattices);
387 return &(info->lattices[i]);
390 /* Return the lattice corresponding to the scalar value of the Ith formal
391 parameter of the function described by INFO. */
392 static inline ipcp_lattice<tree> *
393 ipa_get_scalar_lat (struct ipa_node_params *info, int i)
395 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
396 return &plats->itself;
399 /* Return the lattice corresponding to the scalar value of the Ith formal
400 parameter of the function described by INFO. */
401 static inline ipcp_lattice<ipa_polymorphic_call_context> *
402 ipa_get_poly_ctx_lat (struct ipa_node_params *info, int i)
404 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
405 return &plats->ctxlat;
408 /* Return the lattice corresponding to the value range of the Ith formal
409 parameter of the function described by INFO. */
411 static inline ipcp_vr_lattice *
412 ipa_get_vr_lat (struct ipa_node_params *info, int i)
414 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
415 return &plats->m_value_range;
418 /* Return whether LAT is a lattice with a single constant and without an
419 undefined value. */
421 template <typename valtype>
422 inline bool
423 ipcp_lattice<valtype>::is_single_const ()
425 if (bottom || contains_variable || values_count != 1)
426 return false;
427 else
428 return true;
431 /* Print V which is extracted from a value in a lattice to F. */
433 static void
434 print_ipcp_constant_value (FILE * f, tree v)
436 if (TREE_CODE (v) == ADDR_EXPR
437 && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
439 fprintf (f, "& ");
440 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)));
442 else
443 print_generic_expr (f, v);
446 /* Print V which is extracted from a value in a lattice to F. */
448 static void
449 print_ipcp_constant_value (FILE * f, ipa_polymorphic_call_context v)
451 v.dump(f, false);
454 /* Print a lattice LAT to F. */
456 template <typename valtype>
457 void
458 ipcp_lattice<valtype>::print (FILE * f, bool dump_sources, bool dump_benefits)
460 ipcp_value<valtype> *val;
461 bool prev = false;
463 if (bottom)
465 fprintf (f, "BOTTOM\n");
466 return;
469 if (!values_count && !contains_variable)
471 fprintf (f, "TOP\n");
472 return;
475 if (contains_variable)
477 fprintf (f, "VARIABLE");
478 prev = true;
479 if (dump_benefits)
480 fprintf (f, "\n");
483 for (val = values; val; val = val->next)
485 if (dump_benefits && prev)
486 fprintf (f, " ");
487 else if (!dump_benefits && prev)
488 fprintf (f, ", ");
489 else
490 prev = true;
492 print_ipcp_constant_value (f, val->value);
494 if (dump_sources)
496 ipcp_value_source<valtype> *s;
498 fprintf (f, " [from:");
499 for (s = val->sources; s; s = s->next)
500 fprintf (f, " %i(%i)", s->cs->caller->order,
501 s->cs->frequency);
502 fprintf (f, "]");
505 if (dump_benefits)
506 fprintf (f, " [loc_time: %i, loc_size: %i, "
507 "prop_time: %i, prop_size: %i]\n",
508 val->local_time_benefit, val->local_size_cost,
509 val->prop_time_benefit, val->prop_size_cost);
511 if (!dump_benefits)
512 fprintf (f, "\n");
515 void
516 ipcp_bits_lattice::print (FILE *f)
518 if (top_p ())
519 fprintf (f, " Bits unknown (TOP)\n");
520 else if (bottom_p ())
521 fprintf (f, " Bits unusable (BOTTOM)\n");
522 else
524 fprintf (f, " Bits: value = "); print_hex (get_value (), f);
525 fprintf (f, ", mask = "); print_hex (get_mask (), f);
526 fprintf (f, "\n");
530 /* Print value range lattice to F. */
532 void
533 ipcp_vr_lattice::print (FILE * f)
535 dump_value_range (f, &m_vr);
538 /* Print all ipcp_lattices of all functions to F. */
540 static void
541 print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
543 struct cgraph_node *node;
544 int i, count;
546 fprintf (f, "\nLattices:\n");
547 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
549 struct ipa_node_params *info;
551 info = IPA_NODE_REF (node);
552 fprintf (f, " Node: %s:\n", node->dump_name ());
553 count = ipa_get_param_count (info);
554 for (i = 0; i < count; i++)
556 struct ipcp_agg_lattice *aglat;
557 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
558 fprintf (f, " param [%d]: ", i);
559 plats->itself.print (f, dump_sources, dump_benefits);
560 fprintf (f, " ctxs: ");
561 plats->ctxlat.print (f, dump_sources, dump_benefits);
562 plats->bits_lattice.print (f);
563 fprintf (f, " ");
564 plats->m_value_range.print (f);
565 fprintf (f, "\n");
566 if (plats->virt_call)
567 fprintf (f, " virt_call flag set\n");
569 if (plats->aggs_bottom)
571 fprintf (f, " AGGS BOTTOM\n");
572 continue;
574 if (plats->aggs_contain_variable)
575 fprintf (f, " AGGS VARIABLE\n");
576 for (aglat = plats->aggs; aglat; aglat = aglat->next)
578 fprintf (f, " %soffset " HOST_WIDE_INT_PRINT_DEC ": ",
579 plats->aggs_by_ref ? "ref " : "", aglat->offset);
580 aglat->print (f, dump_sources, dump_benefits);
586 /* Determine whether it is at all technically possible to create clones of NODE
587 and store this information in the ipa_node_params structure associated
588 with NODE. */
590 static void
591 determine_versionability (struct cgraph_node *node,
592 struct ipa_node_params *info)
594 const char *reason = NULL;
596 /* There are a number of generic reasons functions cannot be versioned. We
597 also cannot remove parameters if there are type attributes such as fnspec
598 present. */
599 if (node->alias || node->thunk.thunk_p)
600 reason = "alias or thunk";
601 else if (!node->local.versionable)
602 reason = "not a tree_versionable_function";
603 else if (node->get_availability () <= AVAIL_INTERPOSABLE)
604 reason = "insufficient body availability";
605 else if (!opt_for_fn (node->decl, optimize)
606 || !opt_for_fn (node->decl, flag_ipa_cp))
607 reason = "non-optimized function";
608 else if (lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (node->decl)))
610 /* Ideally we should clone the SIMD clones themselves and create
611 vector copies of them, so IPA-cp and SIMD clones can happily
612 coexist, but that may not be worth the effort. */
613 reason = "function has SIMD clones";
615 else if (lookup_attribute ("target_clones", DECL_ATTRIBUTES (node->decl)))
617 /* Ideally we should clone the target clones themselves and create
618 copies of them, so IPA-cp and target clones can happily
619 coexist, but that may not be worth the effort. */
620 reason = "function target_clones attribute";
622 /* Don't clone decls local to a comdat group; it breaks and for C++
623 decloned constructors, inlining is always better anyway. */
624 else if (node->comdat_local_p ())
625 reason = "comdat-local function";
626 else if (node->calls_comdat_local)
628 /* TODO: call is versionable if we make sure that all
629 callers are inside of a comdat group. */
630 reason = "calls comdat-local function";
633 if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
634 fprintf (dump_file, "Function %s is not versionable, reason: %s.\n",
635 node->dump_name (), reason);
637 info->versionable = (reason == NULL);
640 /* Return true if it is at all technically possible to create clones of a
641 NODE. */
643 static bool
644 ipcp_versionable_function_p (struct cgraph_node *node)
646 return IPA_NODE_REF (node)->versionable;
649 /* Structure holding accumulated information about callers of a node. */
651 struct caller_statistics
653 profile_count count_sum;
654 int n_calls, n_hot_calls, freq_sum;
657 /* Initialize fields of STAT to zeroes. */
659 static inline void
660 init_caller_stats (struct caller_statistics *stats)
662 stats->count_sum = profile_count::zero ();
663 stats->n_calls = 0;
664 stats->n_hot_calls = 0;
665 stats->freq_sum = 0;
668 /* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
669 non-thunk incoming edges to NODE. */
671 static bool
672 gather_caller_stats (struct cgraph_node *node, void *data)
674 struct caller_statistics *stats = (struct caller_statistics *) data;
675 struct cgraph_edge *cs;
677 for (cs = node->callers; cs; cs = cs->next_caller)
678 if (!cs->caller->thunk.thunk_p)
680 if (cs->count.initialized_p ())
681 stats->count_sum += cs->count;
682 stats->freq_sum += cs->frequency;
683 stats->n_calls++;
684 if (cs->maybe_hot_p ())
685 stats->n_hot_calls ++;
687 return false;
691 /* Return true if this NODE is viable candidate for cloning. */
693 static bool
694 ipcp_cloning_candidate_p (struct cgraph_node *node)
696 struct caller_statistics stats;
698 gcc_checking_assert (node->has_gimple_body_p ());
700 if (!opt_for_fn (node->decl, flag_ipa_cp_clone))
702 if (dump_file)
703 fprintf (dump_file, "Not considering %s for cloning; "
704 "-fipa-cp-clone disabled.\n",
705 node->name ());
706 return false;
709 if (node->optimize_for_size_p ())
711 if (dump_file)
712 fprintf (dump_file, "Not considering %s for cloning; "
713 "optimizing it for size.\n",
714 node->name ());
715 return false;
718 init_caller_stats (&stats);
719 node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats, false);
721 if (ipa_fn_summaries->get (node)->self_size < stats.n_calls)
723 if (dump_file)
724 fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
725 node->name ());
726 return true;
729 /* When profile is available and function is hot, propagate into it even if
730 calls seems cold; constant propagation can improve function's speed
731 significantly. */
732 if (max_count > profile_count::zero ())
734 if (stats.count_sum > node->count.apply_scale (90, 100))
736 if (dump_file)
737 fprintf (dump_file, "Considering %s for cloning; "
738 "usually called directly.\n",
739 node->name ());
740 return true;
743 if (!stats.n_hot_calls)
745 if (dump_file)
746 fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
747 node->name ());
748 return false;
750 if (dump_file)
751 fprintf (dump_file, "Considering %s for cloning.\n",
752 node->name ());
753 return true;
756 template <typename valtype>
757 class value_topo_info
759 public:
760 /* Head of the linked list of topologically sorted values. */
761 ipcp_value<valtype> *values_topo;
762 /* Stack for creating SCCs, represented by a linked list too. */
763 ipcp_value<valtype> *stack;
764 /* Counter driving the algorithm in add_val_to_toposort. */
765 int dfs_counter;
767 value_topo_info () : values_topo (NULL), stack (NULL), dfs_counter (0)
769 void add_val (ipcp_value<valtype> *cur_val);
770 void propagate_effects ();
773 /* Arrays representing a topological ordering of call graph nodes and a stack
774 of nodes used during constant propagation and also data required to perform
775 topological sort of values and propagation of benefits in the determined
776 order. */
778 class ipa_topo_info
780 public:
781 /* Array with obtained topological order of cgraph nodes. */
782 struct cgraph_node **order;
783 /* Stack of cgraph nodes used during propagation within SCC until all values
784 in the SCC stabilize. */
785 struct cgraph_node **stack;
786 int nnodes, stack_top;
788 value_topo_info<tree> constants;
789 value_topo_info<ipa_polymorphic_call_context> contexts;
791 ipa_topo_info () : order(NULL), stack(NULL), nnodes(0), stack_top(0),
792 constants ()
796 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
798 static void
799 build_toporder_info (struct ipa_topo_info *topo)
801 topo->order = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
802 topo->stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
804 gcc_checking_assert (topo->stack_top == 0);
805 topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
808 /* Free information about strongly connected components and the arrays in
809 TOPO. */
811 static void
812 free_toporder_info (struct ipa_topo_info *topo)
814 ipa_free_postorder_info ();
815 free (topo->order);
816 free (topo->stack);
819 /* Add NODE to the stack in TOPO, unless it is already there. */
821 static inline void
822 push_node_to_stack (struct ipa_topo_info *topo, struct cgraph_node *node)
824 struct ipa_node_params *info = IPA_NODE_REF (node);
825 if (info->node_enqueued)
826 return;
827 info->node_enqueued = 1;
828 topo->stack[topo->stack_top++] = node;
831 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
832 is empty. */
834 static struct cgraph_node *
835 pop_node_from_stack (struct ipa_topo_info *topo)
837 if (topo->stack_top)
839 struct cgraph_node *node;
840 topo->stack_top--;
841 node = topo->stack[topo->stack_top];
842 IPA_NODE_REF (node)->node_enqueued = 0;
843 return node;
845 else
846 return NULL;
849 /* Set lattice LAT to bottom and return true if it previously was not set as
850 such. */
852 template <typename valtype>
853 inline bool
854 ipcp_lattice<valtype>::set_to_bottom ()
856 bool ret = !bottom;
857 bottom = true;
858 return ret;
861 /* Mark lattice as containing an unknown value and return true if it previously
862 was not marked as such. */
864 template <typename valtype>
865 inline bool
866 ipcp_lattice<valtype>::set_contains_variable ()
868 bool ret = !contains_variable;
869 contains_variable = true;
870 return ret;
873 /* Set all aggegate lattices in PLATS to bottom and return true if they were
874 not previously set as such. */
876 static inline bool
877 set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
879 bool ret = !plats->aggs_bottom;
880 plats->aggs_bottom = true;
881 return ret;
884 /* Mark all aggegate lattices in PLATS as containing an unknown value and
885 return true if they were not previously marked as such. */
887 static inline bool
888 set_agg_lats_contain_variable (struct ipcp_param_lattices *plats)
890 bool ret = !plats->aggs_contain_variable;
891 plats->aggs_contain_variable = true;
892 return ret;
895 bool
896 ipcp_vr_lattice::meet_with (const ipcp_vr_lattice &other)
898 return meet_with_1 (&other.m_vr);
901 /* Meet the current value of the lattice with value ranfge described by VR
902 lattice. */
904 bool
905 ipcp_vr_lattice::meet_with (const value_range *p_vr)
907 return meet_with_1 (p_vr);
910 /* Meet the current value of the lattice with value ranfge described by
911 OTHER_VR lattice. */
913 bool
914 ipcp_vr_lattice::meet_with_1 (const value_range *other_vr)
916 tree min = m_vr.min, max = m_vr.max;
917 value_range_type type = m_vr.type;
919 if (bottom_p ())
920 return false;
922 if (other_vr->type == VR_VARYING)
923 return set_to_bottom ();
925 vrp_meet (&m_vr, other_vr);
926 if (type != m_vr.type
927 || min != m_vr.min
928 || max != m_vr.max)
929 return true;
930 else
931 return false;
934 /* Return true if value range information in the lattice is yet unknown. */
936 bool
937 ipcp_vr_lattice::top_p () const
939 return m_vr.type == VR_UNDEFINED;
942 /* Return true if value range information in the lattice is known to be
943 unusable. */
945 bool
946 ipcp_vr_lattice::bottom_p () const
948 return m_vr.type == VR_VARYING;
951 /* Set value range information in the lattice to bottom. Return true if it
952 previously was in a different state. */
954 bool
955 ipcp_vr_lattice::set_to_bottom ()
957 if (m_vr.type == VR_VARYING)
958 return false;
959 m_vr.type = VR_VARYING;
960 return true;
963 /* Set lattice value to bottom, if it already isn't the case. */
965 bool
966 ipcp_bits_lattice::set_to_bottom ()
968 if (bottom_p ())
969 return false;
970 m_lattice_val = IPA_BITS_VARYING;
971 m_value = 0;
972 m_mask = -1;
973 return true;
976 /* Set to constant if it isn't already. Only meant to be called
977 when switching state from TOP. */
979 bool
980 ipcp_bits_lattice::set_to_constant (widest_int value, widest_int mask)
982 gcc_assert (top_p ());
983 m_lattice_val = IPA_BITS_CONSTANT;
984 m_value = value;
985 m_mask = mask;
986 return true;
989 /* Convert operand to value, mask form. */
991 void
992 ipcp_bits_lattice::get_value_and_mask (tree operand, widest_int *valuep, widest_int *maskp)
994 wide_int get_nonzero_bits (const_tree);
996 if (TREE_CODE (operand) == INTEGER_CST)
998 *valuep = wi::to_widest (operand);
999 *maskp = 0;
1001 else
1003 *valuep = 0;
1004 *maskp = -1;
1008 /* Meet operation, similar to ccp_lattice_meet, we xor values
1009 if this->value, value have different values at same bit positions, we want
1010 to drop that bit to varying. Return true if mask is changed.
1011 This function assumes that the lattice value is in CONSTANT state */
1013 bool
1014 ipcp_bits_lattice::meet_with_1 (widest_int value, widest_int mask,
1015 unsigned precision)
1017 gcc_assert (constant_p ());
1019 widest_int old_mask = m_mask;
1020 m_mask = (m_mask | mask) | (m_value ^ value);
1022 if (wi::sext (m_mask, precision) == -1)
1023 return set_to_bottom ();
1025 return m_mask != old_mask;
1028 /* Meet the bits lattice with operand
1029 described by <value, mask, sgn, precision. */
1031 bool
1032 ipcp_bits_lattice::meet_with (widest_int value, widest_int mask,
1033 unsigned precision)
1035 if (bottom_p ())
1036 return false;
1038 if (top_p ())
1040 if (wi::sext (mask, precision) == -1)
1041 return set_to_bottom ();
1042 return set_to_constant (value, mask);
1045 return meet_with_1 (value, mask, precision);
1048 /* Meet bits lattice with the result of bit_value_binop (other, operand)
1049 if code is binary operation or bit_value_unop (other) if code is unary op.
1050 In the case when code is nop_expr, no adjustment is required. */
1052 bool
1053 ipcp_bits_lattice::meet_with (ipcp_bits_lattice& other, unsigned precision,
1054 signop sgn, enum tree_code code, tree operand)
1056 if (other.bottom_p ())
1057 return set_to_bottom ();
1059 if (bottom_p () || other.top_p ())
1060 return false;
1062 widest_int adjusted_value, adjusted_mask;
1064 if (TREE_CODE_CLASS (code) == tcc_binary)
1066 tree type = TREE_TYPE (operand);
1067 gcc_assert (INTEGRAL_TYPE_P (type));
1068 widest_int o_value, o_mask;
1069 get_value_and_mask (operand, &o_value, &o_mask);
1071 bit_value_binop (code, sgn, precision, &adjusted_value, &adjusted_mask,
1072 sgn, precision, other.get_value (), other.get_mask (),
1073 TYPE_SIGN (type), TYPE_PRECISION (type), o_value, o_mask);
1075 if (wi::sext (adjusted_mask, precision) == -1)
1076 return set_to_bottom ();
1079 else if (TREE_CODE_CLASS (code) == tcc_unary)
1081 bit_value_unop (code, sgn, precision, &adjusted_value,
1082 &adjusted_mask, sgn, precision, other.get_value (),
1083 other.get_mask ());
1085 if (wi::sext (adjusted_mask, precision) == -1)
1086 return set_to_bottom ();
1089 else
1090 return set_to_bottom ();
1092 if (top_p ())
1094 if (wi::sext (adjusted_mask, precision) == -1)
1095 return set_to_bottom ();
1096 return set_to_constant (adjusted_value, adjusted_mask);
1098 else
1099 return meet_with_1 (adjusted_value, adjusted_mask, precision);
1102 /* Mark bot aggregate and scalar lattices as containing an unknown variable,
1103 return true is any of them has not been marked as such so far. */
1105 static inline bool
1106 set_all_contains_variable (struct ipcp_param_lattices *plats)
1108 bool ret;
1109 ret = plats->itself.set_contains_variable ();
1110 ret |= plats->ctxlat.set_contains_variable ();
1111 ret |= set_agg_lats_contain_variable (plats);
1112 ret |= plats->bits_lattice.set_to_bottom ();
1113 ret |= plats->m_value_range.set_to_bottom ();
1114 return ret;
1117 /* Worker of call_for_symbol_thunks_and_aliases, increment the integer DATA
1118 points to by the number of callers to NODE. */
1120 static bool
1121 count_callers (cgraph_node *node, void *data)
1123 int *caller_count = (int *) data;
1125 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
1126 /* Local thunks can be handled transparently, but if the thunk can not
1127 be optimized out, count it as a real use. */
1128 if (!cs->caller->thunk.thunk_p || !cs->caller->local.local)
1129 ++*caller_count;
1130 return false;
1133 /* Worker of call_for_symbol_thunks_and_aliases, it is supposed to be called on
1134 the one caller of some other node. Set the caller's corresponding flag. */
1136 static bool
1137 set_single_call_flag (cgraph_node *node, void *)
1139 cgraph_edge *cs = node->callers;
1140 /* Local thunks can be handled transparently, skip them. */
1141 while (cs && cs->caller->thunk.thunk_p && cs->caller->local.local)
1142 cs = cs->next_caller;
1143 if (cs)
1145 IPA_NODE_REF (cs->caller)->node_calling_single_call = true;
1146 return true;
1148 return false;
1151 /* Initialize ipcp_lattices. */
1153 static void
1154 initialize_node_lattices (struct cgraph_node *node)
1156 struct ipa_node_params *info = IPA_NODE_REF (node);
1157 struct cgraph_edge *ie;
1158 bool disable = false, variable = false;
1159 int i;
1161 gcc_checking_assert (node->has_gimple_body_p ());
1162 if (cgraph_local_p (node))
1164 int caller_count = 0;
1165 node->call_for_symbol_thunks_and_aliases (count_callers, &caller_count,
1166 true);
1167 gcc_checking_assert (caller_count > 0);
1168 if (caller_count == 1)
1169 node->call_for_symbol_thunks_and_aliases (set_single_call_flag,
1170 NULL, true);
1172 else
1174 /* When cloning is allowed, we can assume that externally visible
1175 functions are not called. We will compensate this by cloning
1176 later. */
1177 if (ipcp_versionable_function_p (node)
1178 && ipcp_cloning_candidate_p (node))
1179 variable = true;
1180 else
1181 disable = true;
1184 for (i = 0; i < ipa_get_param_count (info); i++)
1186 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1187 plats->m_value_range.init ();
1190 if (disable || variable)
1192 for (i = 0; i < ipa_get_param_count (info); i++)
1194 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1195 if (disable)
1197 plats->itself.set_to_bottom ();
1198 plats->ctxlat.set_to_bottom ();
1199 set_agg_lats_to_bottom (plats);
1200 plats->bits_lattice.set_to_bottom ();
1201 plats->m_value_range.set_to_bottom ();
1203 else
1204 set_all_contains_variable (plats);
1206 if (dump_file && (dump_flags & TDF_DETAILS)
1207 && !node->alias && !node->thunk.thunk_p)
1208 fprintf (dump_file, "Marking all lattices of %s as %s\n",
1209 node->dump_name (), disable ? "BOTTOM" : "VARIABLE");
1212 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1213 if (ie->indirect_info->polymorphic
1214 && ie->indirect_info->param_index >= 0)
1216 gcc_checking_assert (ie->indirect_info->param_index >= 0);
1217 ipa_get_parm_lattices (info,
1218 ie->indirect_info->param_index)->virt_call = 1;
1222 /* Return the result of a (possibly arithmetic) pass through jump function
1223 JFUNC on the constant value INPUT. Return NULL_TREE if that cannot be
1224 determined or be considered an interprocedural invariant. */
1226 static tree
1227 ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
1229 tree restype, res;
1231 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
1232 return input;
1233 if (!is_gimple_ip_invariant (input))
1234 return NULL_TREE;
1236 if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc))
1237 == tcc_unary)
1238 res = fold_unary (ipa_get_jf_pass_through_operation (jfunc),
1239 TREE_TYPE (input), input);
1240 else
1242 if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc))
1243 == tcc_comparison)
1244 restype = boolean_type_node;
1245 else
1246 restype = TREE_TYPE (input);
1247 res = fold_binary (ipa_get_jf_pass_through_operation (jfunc), restype,
1248 input, ipa_get_jf_pass_through_operand (jfunc));
1250 if (res && !is_gimple_ip_invariant (res))
1251 return NULL_TREE;
1253 return res;
1256 /* Return the result of an ancestor jump function JFUNC on the constant value
1257 INPUT. Return NULL_TREE if that cannot be determined. */
1259 static tree
1260 ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
1262 gcc_checking_assert (TREE_CODE (input) != TREE_BINFO);
1263 if (TREE_CODE (input) == ADDR_EXPR)
1265 tree t = TREE_OPERAND (input, 0);
1266 t = build_ref_for_offset (EXPR_LOCATION (t), t,
1267 ipa_get_jf_ancestor_offset (jfunc), false,
1268 ptr_type_node, NULL, false);
1269 return build_fold_addr_expr (t);
1271 else
1272 return NULL_TREE;
1275 /* Determine whether JFUNC evaluates to a single known constant value and if
1276 so, return it. Otherwise return NULL. INFO describes the caller node or
1277 the one it is inlined to, so that pass-through jump functions can be
1278 evaluated. */
1280 tree
1281 ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
1283 if (jfunc->type == IPA_JF_CONST)
1284 return ipa_get_jf_constant (jfunc);
1285 else if (jfunc->type == IPA_JF_PASS_THROUGH
1286 || jfunc->type == IPA_JF_ANCESTOR)
1288 tree input;
1289 int idx;
1291 if (jfunc->type == IPA_JF_PASS_THROUGH)
1292 idx = ipa_get_jf_pass_through_formal_id (jfunc);
1293 else
1294 idx = ipa_get_jf_ancestor_formal_id (jfunc);
1296 if (info->ipcp_orig_node)
1297 input = info->known_csts[idx];
1298 else
1300 ipcp_lattice<tree> *lat;
1302 if (!info->lattices
1303 || idx >= ipa_get_param_count (info))
1304 return NULL_TREE;
1305 lat = ipa_get_scalar_lat (info, idx);
1306 if (!lat->is_single_const ())
1307 return NULL_TREE;
1308 input = lat->values->value;
1311 if (!input)
1312 return NULL_TREE;
1314 if (jfunc->type == IPA_JF_PASS_THROUGH)
1315 return ipa_get_jf_pass_through_result (jfunc, input);
1316 else
1317 return ipa_get_jf_ancestor_result (jfunc, input);
1319 else
1320 return NULL_TREE;
1323 /* Determie whether JFUNC evaluates to single known polymorphic context, given
1324 that INFO describes the caller node or the one it is inlined to, CS is the
1325 call graph edge corresponding to JFUNC and CSIDX index of the described
1326 parameter. */
1328 ipa_polymorphic_call_context
1329 ipa_context_from_jfunc (ipa_node_params *info, cgraph_edge *cs, int csidx,
1330 ipa_jump_func *jfunc)
1332 ipa_edge_args *args = IPA_EDGE_REF (cs);
1333 ipa_polymorphic_call_context ctx;
1334 ipa_polymorphic_call_context *edge_ctx
1335 = cs ? ipa_get_ith_polymorhic_call_context (args, csidx) : NULL;
1337 if (edge_ctx && !edge_ctx->useless_p ())
1338 ctx = *edge_ctx;
1340 if (jfunc->type == IPA_JF_PASS_THROUGH
1341 || jfunc->type == IPA_JF_ANCESTOR)
1343 ipa_polymorphic_call_context srcctx;
1344 int srcidx;
1345 bool type_preserved = true;
1346 if (jfunc->type == IPA_JF_PASS_THROUGH)
1348 if (ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1349 return ctx;
1350 type_preserved = ipa_get_jf_pass_through_type_preserved (jfunc);
1351 srcidx = ipa_get_jf_pass_through_formal_id (jfunc);
1353 else
1355 type_preserved = ipa_get_jf_ancestor_type_preserved (jfunc);
1356 srcidx = ipa_get_jf_ancestor_formal_id (jfunc);
1358 if (info->ipcp_orig_node)
1360 if (info->known_contexts.exists ())
1361 srcctx = info->known_contexts[srcidx];
1363 else
1365 if (!info->lattices
1366 || srcidx >= ipa_get_param_count (info))
1367 return ctx;
1368 ipcp_lattice<ipa_polymorphic_call_context> *lat;
1369 lat = ipa_get_poly_ctx_lat (info, srcidx);
1370 if (!lat->is_single_const ())
1371 return ctx;
1372 srcctx = lat->values->value;
1374 if (srcctx.useless_p ())
1375 return ctx;
1376 if (jfunc->type == IPA_JF_ANCESTOR)
1377 srcctx.offset_by (ipa_get_jf_ancestor_offset (jfunc));
1378 if (!type_preserved)
1379 srcctx.possible_dynamic_type_change (cs->in_polymorphic_cdtor);
1380 srcctx.combine_with (ctx);
1381 return srcctx;
1384 return ctx;
1387 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
1388 bottom, not containing a variable component and without any known value at
1389 the same time. */
1391 DEBUG_FUNCTION void
1392 ipcp_verify_propagated_values (void)
1394 struct cgraph_node *node;
1396 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
1398 struct ipa_node_params *info = IPA_NODE_REF (node);
1399 int i, count = ipa_get_param_count (info);
1401 for (i = 0; i < count; i++)
1403 ipcp_lattice<tree> *lat = ipa_get_scalar_lat (info, i);
1405 if (!lat->bottom
1406 && !lat->contains_variable
1407 && lat->values_count == 0)
1409 if (dump_file)
1411 symtab->dump (dump_file);
1412 fprintf (dump_file, "\nIPA lattices after constant "
1413 "propagation, before gcc_unreachable:\n");
1414 print_all_lattices (dump_file, true, false);
1417 gcc_unreachable ();
1423 /* Return true iff X and Y should be considered equal values by IPA-CP. */
1425 static bool
1426 values_equal_for_ipcp_p (tree x, tree y)
1428 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
1430 if (x == y)
1431 return true;
1433 if (TREE_CODE (x) == ADDR_EXPR
1434 && TREE_CODE (y) == ADDR_EXPR
1435 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
1436 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
1437 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
1438 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
1439 else
1440 return operand_equal_p (x, y, 0);
1443 /* Return true iff X and Y should be considered equal contexts by IPA-CP. */
1445 static bool
1446 values_equal_for_ipcp_p (ipa_polymorphic_call_context x,
1447 ipa_polymorphic_call_context y)
1449 return x.equal_to (y);
1453 /* Add a new value source to the value represented by THIS, marking that a
1454 value comes from edge CS and (if the underlying jump function is a
1455 pass-through or an ancestor one) from a caller value SRC_VAL of a caller
1456 parameter described by SRC_INDEX. OFFSET is negative if the source was the
1457 scalar value of the parameter itself or the offset within an aggregate. */
1459 template <typename valtype>
1460 void
1461 ipcp_value<valtype>::add_source (cgraph_edge *cs, ipcp_value *src_val,
1462 int src_idx, HOST_WIDE_INT offset)
1464 ipcp_value_source<valtype> *src;
1466 src = new (ipcp_sources_pool.allocate ()) ipcp_value_source<valtype>;
1467 src->offset = offset;
1468 src->cs = cs;
1469 src->val = src_val;
1470 src->index = src_idx;
1472 src->next = sources;
1473 sources = src;
1476 /* Allocate a new ipcp_value holding a tree constant, initialize its value to
1477 SOURCE and clear all other fields. */
1479 static ipcp_value<tree> *
1480 allocate_and_init_ipcp_value (tree source)
1482 ipcp_value<tree> *val;
1484 val = new (ipcp_cst_values_pool.allocate ()) ipcp_value<tree>();
1485 val->value = source;
1486 return val;
1489 /* Allocate a new ipcp_value holding a polymorphic context, initialize its
1490 value to SOURCE and clear all other fields. */
1492 static ipcp_value<ipa_polymorphic_call_context> *
1493 allocate_and_init_ipcp_value (ipa_polymorphic_call_context source)
1495 ipcp_value<ipa_polymorphic_call_context> *val;
1497 // TODO
1498 val = new (ipcp_poly_ctx_values_pool.allocate ())
1499 ipcp_value<ipa_polymorphic_call_context>();
1500 val->value = source;
1501 return val;
1504 /* Try to add NEWVAL to LAT, potentially creating a new ipcp_value for it. CS,
1505 SRC_VAL SRC_INDEX and OFFSET are meant for add_source and have the same
1506 meaning. OFFSET -1 means the source is scalar and not a part of an
1507 aggregate. */
1509 template <typename valtype>
1510 bool
1511 ipcp_lattice<valtype>::add_value (valtype newval, cgraph_edge *cs,
1512 ipcp_value<valtype> *src_val,
1513 int src_idx, HOST_WIDE_INT offset)
1515 ipcp_value<valtype> *val;
1517 if (bottom)
1518 return false;
1520 for (val = values; val; val = val->next)
1521 if (values_equal_for_ipcp_p (val->value, newval))
1523 if (ipa_edge_within_scc (cs))
1525 ipcp_value_source<valtype> *s;
1526 for (s = val->sources; s; s = s->next)
1527 if (s->cs == cs)
1528 break;
1529 if (s)
1530 return false;
1533 val->add_source (cs, src_val, src_idx, offset);
1534 return false;
1537 if (values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
1539 /* We can only free sources, not the values themselves, because sources
1540 of other values in this SCC might point to them. */
1541 for (val = values; val; val = val->next)
1543 while (val->sources)
1545 ipcp_value_source<valtype> *src = val->sources;
1546 val->sources = src->next;
1547 ipcp_sources_pool.remove ((ipcp_value_source<tree>*)src);
1551 values = NULL;
1552 return set_to_bottom ();
1555 values_count++;
1556 val = allocate_and_init_ipcp_value (newval);
1557 val->add_source (cs, src_val, src_idx, offset);
1558 val->next = values;
1559 values = val;
1560 return true;
1563 /* Propagate values through a pass-through jump function JFUNC associated with
1564 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1565 is the index of the source parameter. */
1567 static bool
1568 propagate_vals_across_pass_through (cgraph_edge *cs, ipa_jump_func *jfunc,
1569 ipcp_lattice<tree> *src_lat,
1570 ipcp_lattice<tree> *dest_lat, int src_idx)
1572 ipcp_value<tree> *src_val;
1573 bool ret = false;
1575 /* Do not create new values when propagating within an SCC because if there
1576 are arithmetic functions with circular dependencies, there is infinite
1577 number of them and we would just make lattices bottom. */
1578 if ((ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1579 && ipa_edge_within_scc (cs))
1580 ret = dest_lat->set_contains_variable ();
1581 else
1582 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1584 tree cstval = ipa_get_jf_pass_through_result (jfunc, src_val->value);
1586 if (cstval)
1587 ret |= dest_lat->add_value (cstval, cs, src_val, src_idx);
1588 else
1589 ret |= dest_lat->set_contains_variable ();
1592 return ret;
1595 /* Propagate values through an ancestor jump function JFUNC associated with
1596 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1597 is the index of the source parameter. */
1599 static bool
1600 propagate_vals_across_ancestor (struct cgraph_edge *cs,
1601 struct ipa_jump_func *jfunc,
1602 ipcp_lattice<tree> *src_lat,
1603 ipcp_lattice<tree> *dest_lat, int src_idx)
1605 ipcp_value<tree> *src_val;
1606 bool ret = false;
1608 if (ipa_edge_within_scc (cs))
1609 return dest_lat->set_contains_variable ();
1611 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1613 tree t = ipa_get_jf_ancestor_result (jfunc, src_val->value);
1615 if (t)
1616 ret |= dest_lat->add_value (t, cs, src_val, src_idx);
1617 else
1618 ret |= dest_lat->set_contains_variable ();
1621 return ret;
1624 /* Propagate scalar values across jump function JFUNC that is associated with
1625 edge CS and put the values into DEST_LAT. */
1627 static bool
1628 propagate_scalar_across_jump_function (struct cgraph_edge *cs,
1629 struct ipa_jump_func *jfunc,
1630 ipcp_lattice<tree> *dest_lat)
1632 if (dest_lat->bottom)
1633 return false;
1635 if (jfunc->type == IPA_JF_CONST)
1637 tree val = ipa_get_jf_constant (jfunc);
1638 return dest_lat->add_value (val, cs, NULL, 0);
1640 else if (jfunc->type == IPA_JF_PASS_THROUGH
1641 || jfunc->type == IPA_JF_ANCESTOR)
1643 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1644 ipcp_lattice<tree> *src_lat;
1645 int src_idx;
1646 bool ret;
1648 if (jfunc->type == IPA_JF_PASS_THROUGH)
1649 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1650 else
1651 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1653 src_lat = ipa_get_scalar_lat (caller_info, src_idx);
1654 if (src_lat->bottom)
1655 return dest_lat->set_contains_variable ();
1657 /* If we would need to clone the caller and cannot, do not propagate. */
1658 if (!ipcp_versionable_function_p (cs->caller)
1659 && (src_lat->contains_variable
1660 || (src_lat->values_count > 1)))
1661 return dest_lat->set_contains_variable ();
1663 if (jfunc->type == IPA_JF_PASS_THROUGH)
1664 ret = propagate_vals_across_pass_through (cs, jfunc, src_lat,
1665 dest_lat, src_idx);
1666 else
1667 ret = propagate_vals_across_ancestor (cs, jfunc, src_lat, dest_lat,
1668 src_idx);
1670 if (src_lat->contains_variable)
1671 ret |= dest_lat->set_contains_variable ();
1673 return ret;
1676 /* TODO: We currently do not handle member method pointers in IPA-CP (we only
1677 use it for indirect inlining), we should propagate them too. */
1678 return dest_lat->set_contains_variable ();
1681 /* Propagate scalar values across jump function JFUNC that is associated with
1682 edge CS and describes argument IDX and put the values into DEST_LAT. */
1684 static bool
1685 propagate_context_across_jump_function (cgraph_edge *cs,
1686 ipa_jump_func *jfunc, int idx,
1687 ipcp_lattice<ipa_polymorphic_call_context> *dest_lat)
1689 ipa_edge_args *args = IPA_EDGE_REF (cs);
1690 if (dest_lat->bottom)
1691 return false;
1692 bool ret = false;
1693 bool added_sth = false;
1694 bool type_preserved = true;
1696 ipa_polymorphic_call_context edge_ctx, *edge_ctx_ptr
1697 = ipa_get_ith_polymorhic_call_context (args, idx);
1699 if (edge_ctx_ptr)
1700 edge_ctx = *edge_ctx_ptr;
1702 if (jfunc->type == IPA_JF_PASS_THROUGH
1703 || jfunc->type == IPA_JF_ANCESTOR)
1705 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1706 int src_idx;
1707 ipcp_lattice<ipa_polymorphic_call_context> *src_lat;
1709 /* TODO: Once we figure out how to propagate speculations, it will
1710 probably be a good idea to switch to speculation if type_preserved is
1711 not set instead of punting. */
1712 if (jfunc->type == IPA_JF_PASS_THROUGH)
1714 if (ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1715 goto prop_fail;
1716 type_preserved = ipa_get_jf_pass_through_type_preserved (jfunc);
1717 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1719 else
1721 type_preserved = ipa_get_jf_ancestor_type_preserved (jfunc);
1722 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1725 src_lat = ipa_get_poly_ctx_lat (caller_info, src_idx);
1726 /* If we would need to clone the caller and cannot, do not propagate. */
1727 if (!ipcp_versionable_function_p (cs->caller)
1728 && (src_lat->contains_variable
1729 || (src_lat->values_count > 1)))
1730 goto prop_fail;
1732 ipcp_value<ipa_polymorphic_call_context> *src_val;
1733 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1735 ipa_polymorphic_call_context cur = src_val->value;
1737 if (!type_preserved)
1738 cur.possible_dynamic_type_change (cs->in_polymorphic_cdtor);
1739 if (jfunc->type == IPA_JF_ANCESTOR)
1740 cur.offset_by (ipa_get_jf_ancestor_offset (jfunc));
1741 /* TODO: In cases we know how the context is going to be used,
1742 we can improve the result by passing proper OTR_TYPE. */
1743 cur.combine_with (edge_ctx);
1744 if (!cur.useless_p ())
1746 if (src_lat->contains_variable
1747 && !edge_ctx.equal_to (cur))
1748 ret |= dest_lat->set_contains_variable ();
1749 ret |= dest_lat->add_value (cur, cs, src_val, src_idx);
1750 added_sth = true;
1756 prop_fail:
1757 if (!added_sth)
1759 if (!edge_ctx.useless_p ())
1760 ret |= dest_lat->add_value (edge_ctx, cs);
1761 else
1762 ret |= dest_lat->set_contains_variable ();
1765 return ret;
1768 /* Propagate bits across jfunc that is associated with
1769 edge cs and update dest_lattice accordingly. */
1771 bool
1772 propagate_bits_across_jump_function (cgraph_edge *cs, int idx,
1773 ipa_jump_func *jfunc,
1774 ipcp_bits_lattice *dest_lattice)
1776 if (dest_lattice->bottom_p ())
1777 return false;
1779 enum availability availability;
1780 cgraph_node *callee = cs->callee->function_symbol (&availability);
1781 struct ipa_node_params *callee_info = IPA_NODE_REF (callee);
1782 tree parm_type = ipa_get_type (callee_info, idx);
1784 /* For K&R C programs, ipa_get_type() could return NULL_TREE.
1785 Avoid the transform for these cases. */
1786 if (!parm_type)
1788 if (dump_file && (dump_flags & TDF_DETAILS))
1789 fprintf (dump_file, "Setting dest_lattice to bottom, because"
1790 " param %i type is NULL for %s\n", idx,
1791 cs->callee->name ());
1793 return dest_lattice->set_to_bottom ();
1796 unsigned precision = TYPE_PRECISION (parm_type);
1797 signop sgn = TYPE_SIGN (parm_type);
1799 if (jfunc->type == IPA_JF_PASS_THROUGH
1800 || jfunc->type == IPA_JF_ANCESTOR)
1802 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1803 tree operand = NULL_TREE;
1804 enum tree_code code;
1805 unsigned src_idx;
1807 if (jfunc->type == IPA_JF_PASS_THROUGH)
1809 code = ipa_get_jf_pass_through_operation (jfunc);
1810 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1811 if (code != NOP_EXPR)
1812 operand = ipa_get_jf_pass_through_operand (jfunc);
1814 else
1816 code = POINTER_PLUS_EXPR;
1817 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1818 unsigned HOST_WIDE_INT offset = ipa_get_jf_ancestor_offset (jfunc) / BITS_PER_UNIT;
1819 operand = build_int_cstu (size_type_node, offset);
1822 struct ipcp_param_lattices *src_lats
1823 = ipa_get_parm_lattices (caller_info, src_idx);
1825 /* Try to propagate bits if src_lattice is bottom, but jfunc is known.
1826 for eg consider:
1827 int f(int x)
1829 g (x & 0xff);
1831 Assume lattice for x is bottom, however we can still propagate
1832 result of x & 0xff == 0xff, which gets computed during ccp1 pass
1833 and we store it in jump function during analysis stage. */
1835 if (src_lats->bits_lattice.bottom_p ()
1836 && jfunc->bits)
1837 return dest_lattice->meet_with (jfunc->bits->value, jfunc->bits->mask,
1838 precision);
1839 else
1840 return dest_lattice->meet_with (src_lats->bits_lattice, precision, sgn,
1841 code, operand);
1844 else if (jfunc->type == IPA_JF_ANCESTOR)
1845 return dest_lattice->set_to_bottom ();
1846 else if (jfunc->bits)
1847 return dest_lattice->meet_with (jfunc->bits->value, jfunc->bits->mask,
1848 precision);
1849 else
1850 return dest_lattice->set_to_bottom ();
1853 /* Emulate effects of unary OPERATION and/or conversion from SRC_TYPE to
1854 DST_TYPE on value range in SRC_VR and store it to DST_VR. Return true if
1855 the result is a range or an anti-range. */
1857 static bool
1858 ipa_vr_operation_and_type_effects (value_range *dst_vr, value_range *src_vr,
1859 enum tree_code operation,
1860 tree dst_type, tree src_type)
1862 memset (dst_vr, 0, sizeof (*dst_vr));
1863 extract_range_from_unary_expr (dst_vr, operation, dst_type, src_vr, src_type);
1864 if (dst_vr->type == VR_RANGE || dst_vr->type == VR_ANTI_RANGE)
1865 return true;
1866 else
1867 return false;
1870 /* Propagate value range across jump function JFUNC that is associated with
1871 edge CS with param of callee of PARAM_TYPE and update DEST_PLATS
1872 accordingly. */
1874 static bool
1875 propagate_vr_across_jump_function (cgraph_edge *cs, ipa_jump_func *jfunc,
1876 struct ipcp_param_lattices *dest_plats,
1877 tree param_type)
1879 ipcp_vr_lattice *dest_lat = &dest_plats->m_value_range;
1881 if (dest_lat->bottom_p ())
1882 return false;
1884 if (!param_type
1885 || (!INTEGRAL_TYPE_P (param_type)
1886 && !POINTER_TYPE_P (param_type)))
1887 return dest_lat->set_to_bottom ();
1889 if (jfunc->type == IPA_JF_PASS_THROUGH)
1891 enum tree_code operation = ipa_get_jf_pass_through_operation (jfunc);
1893 if (TREE_CODE_CLASS (operation) == tcc_unary)
1895 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1896 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1897 tree operand_type = ipa_get_type (caller_info, src_idx);
1898 struct ipcp_param_lattices *src_lats
1899 = ipa_get_parm_lattices (caller_info, src_idx);
1901 if (src_lats->m_value_range.bottom_p ())
1902 return dest_lat->set_to_bottom ();
1903 value_range vr;
1904 if (ipa_vr_operation_and_type_effects (&vr,
1905 &src_lats->m_value_range.m_vr,
1906 operation, param_type,
1907 operand_type))
1908 return dest_lat->meet_with (&vr);
1911 else if (jfunc->type == IPA_JF_CONST)
1913 tree val = ipa_get_jf_constant (jfunc);
1914 if (TREE_CODE (val) == INTEGER_CST)
1916 val = fold_convert (param_type, val);
1917 if (TREE_OVERFLOW_P (val))
1918 val = drop_tree_overflow (val);
1920 value_range tmpvr;
1921 memset (&tmpvr, 0, sizeof (tmpvr));
1922 tmpvr.type = VR_RANGE;
1923 tmpvr.min = val;
1924 tmpvr.max = val;
1925 return dest_lat->meet_with (&tmpvr);
1929 value_range vr;
1930 if (jfunc->m_vr
1931 && ipa_vr_operation_and_type_effects (&vr, jfunc->m_vr, NOP_EXPR,
1932 param_type,
1933 TREE_TYPE (jfunc->m_vr->min)))
1934 return dest_lat->meet_with (&vr);
1935 else
1936 return dest_lat->set_to_bottom ();
1939 /* If DEST_PLATS already has aggregate items, check that aggs_by_ref matches
1940 NEW_AGGS_BY_REF and if not, mark all aggs as bottoms and return true (in all
1941 other cases, return false). If there are no aggregate items, set
1942 aggs_by_ref to NEW_AGGS_BY_REF. */
1944 static bool
1945 set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
1946 bool new_aggs_by_ref)
1948 if (dest_plats->aggs)
1950 if (dest_plats->aggs_by_ref != new_aggs_by_ref)
1952 set_agg_lats_to_bottom (dest_plats);
1953 return true;
1956 else
1957 dest_plats->aggs_by_ref = new_aggs_by_ref;
1958 return false;
1961 /* Walk aggregate lattices in DEST_PLATS from ***AGLAT on, until ***aglat is an
1962 already existing lattice for the given OFFSET and SIZE, marking all skipped
1963 lattices as containing variable and checking for overlaps. If there is no
1964 already existing lattice for the OFFSET and VAL_SIZE, create one, initialize
1965 it with offset, size and contains_variable to PRE_EXISTING, and return true,
1966 unless there are too many already. If there are two many, return false. If
1967 there are overlaps turn whole DEST_PLATS to bottom and return false. If any
1968 skipped lattices were newly marked as containing variable, set *CHANGE to
1969 true. */
1971 static bool
1972 merge_agg_lats_step (struct ipcp_param_lattices *dest_plats,
1973 HOST_WIDE_INT offset, HOST_WIDE_INT val_size,
1974 struct ipcp_agg_lattice ***aglat,
1975 bool pre_existing, bool *change)
1977 gcc_checking_assert (offset >= 0);
1979 while (**aglat && (**aglat)->offset < offset)
1981 if ((**aglat)->offset + (**aglat)->size > offset)
1983 set_agg_lats_to_bottom (dest_plats);
1984 return false;
1986 *change |= (**aglat)->set_contains_variable ();
1987 *aglat = &(**aglat)->next;
1990 if (**aglat && (**aglat)->offset == offset)
1992 if ((**aglat)->size != val_size
1993 || ((**aglat)->next
1994 && (**aglat)->next->offset < offset + val_size))
1996 set_agg_lats_to_bottom (dest_plats);
1997 return false;
1999 gcc_checking_assert (!(**aglat)->next
2000 || (**aglat)->next->offset >= offset + val_size);
2001 return true;
2003 else
2005 struct ipcp_agg_lattice *new_al;
2007 if (**aglat && (**aglat)->offset < offset + val_size)
2009 set_agg_lats_to_bottom (dest_plats);
2010 return false;
2012 if (dest_plats->aggs_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
2013 return false;
2014 dest_plats->aggs_count++;
2015 new_al = ipcp_agg_lattice_pool.allocate ();
2016 memset (new_al, 0, sizeof (*new_al));
2018 new_al->offset = offset;
2019 new_al->size = val_size;
2020 new_al->contains_variable = pre_existing;
2022 new_al->next = **aglat;
2023 **aglat = new_al;
2024 return true;
2028 /* Set all AGLAT and all other aggregate lattices reachable by next pointers as
2029 containing an unknown value. */
2031 static bool
2032 set_chain_of_aglats_contains_variable (struct ipcp_agg_lattice *aglat)
2034 bool ret = false;
2035 while (aglat)
2037 ret |= aglat->set_contains_variable ();
2038 aglat = aglat->next;
2040 return ret;
2043 /* Merge existing aggregate lattices in SRC_PLATS to DEST_PLATS, subtracting
2044 DELTA_OFFSET. CS is the call graph edge and SRC_IDX the index of the source
2045 parameter used for lattice value sources. Return true if DEST_PLATS changed
2046 in any way. */
2048 static bool
2049 merge_aggregate_lattices (struct cgraph_edge *cs,
2050 struct ipcp_param_lattices *dest_plats,
2051 struct ipcp_param_lattices *src_plats,
2052 int src_idx, HOST_WIDE_INT offset_delta)
2054 bool pre_existing = dest_plats->aggs != NULL;
2055 struct ipcp_agg_lattice **dst_aglat;
2056 bool ret = false;
2058 if (set_check_aggs_by_ref (dest_plats, src_plats->aggs_by_ref))
2059 return true;
2060 if (src_plats->aggs_bottom)
2061 return set_agg_lats_contain_variable (dest_plats);
2062 if (src_plats->aggs_contain_variable)
2063 ret |= set_agg_lats_contain_variable (dest_plats);
2064 dst_aglat = &dest_plats->aggs;
2066 for (struct ipcp_agg_lattice *src_aglat = src_plats->aggs;
2067 src_aglat;
2068 src_aglat = src_aglat->next)
2070 HOST_WIDE_INT new_offset = src_aglat->offset - offset_delta;
2072 if (new_offset < 0)
2073 continue;
2074 if (merge_agg_lats_step (dest_plats, new_offset, src_aglat->size,
2075 &dst_aglat, pre_existing, &ret))
2077 struct ipcp_agg_lattice *new_al = *dst_aglat;
2079 dst_aglat = &(*dst_aglat)->next;
2080 if (src_aglat->bottom)
2082 ret |= new_al->set_contains_variable ();
2083 continue;
2085 if (src_aglat->contains_variable)
2086 ret |= new_al->set_contains_variable ();
2087 for (ipcp_value<tree> *val = src_aglat->values;
2088 val;
2089 val = val->next)
2090 ret |= new_al->add_value (val->value, cs, val, src_idx,
2091 src_aglat->offset);
2093 else if (dest_plats->aggs_bottom)
2094 return true;
2096 ret |= set_chain_of_aglats_contains_variable (*dst_aglat);
2097 return ret;
2100 /* Determine whether there is anything to propagate FROM SRC_PLATS through a
2101 pass-through JFUNC and if so, whether it has conform and conforms to the
2102 rules about propagating values passed by reference. */
2104 static bool
2105 agg_pass_through_permissible_p (struct ipcp_param_lattices *src_plats,
2106 struct ipa_jump_func *jfunc)
2108 return src_plats->aggs
2109 && (!src_plats->aggs_by_ref
2110 || ipa_get_jf_pass_through_agg_preserved (jfunc));
2113 /* Propagate scalar values across jump function JFUNC that is associated with
2114 edge CS and put the values into DEST_LAT. */
2116 static bool
2117 propagate_aggs_across_jump_function (struct cgraph_edge *cs,
2118 struct ipa_jump_func *jfunc,
2119 struct ipcp_param_lattices *dest_plats)
2121 bool ret = false;
2123 if (dest_plats->aggs_bottom)
2124 return false;
2126 if (jfunc->type == IPA_JF_PASS_THROUGH
2127 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2129 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2130 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
2131 struct ipcp_param_lattices *src_plats;
2133 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
2134 if (agg_pass_through_permissible_p (src_plats, jfunc))
2136 /* Currently we do not produce clobber aggregate jump
2137 functions, replace with merging when we do. */
2138 gcc_assert (!jfunc->agg.items);
2139 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats,
2140 src_idx, 0);
2142 else
2143 ret |= set_agg_lats_contain_variable (dest_plats);
2145 else if (jfunc->type == IPA_JF_ANCESTOR
2146 && ipa_get_jf_ancestor_agg_preserved (jfunc))
2148 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2149 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
2150 struct ipcp_param_lattices *src_plats;
2152 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
2153 if (src_plats->aggs && src_plats->aggs_by_ref)
2155 /* Currently we do not produce clobber aggregate jump
2156 functions, replace with merging when we do. */
2157 gcc_assert (!jfunc->agg.items);
2158 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats, src_idx,
2159 ipa_get_jf_ancestor_offset (jfunc));
2161 else if (!src_plats->aggs_by_ref)
2162 ret |= set_agg_lats_to_bottom (dest_plats);
2163 else
2164 ret |= set_agg_lats_contain_variable (dest_plats);
2166 else if (jfunc->agg.items)
2168 bool pre_existing = dest_plats->aggs != NULL;
2169 struct ipcp_agg_lattice **aglat = &dest_plats->aggs;
2170 struct ipa_agg_jf_item *item;
2171 int i;
2173 if (set_check_aggs_by_ref (dest_plats, jfunc->agg.by_ref))
2174 return true;
2176 FOR_EACH_VEC_ELT (*jfunc->agg.items, i, item)
2178 HOST_WIDE_INT val_size;
2180 if (item->offset < 0)
2181 continue;
2182 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2183 val_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (item->value)));
2185 if (merge_agg_lats_step (dest_plats, item->offset, val_size,
2186 &aglat, pre_existing, &ret))
2188 ret |= (*aglat)->add_value (item->value, cs, NULL, 0, 0);
2189 aglat = &(*aglat)->next;
2191 else if (dest_plats->aggs_bottom)
2192 return true;
2195 ret |= set_chain_of_aglats_contains_variable (*aglat);
2197 else
2198 ret |= set_agg_lats_contain_variable (dest_plats);
2200 return ret;
2203 /* Return true if on the way cfrom CS->caller to the final (non-alias and
2204 non-thunk) destination, the call passes through a thunk. */
2206 static bool
2207 call_passes_through_thunk_p (cgraph_edge *cs)
2209 cgraph_node *alias_or_thunk = cs->callee;
2210 while (alias_or_thunk->alias)
2211 alias_or_thunk = alias_or_thunk->get_alias_target ();
2212 return alias_or_thunk->thunk.thunk_p;
2215 /* Propagate constants from the caller to the callee of CS. INFO describes the
2216 caller. */
2218 static bool
2219 propagate_constants_across_call (struct cgraph_edge *cs)
2221 struct ipa_node_params *callee_info;
2222 enum availability availability;
2223 cgraph_node *callee;
2224 struct ipa_edge_args *args;
2225 bool ret = false;
2226 int i, args_count, parms_count;
2228 callee = cs->callee->function_symbol (&availability);
2229 if (!callee->definition)
2230 return false;
2231 gcc_checking_assert (callee->has_gimple_body_p ());
2232 callee_info = IPA_NODE_REF (callee);
2234 args = IPA_EDGE_REF (cs);
2235 args_count = ipa_get_cs_argument_count (args);
2236 parms_count = ipa_get_param_count (callee_info);
2237 if (parms_count == 0)
2238 return false;
2240 /* No propagation through instrumentation thunks is available yet.
2241 It should be possible with proper mapping of call args and
2242 instrumented callee params in the propagation loop below. But
2243 this case mostly occurs when legacy code calls instrumented code
2244 and it is not a primary target for optimizations.
2245 We detect instrumentation thunks in aliases and thunks chain by
2246 checking instrumentation_clone flag for chain source and target.
2247 Going through instrumentation thunks we always have it changed
2248 from 0 to 1 and all other nodes do not change it. */
2249 if (!cs->callee->instrumentation_clone
2250 && callee->instrumentation_clone)
2252 for (i = 0; i < parms_count; i++)
2253 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
2254 i));
2255 return ret;
2258 /* If this call goes through a thunk we must not propagate to the first (0th)
2259 parameter. However, we might need to uncover a thunk from below a series
2260 of aliases first. */
2261 if (call_passes_through_thunk_p (cs))
2263 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
2264 0));
2265 i = 1;
2267 else
2268 i = 0;
2270 for (; (i < args_count) && (i < parms_count); i++)
2272 struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
2273 struct ipcp_param_lattices *dest_plats;
2274 tree param_type = ipa_get_type (callee_info, i);
2276 dest_plats = ipa_get_parm_lattices (callee_info, i);
2277 if (availability == AVAIL_INTERPOSABLE)
2278 ret |= set_all_contains_variable (dest_plats);
2279 else
2281 ret |= propagate_scalar_across_jump_function (cs, jump_func,
2282 &dest_plats->itself);
2283 ret |= propagate_context_across_jump_function (cs, jump_func, i,
2284 &dest_plats->ctxlat);
2286 |= propagate_bits_across_jump_function (cs, i, jump_func,
2287 &dest_plats->bits_lattice);
2288 ret |= propagate_aggs_across_jump_function (cs, jump_func,
2289 dest_plats);
2290 if (opt_for_fn (callee->decl, flag_ipa_vrp))
2291 ret |= propagate_vr_across_jump_function (cs, jump_func,
2292 dest_plats, param_type);
2293 else
2294 ret |= dest_plats->m_value_range.set_to_bottom ();
2297 for (; i < parms_count; i++)
2298 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info, i));
2300 return ret;
2303 /* If an indirect edge IE can be turned into a direct one based on KNOWN_VALS
2304 KNOWN_CONTEXTS, KNOWN_AGGS or AGG_REPS return the destination. The latter
2305 three can be NULL. If AGG_REPS is not NULL, KNOWN_AGGS is ignored. */
2307 static tree
2308 ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
2309 vec<tree> known_csts,
2310 vec<ipa_polymorphic_call_context> known_contexts,
2311 vec<ipa_agg_jump_function_p> known_aggs,
2312 struct ipa_agg_replacement_value *agg_reps,
2313 bool *speculative)
2315 int param_index = ie->indirect_info->param_index;
2316 HOST_WIDE_INT anc_offset;
2317 tree t;
2318 tree target = NULL;
2320 *speculative = false;
2322 if (param_index == -1
2323 || known_csts.length () <= (unsigned int) param_index)
2324 return NULL_TREE;
2326 if (!ie->indirect_info->polymorphic)
2328 tree t;
2330 if (ie->indirect_info->agg_contents)
2332 t = NULL;
2333 if (agg_reps && ie->indirect_info->guaranteed_unmodified)
2335 while (agg_reps)
2337 if (agg_reps->index == param_index
2338 && agg_reps->offset == ie->indirect_info->offset
2339 && agg_reps->by_ref == ie->indirect_info->by_ref)
2341 t = agg_reps->value;
2342 break;
2344 agg_reps = agg_reps->next;
2347 if (!t)
2349 struct ipa_agg_jump_function *agg;
2350 if (known_aggs.length () > (unsigned int) param_index)
2351 agg = known_aggs[param_index];
2352 else
2353 agg = NULL;
2354 bool from_global_constant;
2355 t = ipa_find_agg_cst_for_param (agg, known_csts[param_index],
2356 ie->indirect_info->offset,
2357 ie->indirect_info->by_ref,
2358 &from_global_constant);
2359 if (t
2360 && !from_global_constant
2361 && !ie->indirect_info->guaranteed_unmodified)
2362 t = NULL_TREE;
2365 else
2366 t = known_csts[param_index];
2368 if (t
2369 && TREE_CODE (t) == ADDR_EXPR
2370 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
2371 return TREE_OPERAND (t, 0);
2372 else
2373 return NULL_TREE;
2376 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
2377 return NULL_TREE;
2379 gcc_assert (!ie->indirect_info->agg_contents);
2380 anc_offset = ie->indirect_info->offset;
2382 t = NULL;
2384 /* Try to work out value of virtual table pointer value in replacemnets. */
2385 if (!t && agg_reps && !ie->indirect_info->by_ref)
2387 while (agg_reps)
2389 if (agg_reps->index == param_index
2390 && agg_reps->offset == ie->indirect_info->offset
2391 && agg_reps->by_ref)
2393 t = agg_reps->value;
2394 break;
2396 agg_reps = agg_reps->next;
2400 /* Try to work out value of virtual table pointer value in known
2401 aggregate values. */
2402 if (!t && known_aggs.length () > (unsigned int) param_index
2403 && !ie->indirect_info->by_ref)
2405 struct ipa_agg_jump_function *agg;
2406 agg = known_aggs[param_index];
2407 t = ipa_find_agg_cst_for_param (agg, known_csts[param_index],
2408 ie->indirect_info->offset, true);
2411 /* If we found the virtual table pointer, lookup the target. */
2412 if (t)
2414 tree vtable;
2415 unsigned HOST_WIDE_INT offset;
2416 if (vtable_pointer_value_to_vtable (t, &vtable, &offset))
2418 bool can_refer;
2419 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2420 vtable, offset, &can_refer);
2421 if (can_refer)
2423 if (!target
2424 || (TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
2425 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
2426 || !possible_polymorphic_call_target_p
2427 (ie, cgraph_node::get (target)))
2429 /* Do not speculate builtin_unreachable, it is stupid! */
2430 if (ie->indirect_info->vptr_changed)
2431 return NULL;
2432 target = ipa_impossible_devirt_target (ie, target);
2434 *speculative = ie->indirect_info->vptr_changed;
2435 if (!*speculative)
2436 return target;
2441 /* Do we know the constant value of pointer? */
2442 if (!t)
2443 t = known_csts[param_index];
2445 gcc_checking_assert (!t || TREE_CODE (t) != TREE_BINFO);
2447 ipa_polymorphic_call_context context;
2448 if (known_contexts.length () > (unsigned int) param_index)
2450 context = known_contexts[param_index];
2451 context.offset_by (anc_offset);
2452 if (ie->indirect_info->vptr_changed)
2453 context.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
2454 ie->indirect_info->otr_type);
2455 if (t)
2457 ipa_polymorphic_call_context ctx2 = ipa_polymorphic_call_context
2458 (t, ie->indirect_info->otr_type, anc_offset);
2459 if (!ctx2.useless_p ())
2460 context.combine_with (ctx2, ie->indirect_info->otr_type);
2463 else if (t)
2465 context = ipa_polymorphic_call_context (t, ie->indirect_info->otr_type,
2466 anc_offset);
2467 if (ie->indirect_info->vptr_changed)
2468 context.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
2469 ie->indirect_info->otr_type);
2471 else
2472 return NULL_TREE;
2474 vec <cgraph_node *>targets;
2475 bool final;
2477 targets = possible_polymorphic_call_targets
2478 (ie->indirect_info->otr_type,
2479 ie->indirect_info->otr_token,
2480 context, &final);
2481 if (!final || targets.length () > 1)
2483 struct cgraph_node *node;
2484 if (*speculative)
2485 return target;
2486 if (!opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
2487 || ie->speculative || !ie->maybe_hot_p ())
2488 return NULL;
2489 node = try_speculative_devirtualization (ie->indirect_info->otr_type,
2490 ie->indirect_info->otr_token,
2491 context);
2492 if (node)
2494 *speculative = true;
2495 target = node->decl;
2497 else
2498 return NULL;
2500 else
2502 *speculative = false;
2503 if (targets.length () == 1)
2504 target = targets[0]->decl;
2505 else
2506 target = ipa_impossible_devirt_target (ie, NULL_TREE);
2509 if (target && !possible_polymorphic_call_target_p (ie,
2510 cgraph_node::get (target)))
2512 if (*speculative)
2513 return NULL;
2514 target = ipa_impossible_devirt_target (ie, target);
2517 return target;
2521 /* If an indirect edge IE can be turned into a direct one based on KNOWN_CSTS,
2522 KNOWN_CONTEXTS (which can be vNULL) or KNOWN_AGGS (which also can be vNULL)
2523 return the destination. */
2525 tree
2526 ipa_get_indirect_edge_target (struct cgraph_edge *ie,
2527 vec<tree> known_csts,
2528 vec<ipa_polymorphic_call_context> known_contexts,
2529 vec<ipa_agg_jump_function_p> known_aggs,
2530 bool *speculative)
2532 return ipa_get_indirect_edge_target_1 (ie, known_csts, known_contexts,
2533 known_aggs, NULL, speculative);
2536 /* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
2537 and KNOWN_CONTEXTS. */
2539 static int
2540 devirtualization_time_bonus (struct cgraph_node *node,
2541 vec<tree> known_csts,
2542 vec<ipa_polymorphic_call_context> known_contexts,
2543 vec<ipa_agg_jump_function_p> known_aggs)
2545 struct cgraph_edge *ie;
2546 int res = 0;
2548 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
2550 struct cgraph_node *callee;
2551 struct ipa_fn_summary *isummary;
2552 enum availability avail;
2553 tree target;
2554 bool speculative;
2556 target = ipa_get_indirect_edge_target (ie, known_csts, known_contexts,
2557 known_aggs, &speculative);
2558 if (!target)
2559 continue;
2561 /* Only bare minimum benefit for clearly un-inlineable targets. */
2562 res += 1;
2563 callee = cgraph_node::get (target);
2564 if (!callee || !callee->definition)
2565 continue;
2566 callee = callee->function_symbol (&avail);
2567 if (avail < AVAIL_AVAILABLE)
2568 continue;
2569 isummary = ipa_fn_summaries->get (callee);
2570 if (!isummary->inlinable)
2571 continue;
2573 /* FIXME: The values below need re-considering and perhaps also
2574 integrating into the cost metrics, at lest in some very basic way. */
2575 if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
2576 res += 31 / ((int)speculative + 1);
2577 else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
2578 res += 15 / ((int)speculative + 1);
2579 else if (isummary->size <= MAX_INLINE_INSNS_AUTO
2580 || DECL_DECLARED_INLINE_P (callee->decl))
2581 res += 7 / ((int)speculative + 1);
2584 return res;
2587 /* Return time bonus incurred because of HINTS. */
2589 static int
2590 hint_time_bonus (ipa_hints hints)
2592 int result = 0;
2593 if (hints & (INLINE_HINT_loop_iterations | INLINE_HINT_loop_stride))
2594 result += PARAM_VALUE (PARAM_IPA_CP_LOOP_HINT_BONUS);
2595 if (hints & INLINE_HINT_array_index)
2596 result += PARAM_VALUE (PARAM_IPA_CP_ARRAY_INDEX_HINT_BONUS);
2597 return result;
2600 /* If there is a reason to penalize the function described by INFO in the
2601 cloning goodness evaluation, do so. */
2603 static inline int64_t
2604 incorporate_penalties (ipa_node_params *info, int64_t evaluation)
2606 if (info->node_within_scc)
2607 evaluation = (evaluation
2608 * (100 - PARAM_VALUE (PARAM_IPA_CP_RECURSION_PENALTY))) / 100;
2610 if (info->node_calling_single_call)
2611 evaluation = (evaluation
2612 * (100 - PARAM_VALUE (PARAM_IPA_CP_SINGLE_CALL_PENALTY)))
2613 / 100;
2615 return evaluation;
2618 /* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
2619 and SIZE_COST and with the sum of frequencies of incoming edges to the
2620 potential new clone in FREQUENCIES. */
2622 static bool
2623 good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
2624 int freq_sum, profile_count count_sum, int size_cost)
2626 if (time_benefit == 0
2627 || !opt_for_fn (node->decl, flag_ipa_cp_clone)
2628 || node->optimize_for_size_p ())
2629 return false;
2631 gcc_assert (size_cost > 0);
2633 struct ipa_node_params *info = IPA_NODE_REF (node);
2634 if (max_count > profile_count::zero ())
2636 int factor = RDIV (count_sum.probability_in
2637 (max_count).to_reg_br_prob_base ()
2638 * 1000, REG_BR_PROB_BASE);
2639 int64_t evaluation = (((int64_t) time_benefit * factor)
2640 / size_cost);
2641 evaluation = incorporate_penalties (info, evaluation);
2643 if (dump_file && (dump_flags & TDF_DETAILS))
2645 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
2646 "size: %i, count_sum: ", time_benefit, size_cost);
2647 count_sum.dump (dump_file);
2648 fprintf (dump_file, "%s%s) -> evaluation: " "%" PRId64
2649 ", threshold: %i\n",
2650 info->node_within_scc ? ", scc" : "",
2651 info->node_calling_single_call ? ", single_call" : "",
2652 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
2655 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
2657 else
2659 int64_t evaluation = (((int64_t) time_benefit * freq_sum)
2660 / size_cost);
2661 evaluation = incorporate_penalties (info, evaluation);
2663 if (dump_file && (dump_flags & TDF_DETAILS))
2664 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
2665 "size: %i, freq_sum: %i%s%s) -> evaluation: "
2666 "%" PRId64 ", threshold: %i\n",
2667 time_benefit, size_cost, freq_sum,
2668 info->node_within_scc ? ", scc" : "",
2669 info->node_calling_single_call ? ", single_call" : "",
2670 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
2672 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
2676 /* Return all context independent values from aggregate lattices in PLATS in a
2677 vector. Return NULL if there are none. */
2679 static vec<ipa_agg_jf_item, va_gc> *
2680 context_independent_aggregate_values (struct ipcp_param_lattices *plats)
2682 vec<ipa_agg_jf_item, va_gc> *res = NULL;
2684 if (plats->aggs_bottom
2685 || plats->aggs_contain_variable
2686 || plats->aggs_count == 0)
2687 return NULL;
2689 for (struct ipcp_agg_lattice *aglat = plats->aggs;
2690 aglat;
2691 aglat = aglat->next)
2692 if (aglat->is_single_const ())
2694 struct ipa_agg_jf_item item;
2695 item.offset = aglat->offset;
2696 item.value = aglat->values->value;
2697 vec_safe_push (res, item);
2699 return res;
2702 /* Allocate KNOWN_CSTS, KNOWN_CONTEXTS and, if non-NULL, KNOWN_AGGS and
2703 populate them with values of parameters that are known independent of the
2704 context. INFO describes the function. If REMOVABLE_PARAMS_COST is
2705 non-NULL, the movement cost of all removable parameters will be stored in
2706 it. */
2708 static bool
2709 gather_context_independent_values (struct ipa_node_params *info,
2710 vec<tree> *known_csts,
2711 vec<ipa_polymorphic_call_context>
2712 *known_contexts,
2713 vec<ipa_agg_jump_function> *known_aggs,
2714 int *removable_params_cost)
2716 int i, count = ipa_get_param_count (info);
2717 bool ret = false;
2719 known_csts->create (0);
2720 known_contexts->create (0);
2721 known_csts->safe_grow_cleared (count);
2722 known_contexts->safe_grow_cleared (count);
2723 if (known_aggs)
2725 known_aggs->create (0);
2726 known_aggs->safe_grow_cleared (count);
2729 if (removable_params_cost)
2730 *removable_params_cost = 0;
2732 for (i = 0; i < count; i++)
2734 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2735 ipcp_lattice<tree> *lat = &plats->itself;
2737 if (lat->is_single_const ())
2739 ipcp_value<tree> *val = lat->values;
2740 gcc_checking_assert (TREE_CODE (val->value) != TREE_BINFO);
2741 (*known_csts)[i] = val->value;
2742 if (removable_params_cost)
2743 *removable_params_cost
2744 += estimate_move_cost (TREE_TYPE (val->value), false);
2745 ret = true;
2747 else if (removable_params_cost
2748 && !ipa_is_param_used (info, i))
2749 *removable_params_cost
2750 += ipa_get_param_move_cost (info, i);
2752 if (!ipa_is_param_used (info, i))
2753 continue;
2755 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
2756 /* Do not account known context as reason for cloning. We can see
2757 if it permits devirtualization. */
2758 if (ctxlat->is_single_const ())
2759 (*known_contexts)[i] = ctxlat->values->value;
2761 if (known_aggs)
2763 vec<ipa_agg_jf_item, va_gc> *agg_items;
2764 struct ipa_agg_jump_function *ajf;
2766 agg_items = context_independent_aggregate_values (plats);
2767 ajf = &(*known_aggs)[i];
2768 ajf->items = agg_items;
2769 ajf->by_ref = plats->aggs_by_ref;
2770 ret |= agg_items != NULL;
2774 return ret;
2777 /* The current interface in ipa-inline-analysis requires a pointer vector.
2778 Create it.
2780 FIXME: That interface should be re-worked, this is slightly silly. Still,
2781 I'd like to discuss how to change it first and this demonstrates the
2782 issue. */
2784 static vec<ipa_agg_jump_function_p>
2785 agg_jmp_p_vec_for_t_vec (vec<ipa_agg_jump_function> known_aggs)
2787 vec<ipa_agg_jump_function_p> ret;
2788 struct ipa_agg_jump_function *ajf;
2789 int i;
2791 ret.create (known_aggs.length ());
2792 FOR_EACH_VEC_ELT (known_aggs, i, ajf)
2793 ret.quick_push (ajf);
2794 return ret;
2797 /* Perform time and size measurement of NODE with the context given in
2798 KNOWN_CSTS, KNOWN_CONTEXTS and KNOWN_AGGS, calculate the benefit and cost
2799 given BASE_TIME of the node without specialization, REMOVABLE_PARAMS_COST of
2800 all context-independent removable parameters and EST_MOVE_COST of estimated
2801 movement of the considered parameter and store it into VAL. */
2803 static void
2804 perform_estimation_of_a_value (cgraph_node *node, vec<tree> known_csts,
2805 vec<ipa_polymorphic_call_context> known_contexts,
2806 vec<ipa_agg_jump_function_p> known_aggs_ptrs,
2807 int removable_params_cost,
2808 int est_move_cost, ipcp_value_base *val)
2810 int size, time_benefit;
2811 sreal time, base_time;
2812 ipa_hints hints;
2814 estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
2815 known_aggs_ptrs, &size, &time,
2816 &base_time, &hints);
2817 base_time -= time;
2818 if (base_time > 65535)
2819 base_time = 65535;
2820 time_benefit = base_time.to_int ()
2821 + devirtualization_time_bonus (node, known_csts, known_contexts,
2822 known_aggs_ptrs)
2823 + hint_time_bonus (hints)
2824 + removable_params_cost + est_move_cost;
2826 gcc_checking_assert (size >=0);
2827 /* The inliner-heuristics based estimates may think that in certain
2828 contexts some functions do not have any size at all but we want
2829 all specializations to have at least a tiny cost, not least not to
2830 divide by zero. */
2831 if (size == 0)
2832 size = 1;
2834 val->local_time_benefit = time_benefit;
2835 val->local_size_cost = size;
2838 /* Iterate over known values of parameters of NODE and estimate the local
2839 effects in terms of time and size they have. */
2841 static void
2842 estimate_local_effects (struct cgraph_node *node)
2844 struct ipa_node_params *info = IPA_NODE_REF (node);
2845 int i, count = ipa_get_param_count (info);
2846 vec<tree> known_csts;
2847 vec<ipa_polymorphic_call_context> known_contexts;
2848 vec<ipa_agg_jump_function> known_aggs;
2849 vec<ipa_agg_jump_function_p> known_aggs_ptrs;
2850 bool always_const;
2851 int removable_params_cost;
2853 if (!count || !ipcp_versionable_function_p (node))
2854 return;
2856 if (dump_file && (dump_flags & TDF_DETAILS))
2857 fprintf (dump_file, "\nEstimating effects for %s.\n", node->dump_name ());
2859 always_const = gather_context_independent_values (info, &known_csts,
2860 &known_contexts, &known_aggs,
2861 &removable_params_cost);
2862 known_aggs_ptrs = agg_jmp_p_vec_for_t_vec (known_aggs);
2863 int devirt_bonus = devirtualization_time_bonus (node, known_csts,
2864 known_contexts, known_aggs_ptrs);
2865 if (always_const || devirt_bonus
2866 || (removable_params_cost && node->local.can_change_signature))
2868 struct caller_statistics stats;
2869 ipa_hints hints;
2870 sreal time, base_time;
2871 int size;
2873 init_caller_stats (&stats);
2874 node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
2875 false);
2876 estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
2877 known_aggs_ptrs, &size, &time,
2878 &base_time, &hints);
2879 time -= devirt_bonus;
2880 time -= hint_time_bonus (hints);
2881 time -= removable_params_cost;
2882 size -= stats.n_calls * removable_params_cost;
2884 if (dump_file)
2885 fprintf (dump_file, " - context independent values, size: %i, "
2886 "time_benefit: %f\n", size, (base_time - time).to_double ());
2888 if (size <= 0 || node->local.local)
2890 info->do_clone_for_all_contexts = true;
2892 if (dump_file)
2893 fprintf (dump_file, " Decided to specialize for all "
2894 "known contexts, code not going to grow.\n");
2896 else if (good_cloning_opportunity_p (node,
2897 MAX ((base_time - time).to_int (),
2898 65536),
2899 stats.freq_sum, stats.count_sum,
2900 size))
2902 if (size + overall_size <= max_new_size)
2904 info->do_clone_for_all_contexts = true;
2905 overall_size += size;
2907 if (dump_file)
2908 fprintf (dump_file, " Decided to specialize for all "
2909 "known contexts, growth deemed beneficial.\n");
2911 else if (dump_file && (dump_flags & TDF_DETAILS))
2912 fprintf (dump_file, " Not cloning for all contexts because "
2913 "max_new_size would be reached with %li.\n",
2914 size + overall_size);
2916 else if (dump_file && (dump_flags & TDF_DETAILS))
2917 fprintf (dump_file, " Not cloning for all contexts because "
2918 "!good_cloning_opportunity_p.\n");
2922 for (i = 0; i < count; i++)
2924 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2925 ipcp_lattice<tree> *lat = &plats->itself;
2926 ipcp_value<tree> *val;
2928 if (lat->bottom
2929 || !lat->values
2930 || known_csts[i])
2931 continue;
2933 for (val = lat->values; val; val = val->next)
2935 gcc_checking_assert (TREE_CODE (val->value) != TREE_BINFO);
2936 known_csts[i] = val->value;
2938 int emc = estimate_move_cost (TREE_TYPE (val->value), true);
2939 perform_estimation_of_a_value (node, known_csts, known_contexts,
2940 known_aggs_ptrs,
2941 removable_params_cost, emc, val);
2943 if (dump_file && (dump_flags & TDF_DETAILS))
2945 fprintf (dump_file, " - estimates for value ");
2946 print_ipcp_constant_value (dump_file, val->value);
2947 fprintf (dump_file, " for ");
2948 ipa_dump_param (dump_file, info, i);
2949 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
2950 val->local_time_benefit, val->local_size_cost);
2953 known_csts[i] = NULL_TREE;
2956 for (i = 0; i < count; i++)
2958 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2960 if (!plats->virt_call)
2961 continue;
2963 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
2964 ipcp_value<ipa_polymorphic_call_context> *val;
2966 if (ctxlat->bottom
2967 || !ctxlat->values
2968 || !known_contexts[i].useless_p ())
2969 continue;
2971 for (val = ctxlat->values; val; val = val->next)
2973 known_contexts[i] = val->value;
2974 perform_estimation_of_a_value (node, known_csts, known_contexts,
2975 known_aggs_ptrs,
2976 removable_params_cost, 0, val);
2978 if (dump_file && (dump_flags & TDF_DETAILS))
2980 fprintf (dump_file, " - estimates for polymorphic context ");
2981 print_ipcp_constant_value (dump_file, val->value);
2982 fprintf (dump_file, " for ");
2983 ipa_dump_param (dump_file, info, i);
2984 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
2985 val->local_time_benefit, val->local_size_cost);
2988 known_contexts[i] = ipa_polymorphic_call_context ();
2991 for (i = 0; i < count; i++)
2993 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2994 struct ipa_agg_jump_function *ajf;
2995 struct ipcp_agg_lattice *aglat;
2997 if (plats->aggs_bottom || !plats->aggs)
2998 continue;
3000 ajf = &known_aggs[i];
3001 for (aglat = plats->aggs; aglat; aglat = aglat->next)
3003 ipcp_value<tree> *val;
3004 if (aglat->bottom || !aglat->values
3005 /* If the following is true, the one value is in known_aggs. */
3006 || (!plats->aggs_contain_variable
3007 && aglat->is_single_const ()))
3008 continue;
3010 for (val = aglat->values; val; val = val->next)
3012 struct ipa_agg_jf_item item;
3014 item.offset = aglat->offset;
3015 item.value = val->value;
3016 vec_safe_push (ajf->items, item);
3018 perform_estimation_of_a_value (node, known_csts, known_contexts,
3019 known_aggs_ptrs,
3020 removable_params_cost, 0, val);
3022 if (dump_file && (dump_flags & TDF_DETAILS))
3024 fprintf (dump_file, " - estimates for value ");
3025 print_ipcp_constant_value (dump_file, val->value);
3026 fprintf (dump_file, " for ");
3027 ipa_dump_param (dump_file, info, i);
3028 fprintf (dump_file, "[%soffset: " HOST_WIDE_INT_PRINT_DEC
3029 "]: time_benefit: %i, size: %i\n",
3030 plats->aggs_by_ref ? "ref " : "",
3031 aglat->offset,
3032 val->local_time_benefit, val->local_size_cost);
3035 ajf->items->pop ();
3040 for (i = 0; i < count; i++)
3041 vec_free (known_aggs[i].items);
3043 known_csts.release ();
3044 known_contexts.release ();
3045 known_aggs.release ();
3046 known_aggs_ptrs.release ();
3050 /* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
3051 topological sort of values. */
3053 template <typename valtype>
3054 void
3055 value_topo_info<valtype>::add_val (ipcp_value<valtype> *cur_val)
3057 ipcp_value_source<valtype> *src;
3059 if (cur_val->dfs)
3060 return;
3062 dfs_counter++;
3063 cur_val->dfs = dfs_counter;
3064 cur_val->low_link = dfs_counter;
3066 cur_val->topo_next = stack;
3067 stack = cur_val;
3068 cur_val->on_stack = true;
3070 for (src = cur_val->sources; src; src = src->next)
3071 if (src->val)
3073 if (src->val->dfs == 0)
3075 add_val (src->val);
3076 if (src->val->low_link < cur_val->low_link)
3077 cur_val->low_link = src->val->low_link;
3079 else if (src->val->on_stack
3080 && src->val->dfs < cur_val->low_link)
3081 cur_val->low_link = src->val->dfs;
3084 if (cur_val->dfs == cur_val->low_link)
3086 ipcp_value<valtype> *v, *scc_list = NULL;
3090 v = stack;
3091 stack = v->topo_next;
3092 v->on_stack = false;
3094 v->scc_next = scc_list;
3095 scc_list = v;
3097 while (v != cur_val);
3099 cur_val->topo_next = values_topo;
3100 values_topo = cur_val;
3104 /* Add all values in lattices associated with NODE to the topological sort if
3105 they are not there yet. */
3107 static void
3108 add_all_node_vals_to_toposort (cgraph_node *node, ipa_topo_info *topo)
3110 struct ipa_node_params *info = IPA_NODE_REF (node);
3111 int i, count = ipa_get_param_count (info);
3113 for (i = 0; i < count; i++)
3115 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
3116 ipcp_lattice<tree> *lat = &plats->itself;
3117 struct ipcp_agg_lattice *aglat;
3119 if (!lat->bottom)
3121 ipcp_value<tree> *val;
3122 for (val = lat->values; val; val = val->next)
3123 topo->constants.add_val (val);
3126 if (!plats->aggs_bottom)
3127 for (aglat = plats->aggs; aglat; aglat = aglat->next)
3128 if (!aglat->bottom)
3130 ipcp_value<tree> *val;
3131 for (val = aglat->values; val; val = val->next)
3132 topo->constants.add_val (val);
3135 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
3136 if (!ctxlat->bottom)
3138 ipcp_value<ipa_polymorphic_call_context> *ctxval;
3139 for (ctxval = ctxlat->values; ctxval; ctxval = ctxval->next)
3140 topo->contexts.add_val (ctxval);
3145 /* One pass of constants propagation along the call graph edges, from callers
3146 to callees (requires topological ordering in TOPO), iterate over strongly
3147 connected components. */
3149 static void
3150 propagate_constants_topo (struct ipa_topo_info *topo)
3152 int i;
3154 for (i = topo->nnodes - 1; i >= 0; i--)
3156 unsigned j;
3157 struct cgraph_node *v, *node = topo->order[i];
3158 vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
3160 /* First, iteratively propagate within the strongly connected component
3161 until all lattices stabilize. */
3162 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
3163 if (v->has_gimple_body_p ())
3164 push_node_to_stack (topo, v);
3166 v = pop_node_from_stack (topo);
3167 while (v)
3169 struct cgraph_edge *cs;
3171 for (cs = v->callees; cs; cs = cs->next_callee)
3172 if (ipa_edge_within_scc (cs))
3174 IPA_NODE_REF (v)->node_within_scc = true;
3175 if (propagate_constants_across_call (cs))
3176 push_node_to_stack (topo, cs->callee->function_symbol ());
3178 v = pop_node_from_stack (topo);
3181 /* Afterwards, propagate along edges leading out of the SCC, calculates
3182 the local effects of the discovered constants and all valid values to
3183 their topological sort. */
3184 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
3185 if (v->has_gimple_body_p ())
3187 struct cgraph_edge *cs;
3189 estimate_local_effects (v);
3190 add_all_node_vals_to_toposort (v, topo);
3191 for (cs = v->callees; cs; cs = cs->next_callee)
3192 if (!ipa_edge_within_scc (cs))
3193 propagate_constants_across_call (cs);
3195 cycle_nodes.release ();
/* Return the sum of A and B if none of them is bigger than INT_MAX/2, return
   the bigger one if otherwise.  This saturates instead of overflowing when
   benefits are accumulated repeatedly.  */

static int
safe_add (int a, int b)
{
  if (a <= INT_MAX / 2 && b <= INT_MAX / 2)
    return a + b;
  return (a > b) ? a : b;
}
/* Propagate the estimated effects of individual values along the topological
   from the dependent values to those they depend on.  */

template <typename valtype>
void
value_topo_info<valtype>::propagate_effects ()
{
  ipcp_value<valtype> *base;

  /* values_topo appears to link SCC representatives in topological order via
     topo_next, with scc_next chaining the members of each SCC -- TODO confirm
     against add_val_to_toposort.  */
  for (base = values_topo; base; base = base->topo_next)
    {
      ipcp_value_source<valtype> *src;
      ipcp_value<valtype> *val;
      int time = 0, size = 0;

      /* Accumulate the local and already-propagated benefits/costs of the
	 whole SCC, using safe_add to avoid integer overflow.  */
      for (val = base; val; val = val->scc_next)
	{
	  time = safe_add (time,
			   val->local_time_benefit + val->prop_time_benefit);
	  size = safe_add (size, val->local_size_cost + val->prop_size_cost);
	}

      /* Push the SCC totals onto every value this SCC depends on, but only
	 through sources whose call edge is maybe hot.  */
      for (val = base; val; val = val->scc_next)
	for (src = val->sources; src; src = src->next)
	  if (src->val
	      && src->cs->maybe_hot_p ())
	    {
	      src->val->prop_time_benefit = safe_add (time,
						      src->val->prop_time_benefit);
	      src->val->prop_size_cost = safe_add (size,
						   src->val->prop_size_cost);
	    }
    }
}
/* Propagate constants, polymorphic contexts and their effects from the
   summaries interprocedurally.  */

static void
ipcp_propagate_stage (struct ipa_topo_info *topo)
{
  struct cgraph_node *node;

  if (dump_file)
    fprintf (dump_file, "\n Propagating constants:\n\n");

  /* Set up per-node lattices and gather the unit-wide size and maximal
     profile count before any propagation takes place.  */
  FOR_EACH_DEFINED_FUNCTION (node)
  {
    struct ipa_node_params *info = IPA_NODE_REF (node);

    determine_versionability (node, info);
    if (node->has_gimple_body_p ())
      {
	/* One lattice per formal parameter, zero-initialized.  */
	info->lattices = XCNEWVEC (struct ipcp_param_lattices,
				   ipa_get_param_count (info));
	initialize_node_lattices (node);
      }
    if (node->definition && !node->alias)
      overall_size += ipa_fn_summaries->get (node)->self_size;
    if (node->count > max_count)
      max_count = node->count;
  }

  /* Derive the unit growth budget: at least PARAM_LARGE_UNIT_INSNS, grown by
     PARAM_IPCP_UNIT_GROWTH percent (plus one to avoid a zero budget).  */
  max_new_size = overall_size;
  if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
    max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
  max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;

  if (dump_file)
    fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
	     overall_size, max_new_size);

  /* Do the actual interprocedural propagation and then spread the estimated
     effects of values along the topological order of value dependencies.  */
  propagate_constants_topo (topo);
  if (flag_checking)
    ipcp_verify_propagated_values ();
  topo->constants.propagate_effects ();
  topo->contexts.propagate_effects ();

  if (dump_file)
    {
      fprintf (dump_file, "\nIPA lattices after all propagation:\n");
      print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
    }
}
/* Discover newly direct outgoing edges from NODE which is a new clone with
   known KNOWN_CSTS and make them direct.  */

static void
ipcp_discover_new_direct_edges (struct cgraph_node *node,
				vec<tree> known_csts,
				vec<ipa_polymorphic_call_context>
				known_contexts,
				struct ipa_agg_replacement_value *aggvals)
{
  struct cgraph_edge *ie, *next_ie;
  bool found = false;

  /* NEXT_IE is fetched up front because making an edge direct can remove IE
     from the indirect call list.  */
  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      tree target;
      bool speculative;

      next_ie = ie->next_callee;
      target = ipa_get_indirect_edge_target_1 (ie, known_csts, known_contexts,
					       vNULL, aggvals, &speculative);
      if (target)
	{
	  /* Save the fields we need before ipa_make_edge_direct_to_target
	     potentially invalidates IE.  */
	  bool agg_contents = ie->indirect_info->agg_contents;
	  bool polymorphic = ie->indirect_info->polymorphic;
	  int param_index = ie->indirect_info->param_index;
	  struct cgraph_edge *cs = ipa_make_edge_direct_to_target (ie, target,
								   speculative);
	  found = true;

	  if (cs && !agg_contents && !polymorphic)
	    {
	      /* A plain scalar use of the parameter became a direct call;
		 decrement its controlled-uses counter and, when it drops to
		 zero, also remove the reference created by cloning.  */
	      struct ipa_node_params *info = IPA_NODE_REF (node);
	      int c = ipa_get_controlled_uses (info, param_index);
	      if (c != IPA_UNDESCRIBED_USE)
		{
		  struct ipa_ref *to_del;

		  c--;
		  ipa_set_controlled_uses (info, param_index, c);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, " controlled uses count of param "
			     "%i bumped down to %i\n", param_index, c);
		  if (c == 0
		      && (to_del = node->find_reference (cs->callee, NULL, 0)))
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file, " and even removing its "
				 "cloning-created reference\n");
		      to_del->remove_reference ();
		    }
		}
	    }
	}
    }
  /* Turning calls to direct calls will improve overall summary.  */
  if (found)
    ipa_update_overall_fn_summary (node);
}
/* Vectors of pointers which form linked lists of clones of an original cgraph
   edge, indexed by edge uid.  next_edge_clone[uid] is the next clone in the
   list, prev_edge_clone[uid] the previous one.  */

static vec<cgraph_edge *> next_edge_clone;
static vec<cgraph_edge *> prev_edge_clone;

/* Make sure both clone-list vectors are large enough to be indexed by any
   currently existing edge uid; newly added slots are zero (NULL) filled.  */

static inline void
grow_edge_clone_vectors (void)
{
  if (next_edge_clone.length ()
      <= (unsigned) symtab->edges_max_uid)
    next_edge_clone.safe_grow_cleared (symtab->edges_max_uid + 1);
  if (prev_edge_clone.length ()
      <= (unsigned) symtab->edges_max_uid)
    prev_edge_clone.safe_grow_cleared (symtab->edges_max_uid + 1);
}
/* Edge duplication hook to grow the appropriate linked list in
   next_edge_clone.  */

static void
ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			    void *)
{
  grow_edge_clone_vectors ();

  /* Splice DST into the doubly-linked clone list immediately after SRC:
     first fix up the back-link of SRC's old successor (if any), then link
     DST between SRC and that successor.  */
  struct cgraph_edge *old_next = next_edge_clone[src->uid];
  if (old_next)
    prev_edge_clone[old_next->uid] = dst;
  prev_edge_clone[dst->uid] = src;

  next_edge_clone[dst->uid] = old_next;
  next_edge_clone[src->uid] = dst;
}
3394 /* Hook that is called by cgraph.c when an edge is removed. */
3396 static void
3397 ipcp_edge_removal_hook (struct cgraph_edge *cs, void *)
3399 grow_edge_clone_vectors ();
3401 struct cgraph_edge *prev = prev_edge_clone[cs->uid];
3402 struct cgraph_edge *next = next_edge_clone[cs->uid];
3403 if (prev)
3404 next_edge_clone[prev->uid] = next;
3405 if (next)
3406 prev_edge_clone[next->uid] = prev;
3409 /* See if NODE is a clone with a known aggregate value at a given OFFSET of a
3410 parameter with the given INDEX. */
3412 static tree
3413 get_clone_agg_value (struct cgraph_node *node, HOST_WIDE_INT offset,
3414 int index)
3416 struct ipa_agg_replacement_value *aggval;
3418 aggval = ipa_get_agg_replacements_for_node (node);
3419 while (aggval)
3421 if (aggval->offset == offset
3422 && aggval->index == index)
3423 return aggval->value;
3424 aggval = aggval->next;
3426 return NULL_TREE;
3429 /* Return true is NODE is DEST or its clone for all contexts. */
3431 static bool
3432 same_node_or_its_all_contexts_clone_p (cgraph_node *node, cgraph_node *dest)
3434 if (node == dest)
3435 return true;
3437 struct ipa_node_params *info = IPA_NODE_REF (node);
3438 return info->is_all_contexts_clone && info->ipcp_orig_node == dest;
/* Return true if edge CS does bring about the value described by SRC to node
   DEST or its clone for all contexts.  */

static bool
cgraph_edge_brings_value_p (cgraph_edge *cs, ipcp_value_source<tree> *src,
			    cgraph_node *dest)
{
  struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
  enum availability availability;
  cgraph_node *real_dest = cs->callee->function_symbol (&availability);

  /* The edge must actually reach DEST (or its all-contexts clone), the
     callee must not be interposable, and the caller must still be alive.  */
  if (!same_node_or_its_all_contexts_clone_p (real_dest, dest)
      || availability <= AVAIL_INTERPOSABLE
      || caller_info->node_dead)
    return false;
  /* A source without a value describes the "any edge" case.  */
  if (!src->val)
    return true;

  if (caller_info->ipcp_orig_node)
    {
      /* The caller is itself an IPA-CP clone; consult the constants it was
	 specialized for.  Offset -1 denotes a scalar (non-aggregate)
	 value.  */
      tree t;
      if (src->offset == -1)
	t = caller_info->known_csts[src->index];
      else
	t = get_clone_agg_value (cs->caller, src->offset, src->index);
      return (t != NULL_TREE
	      && values_equal_for_ipcp_p (src->val->value, t));
    }
  else
    {
      /* An unspecialized caller: the value must be the unique constant in
	 the corresponding lattice.  */
      struct ipcp_agg_lattice *aglat;
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
								 src->index);
      if (src->offset == -1)
	return (plats->itself.is_single_const ()
		&& values_equal_for_ipcp_p (src->val->value,
					    plats->itself.values->value));
      else
	{
	  if (plats->aggs_bottom || plats->aggs_contain_variable)
	    return false;
	  for (aglat = plats->aggs; aglat; aglat = aglat->next)
	    if (aglat->offset == src->offset)
	      return (aglat->is_single_const ()
		      && values_equal_for_ipcp_p (src->val->value,
						  aglat->values->value));
	}
      /* No aggregate lattice at the required offset.  */
      return false;
    }
}
/* Return true if edge CS does bring about the value described by SRC to node
   DEST or its clone for all contexts.  Overload for polymorphic call
   contexts.  */

static bool
cgraph_edge_brings_value_p (cgraph_edge *cs,
			    ipcp_value_source<ipa_polymorphic_call_context> *src,
			    cgraph_node *dest)
{
  struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
  cgraph_node *real_dest = cs->callee->function_symbol ();

  if (!same_node_or_its_all_contexts_clone_p (real_dest, dest)
      || caller_info->node_dead)
    return false;
  /* A source without a value describes the "any edge" case.  */
  if (!src->val)
    return true;

  /* For a caller that is itself a clone, look into the contexts it was
     specialized for; otherwise require a single-constant context lattice.  */
  if (caller_info->ipcp_orig_node)
    return (caller_info->known_contexts.length () > (unsigned) src->index)
      && values_equal_for_ipcp_p (src->val->value,
				  caller_info->known_contexts[src->index]);

  struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
							     src->index);
  return plats->ctxlat.is_single_const ()
    && values_equal_for_ipcp_p (src->val->value,
				plats->ctxlat.values->value);
}
/* Get the next clone in the linked list of clones of an edge, or NULL if CS
   is the last one.  */

static inline struct cgraph_edge *
get_next_cgraph_edge_clone (struct cgraph_edge *cs)
{
  /* The list is kept in next_edge_clone, indexed by edge uid.  */
  return next_edge_clone[cs->uid];
}
/* Given VAL that is intended for DEST, iterate over all its sources and if
   they still hold, add their edge frequency, their profile count and their
   number into *FREQ_SUM, *COUNT_SUM and *CALLER_COUNT respectively.  Return
   true if any of the contributing edges is maybe hot.  */

template <typename valtype>
static bool
get_info_about_necessary_edges (ipcp_value<valtype> *val, cgraph_node *dest,
				int *freq_sum,
				profile_count *count_sum, int *caller_count)
{
  ipcp_value_source<valtype> *src;
  int freq = 0, count = 0;
  profile_count cnt = profile_count::zero ();
  bool hot = false;

  for (src = val->sources; src; src = src->next)
    {
      /* Each source edge may have been cloned; walk the whole clone list and
	 count every clone that still brings the value to DEST.  */
      struct cgraph_edge *cs = src->cs;
      while (cs)
	{
	  if (cgraph_edge_brings_value_p (cs, src, dest))
	    {
	      count++;
	      freq += cs->frequency;
	      if (cs->count.initialized_p ())
		cnt += cs->count;
	      hot |= cs->maybe_hot_p ();
	    }
	  cs = get_next_cgraph_edge_clone (cs);
	}
    }

  *freq_sum = freq;
  *count_sum = cnt;
  *caller_count = count;
  return hot;
}
3567 /* Return a vector of incoming edges that do bring value VAL to node DEST. It
3568 is assumed their number is known and equal to CALLER_COUNT. */
3570 template <typename valtype>
3571 static vec<cgraph_edge *>
3572 gather_edges_for_value (ipcp_value<valtype> *val, cgraph_node *dest,
3573 int caller_count)
3575 ipcp_value_source<valtype> *src;
3576 vec<cgraph_edge *> ret;
3578 ret.create (caller_count);
3579 for (src = val->sources; src; src = src->next)
3581 struct cgraph_edge *cs = src->cs;
3582 while (cs)
3584 if (cgraph_edge_brings_value_p (cs, src, dest))
3585 ret.quick_push (cs);
3586 cs = get_next_cgraph_edge_clone (cs);
3590 return ret;
3593 /* Construct a replacement map for a know VALUE for a formal parameter PARAM.
3594 Return it or NULL if for some reason it cannot be created. */
3596 static struct ipa_replace_map *
3597 get_replacement_map (struct ipa_node_params *info, tree value, int parm_num)
3599 struct ipa_replace_map *replace_map;
3602 replace_map = ggc_alloc<ipa_replace_map> ();
3603 if (dump_file)
3605 fprintf (dump_file, " replacing ");
3606 ipa_dump_param (dump_file, info, parm_num);
3608 fprintf (dump_file, " with const ");
3609 print_generic_expr (dump_file, value);
3610 fprintf (dump_file, "\n");
3612 replace_map->old_tree = NULL;
3613 replace_map->parm_num = parm_num;
3614 replace_map->new_tree = value;
3615 replace_map->replace_p = true;
3616 replace_map->ref_p = false;
3618 return replace_map;
3621 /* Dump new profiling counts */
3623 static void
3624 dump_profile_updates (struct cgraph_node *orig_node,
3625 struct cgraph_node *new_node)
3627 struct cgraph_edge *cs;
3629 fprintf (dump_file, " setting count of the specialized node to ");
3630 new_node->count.dump (dump_file);
3631 fprintf (dump_file, "\n");
3632 for (cs = new_node->callees; cs; cs = cs->next_callee)
3634 fprintf (dump_file, " edge to %s has count ",
3635 cs->callee->name ());
3636 cs->count.dump (dump_file);
3637 fprintf (dump_file, "\n");
3640 fprintf (dump_file, " setting count of the original node to ");
3641 orig_node->count.dump (dump_file);
3642 fprintf (dump_file, "\n");
3643 for (cs = orig_node->callees; cs; cs = cs->next_callee)
3645 fprintf (dump_file, " edge to %s is left with ",
3646 cs->callee->name ());
3647 cs->count.dump (dump_file);
3648 fprintf (dump_file, "\n");
/* After a specialized NEW_NODE version of ORIG_NODE has been created, update
   their profile information to reflect this.  */

static void
update_profiling_info (struct cgraph_node *orig_node,
		       struct cgraph_node *new_node)
{
  struct cgraph_edge *cs;
  struct caller_statistics stats;
  profile_count new_sum, orig_sum;
  profile_count remainder, orig_node_count = orig_node->count;

  /* Nothing to distribute if the original node has no positive count.  */
  if (!(orig_node_count > profile_count::zero ()))
    return;

  /* Sum the counts of calls still reaching the original node and of those
     already redirected to the new node.  */
  init_caller_stats (&stats);
  orig_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
						 false);
  orig_sum = stats.count_sum;
  init_caller_stats (&stats);
  new_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
						 false);
  new_sum = stats.count_sum;

  if (orig_node_count < orig_sum + new_sum)
    {
      /* The incoming counts exceed the node's own count, which can happen
	 with inaccurate profiles; pretend the node count was 20% above the
	 incoming sum so the arithmetic below stays consistent.  */
      if (dump_file)
	{
	  fprintf (dump_file, " Problem: node %s has too low count ",
		   orig_node->dump_name ());
	  orig_node_count.dump (dump_file);
	  fprintf (dump_file, "while the sum of incoming count is ");
	  (orig_sum + new_sum).dump (dump_file);
	  fprintf (dump_file, "\n");
	}

      orig_node_count = (orig_sum + new_sum).apply_scale (12, 10);
      if (dump_file)
	{
	  fprintf (dump_file, " proceeding by pretending it was ");
	  orig_node_count.dump (dump_file);
	  fprintf (dump_file, "\n");
	}
    }

  /* The new node takes the redirected count, the original keeps the rest.  */
  new_node->count = new_sum;
  remainder = orig_node_count - new_sum;
  orig_node->count = remainder;

  /* Scale the outgoing edge counts of both nodes proportionally to their
     new node counts.  */
  for (cs = new_node->callees; cs; cs = cs->next_callee)
    /* FIXME: why we care about non-zero frequency here? */
    if (cs->frequency)
      cs->count = cs->count.apply_scale (new_sum, orig_node_count);
    else
      cs->count = profile_count::zero ();

  for (cs = orig_node->callees; cs; cs = cs->next_callee)
    cs->count = cs->count.apply_scale (remainder, orig_node_count);

  if (dump_file)
    dump_profile_updates (orig_node, new_node);
}
/* Update the respective profile of specialized NEW_NODE and the original
   ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
   have been redirected to the specialized version.  */

static void
update_specialized_profile (struct cgraph_node *new_node,
			    struct cgraph_node *orig_node,
			    profile_count redirected_sum)
{
  struct cgraph_edge *cs;
  profile_count new_node_count, orig_node_count = orig_node->count;

  if (dump_file)
    {
      fprintf (dump_file, " the sum of counts of redirected edges is ");
      redirected_sum.dump (dump_file);
      fprintf (dump_file, "\n");
    }
  /* Without a positive original count there is nothing to move.  */
  if (!(orig_node_count > profile_count::zero ()))
    return;

  gcc_assert (orig_node_count >= redirected_sum);

  /* Move REDIRECTED_SUM from the original node to the specialized one.
     NEW_NODE_COUNT is saved before the update for use as the scaling base
     below.  */
  new_node_count = new_node->count;
  new_node->count += redirected_sum;
  orig_node->count -= redirected_sum;

  /* Grow the outgoing edge counts of the new node proportionally ...  */
  for (cs = new_node->callees; cs; cs = cs->next_callee)
    if (cs->frequency)
      cs->count += cs->count.apply_scale (redirected_sum, new_node_count);
    else
      cs->count = profile_count::zero ();

  /* ... and shrink those of the original node accordingly.  */
  for (cs = orig_node->callees; cs; cs = cs->next_callee)
    {
      profile_count dec = cs->count.apply_scale (redirected_sum,
						 orig_node_count);
      cs->count -= dec;
    }

  if (dump_file)
    dump_profile_updates (orig_node, new_node);
}
/* Create a specialized version of NODE with known constants in KNOWN_CSTS,
   known contexts in KNOWN_CONTEXTS and known aggregate values in AGGVALS and
   redirect all edges in CALLERS to it.  Return the new node.  CALLERS is
   released by this function.  */

static struct cgraph_node *
create_specialized_node (struct cgraph_node *node,
			 vec<tree> known_csts,
			 vec<ipa_polymorphic_call_context> known_contexts,
			 struct ipa_agg_replacement_value *aggvals,
			 vec<cgraph_edge *> callers)
{
  struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
  vec<ipa_replace_map *, va_gc> *replace_trees = NULL;
  struct ipa_agg_replacement_value *av;
  struct cgraph_node *new_node;
  int i, count = ipa_get_param_count (info);
  bitmap args_to_skip;

  /* Never specialize a node that is already a specialization.  */
  gcc_assert (!info->ipcp_orig_node);

  if (node->local.can_change_signature)
    {
      /* Drop parameters that are either replaced by a constant or not used
	 at all.  */
      args_to_skip = BITMAP_GGC_ALLOC ();
      for (i = 0; i < count; i++)
	{
	  tree t = known_csts[i];

	  if (t || !ipa_is_param_used (info, i))
	    bitmap_set_bit (args_to_skip, i);
	}
    }
  else
    {
      args_to_skip = NULL;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " cannot change function signature\n");
    }

  /* Build replacement maps substituting the known constants into the clone's
     body.  */
  for (i = 0; i < count; i++)
    {
      tree t = known_csts[i];
      if (t)
	{
	  struct ipa_replace_map *replace_map;

	  gcc_checking_assert (TREE_CODE (t) != TREE_BINFO);
	  replace_map = get_replacement_map (info, t, i);
	  if (replace_map)
	    vec_safe_push (replace_trees, replace_map);
	}
    }

  new_node = node->create_virtual_clone (callers, replace_trees,
					 args_to_skip, "constprop");
  /* Attach the aggregate replacements and create references for the constant
     values so that the symbols they mention are kept alive.  */
  ipa_set_node_agg_value_chain (new_node, aggvals);
  for (av = aggvals; av; av = av->next)
    new_node->maybe_create_reference (av->value, NULL);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, " the new node is %s.\n", new_node->dump_name ());
      if (known_contexts.exists ())
	{
	  for (i = 0; i < count; i++)
	    if (!known_contexts[i].useless_p ())
	      {
		fprintf (dump_file, " known ctx %i is ", i);
		known_contexts[i].dump (dump_file);
	      }
	}
      if (aggvals)
	ipa_dump_agg_replacement_values (dump_file, aggvals);
    }
  ipa_check_create_node_params ();
  update_profiling_info (node, new_node);
  /* Record what the clone was specialized for; the vectors are handed over
     to the new node's info, not copied.  */
  new_info = IPA_NODE_REF (new_node);
  new_info->ipcp_orig_node = node;
  new_info->known_csts = known_csts;
  new_info->known_contexts = known_contexts;

  ipcp_discover_new_direct_edges (new_node, known_csts, known_contexts, aggvals);

  callers.release ();
  return new_node;
}
/* Given a NODE, and a subset of its CALLERS, try to populate blank slots in
   KNOWN_CSTS with constants that are also known for all of the CALLERS.  */

static void
find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
					    vec<tree> known_csts,
					    vec<cgraph_edge *> callers)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);

  for (i = 0; i < count; i++)
    {
      struct cgraph_edge *cs;
      tree newval = NULL_TREE;
      int j;
      bool first = true;

      /* Skip parameters whose lattice is bottom or that already have a known
	 constant.  */
      if (ipa_get_scalar_lat (info, i)->bottom || known_csts[i])
	continue;

      FOR_EACH_VEC_ELT (callers, j, cs)
	{
	  struct ipa_jump_func *jump_func;
	  tree t;

	  /* Bail out for this parameter if a caller passes too few
	     arguments, the call goes through a thunk (which shifts argument
	     zero), or there is an instrumentation-clone mismatch between
	     caller and callee.  */
	  if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
	      || (i == 0
		  && call_passes_through_thunk_p (cs))
	      || (!cs->callee->instrumentation_clone
		  && cs->callee->function_symbol ()->instrumentation_clone))
	    {
	      newval = NULL_TREE;
	      break;
	    }
	  jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
	  t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func);
	  /* All callers must agree on one non-NULL constant; the FIRST flag
	     distinguishes "no value yet" from "a previous caller had no
	     value".  */
	  if (!t
	      || (newval
		  && !values_equal_for_ipcp_p (t, newval))
	      || (!first && !newval))
	    {
	      newval = NULL_TREE;
	      break;
	    }
	  else
	    newval = t;
	  first = false;
	}

      if (newval)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " adding an extra known scalar value ");
	      print_ipcp_constant_value (dump_file, newval);
	      fprintf (dump_file, " for ");
	      ipa_dump_param (dump_file, info, i);
	      fprintf (dump_file, "\n");
	    }

	  known_csts[i] = newval;
	}
    }
}
/* Given a NODE and a subset of its CALLERS, try to populate blank slots in
   KNOWN_CONTEXTS with polymorphic contexts that are also known for all of the
   CALLERS.  */

static void
find_more_contexts_for_caller_subset (cgraph_node *node,
				      vec<ipa_polymorphic_call_context>
				      *known_contexts,
				      vec<cgraph_edge *> callers)
{
  ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);

  for (i = 0; i < count; i++)
    {
      cgraph_edge *cs;

      /* Skip parameters with a bottom context lattice or an already useful
	 known context.  */
      if (ipa_get_poly_ctx_lat (info, i)->bottom
	  || (known_contexts->exists ()
	      && !(*known_contexts)[i].useless_p ()))
	continue;

      ipa_polymorphic_call_context newval;
      bool first = true;
      int j;

      FOR_EACH_VEC_ELT (callers, j, cs)
	{
	  /* NOTE(review): a caller passing too few arguments gives up on the
	     whole function, not just this parameter -- presumably because
	     later parameters cannot fare better; confirm intent.  */
	  if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
	    return;
	  ipa_jump_func *jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs),
							i);
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (IPA_NODE_REF (cs->caller), cs, i,
					jfunc);
	  /* Meet the contexts coming from all callers; stop early once the
	     meet degenerates to a useless context.  */
	  if (first)
	    {
	      newval = ctx;
	      first = false;
	    }
	  else
	    newval.meet_with (ctx);
	  if (newval.useless_p ())
	    break;
	}

      if (!newval.useless_p ())
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " adding an extra known polymorphic "
		       "context ");
	      print_ipcp_constant_value (dump_file, newval);
	      fprintf (dump_file, " for ");
	      ipa_dump_param (dump_file, info, i);
	      fprintf (dump_file, "\n");
	    }

	  /* Allocate the result vector lazily, only when there is something
	     worth recording.  */
	  if (!known_contexts->exists ())
	    known_contexts->safe_grow_cleared (ipa_get_param_count (info));
	  (*known_contexts)[i] = newval;
	}
    }
}
3977 /* Go through PLATS and create a vector of values consisting of values and
3978 offsets (minus OFFSET) of lattices that contain only a single value. */
3980 static vec<ipa_agg_jf_item>
3981 copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
3983 vec<ipa_agg_jf_item> res = vNULL;
3985 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
3986 return vNULL;
3988 for (struct ipcp_agg_lattice *aglat = plats->aggs; aglat; aglat = aglat->next)
3989 if (aglat->is_single_const ())
3991 struct ipa_agg_jf_item ti;
3992 ti.offset = aglat->offset - offset;
3993 ti.value = aglat->values->value;
3994 res.safe_push (ti);
3996 return res;
/* Intersect all values in INTER with single value lattices in PLATS (while
   subtracting OFFSET).  Items of INTER for which no matching constant exists
   in PLATS have their value cleared to NULL_TREE; if PLATS cannot contribute
   anything, INTER is released.  */

static void
intersect_with_plats (struct ipcp_param_lattices *plats,
		      vec<ipa_agg_jf_item> *inter,
		      HOST_WIDE_INT offset)
{
  struct ipcp_agg_lattice *aglat;
  struct ipa_agg_jf_item *item;
  int k;

  if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
    {
      inter->release ();
      return;
    }

  /* AGLAT is deliberately not reset between items: both INTER and the
     lattice list appear to be ordered by increasing offset, so each scan can
     resume where the previous one stopped -- TODO confirm the ordering
     invariant.  */
  aglat = plats->aggs;
  FOR_EACH_VEC_ELT (*inter, k, item)
    {
      bool found = false;
      if (!item->value)
	continue;
      while (aglat)
	{
	  /* Past the item's offset: no lattice entry matches it.  */
	  if (aglat->offset - offset > item->offset)
	    break;
	  if (aglat->offset - offset == item->offset)
	    {
	      gcc_checking_assert (item->value);
	      if (values_equal_for_ipcp_p (item->value, aglat->values->value))
		found = true;
	      break;
	    }
	  aglat = aglat->next;
	}
      if (!found)
	item->value = NULL_TREE;
    }
}
4041 /* Copy aggregate replacement values of NODE (which is an IPA-CP clone) to the
4042 vector result while subtracting OFFSET from the individual value offsets. */
4044 static vec<ipa_agg_jf_item>
4045 agg_replacements_to_vector (struct cgraph_node *node, int index,
4046 HOST_WIDE_INT offset)
4048 struct ipa_agg_replacement_value *av;
4049 vec<ipa_agg_jf_item> res = vNULL;
4051 for (av = ipa_get_agg_replacements_for_node (node); av; av = av->next)
4052 if (av->index == index
4053 && (av->offset - offset) >= 0)
4055 struct ipa_agg_jf_item item;
4056 gcc_checking_assert (av->value);
4057 item.offset = av->offset - offset;
4058 item.value = av->value;
4059 res.safe_push (item);
4062 return res;
/* Intersect all values in INTER with those that we have already scheduled to
   be replaced in parameter number INDEX of NODE, which is an IPA-CP clone
   (while subtracting OFFSET).  Items without a matching replacement have
   their value cleared to NULL_TREE; if there are no replacements at all,
   INTER is released.  */

static void
intersect_with_agg_replacements (struct cgraph_node *node, int index,
				 vec<ipa_agg_jf_item> *inter,
				 HOST_WIDE_INT offset)
{
  struct ipa_agg_replacement_value *srcvals;
  struct ipa_agg_jf_item *item;
  int i;

  srcvals = ipa_get_agg_replacements_for_node (node);
  if (!srcvals)
    {
      inter->release ();
      return;
    }

  FOR_EACH_VEC_ELT (*inter, i, item)
    {
      struct ipa_agg_replacement_value *av;
      bool found = false;
      if (!item->value)
	continue;
      /* Look for a replacement of the same parameter at the same (adjusted)
	 offset carrying an equal value.  */
      for (av = srcvals; av; av = av->next)
	{
	  gcc_checking_assert (av->value);
	  if (av->index == index
	      && av->offset - offset == item->offset)
	    {
	      if (values_equal_for_ipcp_p (item->value, av->value))
		found = true;
	      break;
	    }
	}
      if (!found)
	item->value = NULL_TREE;
    }
}
/* Intersect values in INTER with aggregate values that come along edge CS to
   parameter number INDEX and return it.  If INTER does not actually exist
   yet, copy all incoming values to it.  If we determine we ended up with no
   values whatsoever, return a released vector.  */

static vec<ipa_agg_jf_item>
intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
				vec<ipa_agg_jf_item> inter)
{
  struct ipa_jump_func *jfunc;
  jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), index);
  if (jfunc->type == IPA_JF_PASS_THROUGH
      && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
    {
      /* The argument is passed through unchanged from the caller's
	 parameter SRC_IDX.  */
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);

      if (caller_info->ipcp_orig_node)
	{
	  /* The caller is itself a clone: intersect with its scheduled
	     aggregate replacements, provided pass-through is permissible for
	     the original node's lattices.  */
	  struct cgraph_node *orig_node = caller_info->ipcp_orig_node;
	  struct ipcp_param_lattices *orig_plats;
	  orig_plats = ipa_get_parm_lattices (IPA_NODE_REF (orig_node),
					      src_idx);
	  if (agg_pass_through_permissible_p (orig_plats, jfunc))
	    {
	      if (!inter.exists ())
		inter = agg_replacements_to_vector (cs->caller, src_idx, 0);
	      else
		intersect_with_agg_replacements (cs->caller, src_idx,
						 &inter, 0);
	    }
	  else
	    {
	      inter.release ();
	      return vNULL;
	    }
	}
      else
	{
	  /* An unspecialized caller: intersect with its lattices.  */
	  struct ipcp_param_lattices *src_plats;
	  src_plats = ipa_get_parm_lattices (caller_info, src_idx);
	  if (agg_pass_through_permissible_p (src_plats, jfunc))
	    {
	      /* Currently we do not produce clobber aggregate jump
		 functions, adjust when we do.  */
	      gcc_checking_assert (!jfunc->agg.items);
	      if (!inter.exists ())
		inter = copy_plats_to_inter (src_plats, 0);
	      else
		intersect_with_plats (src_plats, &inter, 0);
	    }
	  else
	    {
	      inter.release ();
	      return vNULL;
	    }
	}
    }
  else if (jfunc->type == IPA_JF_ANCESTOR
	   && ipa_get_jf_ancestor_agg_preserved (jfunc))
    {
      /* The argument is the caller's parameter SRC_IDX shifted by DELTA
	 bits (an ancestor adjustment); the same two cases as above apply,
	 only with the offset adjustment.  */
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
      struct ipcp_param_lattices *src_plats;
      HOST_WIDE_INT delta = ipa_get_jf_ancestor_offset (jfunc);

      if (caller_info->ipcp_orig_node)
	{
	  if (!inter.exists ())
	    inter = agg_replacements_to_vector (cs->caller, src_idx, delta);
	  else
	    intersect_with_agg_replacements (cs->caller, src_idx, &inter,
					     delta);
	}
      else
	{
	  src_plats = ipa_get_parm_lattices (caller_info, src_idx);;
	  /* Currently we do not produce clobber aggregate jump
	     functions, adjust when we do.  */
	  gcc_checking_assert (!src_plats->aggs || !jfunc->agg.items);
	  if (!inter.exists ())
	    inter = copy_plats_to_inter (src_plats, delta);
	  else
	    intersect_with_plats (src_plats, &inter, delta);
	}
    }
  else if (jfunc->agg.items)
    {
      /* The jump function carries explicit aggregate items; either copy them
	 all (first edge) or clear every INTER item that has no equal
	 counterpart among them.  */
      struct ipa_agg_jf_item *item;
      int k;

      if (!inter.exists ())
	for (unsigned i = 0; i < jfunc->agg.items->length (); i++)
	  inter.safe_push ((*jfunc->agg.items)[i]);
      else
	FOR_EACH_VEC_ELT (inter, k, item)
	  {
	    int l = 0;
	    bool found = false;;

	    if (!item->value)
	      continue;

	    /* The item list appears to be sorted by offset, hence the early
	       break once we are past ITEM's offset.  */
	    while ((unsigned) l < jfunc->agg.items->length ())
	      {
		struct ipa_agg_jf_item *ti;
		ti = &(*jfunc->agg.items)[l];
		if (ti->offset > item->offset)
		  break;
		if (ti->offset == item->offset)
		  {
		    gcc_checking_assert (ti->value);
		    if (values_equal_for_ipcp_p (item->value,
						 ti->value))
		      found = true;
		    break;
		  }
		l++;
	      }
	    if (!found)
	      item->value = NULL;
	  }
    }
  else
    {
      /* This edge contributes no aggregate information at all.  */
      inter.release ();
      return vec<ipa_agg_jf_item>();
    }
  return inter;
}
/* Look at edges in CALLERS and collect all known aggregate values that arrive
   from all of them.  Return them as a linked list of
   ipa_agg_replacement_value entries.  */

static struct ipa_agg_replacement_value *
find_aggregate_values_for_callers_subset (struct cgraph_node *node,
					  vec<cgraph_edge *> callers)
{
  struct ipa_node_params *dest_info = IPA_NODE_REF (node);
  struct ipa_agg_replacement_value *res;
  struct ipa_agg_replacement_value **tail = &res;
  struct cgraph_edge *cs;
  int i, j, count = ipa_get_param_count (dest_info);

  /* Only parameters for which every caller supplies an argument can be
     examined.  */
  FOR_EACH_VEC_ELT (callers, j, cs)
    {
      int c = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
      if (c < count)
	count = c;
    }

  for (i = 0; i < count; i++)
    {
      struct cgraph_edge *cs;
      vec<ipa_agg_jf_item> inter = vNULL;
      struct ipa_agg_jf_item *item;
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (dest_info, i);
      int j;

      /* Among other things, the following check should deal with all by_ref
	 mismatches.  */
      if (plats->aggs_bottom)
	continue;

      /* Intersect the aggregate values arriving over each caller edge; an
	 empty intersection means nothing is known for this parameter.  */
      FOR_EACH_VEC_ELT (callers, j, cs)
	{
	  inter = intersect_aggregates_with_edge (cs, i, inter);

	  if (!inter.exists ())
	    goto next_param;
	}

      /* Turn the surviving items into replacement values.  */
      FOR_EACH_VEC_ELT (inter, j, item)
	{
	  struct ipa_agg_replacement_value *v;

	  if (!item->value)
	    continue;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  v->index = i;
	  v->offset = item->offset;
	  v->value = item->value;
	  v->by_ref = plats->aggs_by_ref;
	  *tail = v;
	  tail = &v->next;
	}

    next_param:
      if (inter.exists ())
	inter.release ();
    }
  *tail = NULL;
  return res;
}
4303 /* Turn KNOWN_AGGS into a list of aggregate replacement values. */
4305 static struct ipa_agg_replacement_value *
4306 known_aggs_to_agg_replacement_list (vec<ipa_agg_jump_function> known_aggs)
4308 struct ipa_agg_replacement_value *res;
4309 struct ipa_agg_replacement_value **tail = &res;
4310 struct ipa_agg_jump_function *aggjf;
4311 struct ipa_agg_jf_item *item;
4312 int i, j;
4314 FOR_EACH_VEC_ELT (known_aggs, i, aggjf)
4315 FOR_EACH_VEC_SAFE_ELT (aggjf->items, j, item)
4317 struct ipa_agg_replacement_value *v;
4318 v = ggc_alloc<ipa_agg_replacement_value> ();
4319 v->index = i;
4320 v->offset = item->offset;
4321 v->value = item->value;
4322 v->by_ref = aggjf->by_ref;
4323 *tail = v;
4324 tail = &v->next;
4326 *tail = NULL;
4327 return res;
4330 /* Determine whether CS also brings all scalar values that the NODE is
4331 specialized for. */
4333 static bool
4334 cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
4335 struct cgraph_node *node)
4337 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
4338 int count = ipa_get_param_count (dest_info);
4339 struct ipa_node_params *caller_info;
4340 struct ipa_edge_args *args;
4341 int i;
4343 caller_info = IPA_NODE_REF (cs->caller);
4344 args = IPA_EDGE_REF (cs);
4345 for (i = 0; i < count; i++)
4347 struct ipa_jump_func *jump_func;
4348 tree val, t;
4350 val = dest_info->known_csts[i];
4351 if (!val)
4352 continue;
4354 if (i >= ipa_get_cs_argument_count (args))
4355 return false;
4356 jump_func = ipa_get_ith_jump_func (args, i);
4357 t = ipa_value_from_jfunc (caller_info, jump_func);
4358 if (!t || !values_equal_for_ipcp_p (val, t))
4359 return false;
4361 return true;
4364 /* Determine whether CS also brings all aggregate values that NODE is
4365 specialized for. */
4366 static bool
4367 cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
4368 struct cgraph_node *node)
4370 struct ipa_node_params *orig_caller_info = IPA_NODE_REF (cs->caller);
4371 struct ipa_node_params *orig_node_info;
4372 struct ipa_agg_replacement_value *aggval;
4373 int i, ec, count;
4375 aggval = ipa_get_agg_replacements_for_node (node);
4376 if (!aggval)
4377 return true;
4379 count = ipa_get_param_count (IPA_NODE_REF (node));
4380 ec = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
4381 if (ec < count)
4382 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
4383 if (aggval->index >= ec)
4384 return false;
4386 orig_node_info = IPA_NODE_REF (IPA_NODE_REF (node)->ipcp_orig_node);
4387 if (orig_caller_info->ipcp_orig_node)
4388 orig_caller_info = IPA_NODE_REF (orig_caller_info->ipcp_orig_node);
4390 for (i = 0; i < count; i++)
4392 static vec<ipa_agg_jf_item> values = vec<ipa_agg_jf_item>();
4393 struct ipcp_param_lattices *plats;
4394 bool interesting = false;
4395 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
4396 if (aggval->index == i)
4398 interesting = true;
4399 break;
4401 if (!interesting)
4402 continue;
4404 plats = ipa_get_parm_lattices (orig_node_info, aggval->index);
4405 if (plats->aggs_bottom)
4406 return false;
4408 values = intersect_aggregates_with_edge (cs, i, values);
4409 if (!values.exists ())
4410 return false;
4412 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
4413 if (aggval->index == i)
4415 struct ipa_agg_jf_item *item;
4416 int j;
4417 bool found = false;
4418 FOR_EACH_VEC_ELT (values, j, item)
4419 if (item->value
4420 && item->offset == av->offset
4421 && values_equal_for_ipcp_p (item->value, av->value))
4423 found = true;
4424 break;
4426 if (!found)
4428 values.release ();
4429 return false;
4433 return true;
4436 /* Given an original NODE and a VAL for which we have already created a
4437 specialized clone, look whether there are incoming edges that still lead
4438 into the old node but now also bring the requested value and also conform to
4439 all other criteria such that they can be redirected the special node.
4440 This function can therefore redirect the final edge in a SCC. */
4442 template <typename valtype>
4443 static void
4444 perhaps_add_new_callers (cgraph_node *node, ipcp_value<valtype> *val)
4446 ipcp_value_source<valtype> *src;
4447 profile_count redirected_sum = profile_count::zero ();
4449 for (src = val->sources; src; src = src->next)
4451 struct cgraph_edge *cs = src->cs;
4452 while (cs)
4454 if (cgraph_edge_brings_value_p (cs, src, node)
4455 && cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
4456 && cgraph_edge_brings_all_agg_vals_for_node (cs, val->spec_node))
4458 if (dump_file)
4459 fprintf (dump_file, " - adding an extra caller %s of %s\n",
4460 cs->caller->dump_name (),
4461 val->spec_node->dump_name ());
4463 cs->redirect_callee_duplicating_thunks (val->spec_node);
4464 val->spec_node->expand_all_artificial_thunks ();
4465 if (cs->count.initialized_p ())
4466 redirected_sum = redirected_sum + cs->count;
4468 cs = get_next_cgraph_edge_clone (cs);
4472 if (redirected_sum > profile_count::zero ())
4473 update_specialized_profile (val->spec_node, node, redirected_sum);
4476 /* Return true if KNOWN_CONTEXTS contain at least one useful context. */
4478 static bool
4479 known_contexts_useful_p (vec<ipa_polymorphic_call_context> known_contexts)
4481 ipa_polymorphic_call_context *ctx;
4482 int i;
4484 FOR_EACH_VEC_ELT (known_contexts, i, ctx)
4485 if (!ctx->useless_p ())
4486 return true;
4487 return false;
/* Return a copy of KNOWN_CONTEXTS if it contains at least one useful
   context, otherwise return vNULL.  (The previous comment referred to
   KNOWN_CSTS, which this function does not touch.)  */

static vec<ipa_polymorphic_call_context>
copy_useful_known_contexts (vec<ipa_polymorphic_call_context> known_contexts)
{
  if (known_contexts_useful_p (known_contexts))
    return known_contexts.copy ();
  else
    return vNULL;
}
4501 /* Copy KNOWN_CSTS and modify the copy according to VAL and INDEX. If
4502 non-empty, replace KNOWN_CONTEXTS with its copy too. */
4504 static void
4505 modify_known_vectors_with_val (vec<tree> *known_csts,
4506 vec<ipa_polymorphic_call_context> *known_contexts,
4507 ipcp_value<tree> *val,
4508 int index)
4510 *known_csts = known_csts->copy ();
4511 *known_contexts = copy_useful_known_contexts (*known_contexts);
4512 (*known_csts)[index] = val->value;
4515 /* Replace KNOWN_CSTS with its copy. Also copy KNOWN_CONTEXTS and modify the
4516 copy according to VAL and INDEX. */
4518 static void
4519 modify_known_vectors_with_val (vec<tree> *known_csts,
4520 vec<ipa_polymorphic_call_context> *known_contexts,
4521 ipcp_value<ipa_polymorphic_call_context> *val,
4522 int index)
4524 *known_csts = known_csts->copy ();
4525 *known_contexts = known_contexts->copy ();
4526 (*known_contexts)[index] = val->value;
4529 /* Return true if OFFSET indicates this was not an aggregate value or there is
4530 a replacement equivalent to VALUE, INDEX and OFFSET among those in the
4531 AGGVALS list. */
4533 DEBUG_FUNCTION bool
4534 ipcp_val_agg_replacement_ok_p (ipa_agg_replacement_value *aggvals,
4535 int index, HOST_WIDE_INT offset, tree value)
4537 if (offset == -1)
4538 return true;
4540 while (aggvals)
4542 if (aggvals->index == index
4543 && aggvals->offset == offset
4544 && values_equal_for_ipcp_p (aggvals->value, value))
4545 return true;
4546 aggvals = aggvals->next;
4548 return false;
/* Return true if OFFSET is minus one, because the source of a polymorphic
   context cannot be an aggregate value.  Overload used by the checking
   assert in decide_about_value for context values.  */

DEBUG_FUNCTION bool
ipcp_val_agg_replacement_ok_p (ipa_agg_replacement_value *,
			       int , HOST_WIDE_INT offset,
			       ipa_polymorphic_call_context)
{
  return offset == -1;
}
/* Decide whether to create a special version of NODE for value VAL of
   parameter at the given INDEX.  If OFFSET is -1, the value is for the
   parameter itself, otherwise it is stored at the given OFFSET of the
   parameter.  KNOWN_CSTS, KNOWN_CONTEXTS and KNOWN_AGGS describe the other
   already known values.  Return true if a clone was created.  */

template <typename valtype>
static bool
decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
		    ipcp_value<valtype> *val, vec<tree> known_csts,
		    vec<ipa_polymorphic_call_context> known_contexts)
{
  struct ipa_agg_replacement_value *aggvals;
  int freq_sum, caller_count;
  profile_count count_sum;
  vec<cgraph_edge *> callers;

  if (val->spec_node)
    {
      /* A clone for this value already exists; only try to redirect any
	 remaining edges to it.  */
      perhaps_add_new_callers (node, val);
      return false;
    }
  else if (val->local_size_cost + overall_size > max_new_size)
    {
      /* Respect the overall unit-growth budget.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " Ignoring candidate value because "
		 "max_new_size would be reached with %li.\n",
		 val->local_size_cost + overall_size);
      return false;
    }
  else if (!get_info_about_necessary_edges (val, node, &freq_sum, &count_sum,
					    &caller_count))
    /* No edge actually brings this value; nothing to specialize for.  */
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, " - considering value ");
      print_ipcp_constant_value (dump_file, val->value);
      fprintf (dump_file, " for ");
      ipa_dump_param (dump_file, IPA_NODE_REF (node), index);
      if (offset != -1)
	fprintf (dump_file, ", offset: " HOST_WIDE_INT_PRINT_DEC, offset);
      fprintf (dump_file, " (caller_count: %i)\n", caller_count);
    }

  /* Profitability check: first with local benefits/costs only, then also
     counting benefits and costs propagated from values that depend on this
     one.  */
  if (!good_cloning_opportunity_p (node, val->local_time_benefit,
				   freq_sum, count_sum,
				   val->local_size_cost)
      && !good_cloning_opportunity_p (node,
				      val->local_time_benefit
				      + val->prop_time_benefit,
				      freq_sum, count_sum,
				      val->local_size_cost
				      + val->prop_size_cost))
    return false;

  if (dump_file)
    fprintf (dump_file, " Creating a specialized node of %s.\n",
	     node->dump_name ());

  callers = gather_edges_for_value (val, node, caller_count);
  if (offset == -1)
    modify_known_vectors_with_val (&known_csts, &known_contexts, val, index);
  else
    {
      /* Aggregate values do not go into the scalar vectors; just make
	 private copies so they can be extended below.  */
      known_csts = known_csts.copy ();
      known_contexts = copy_useful_known_contexts (known_contexts);
    }
  find_more_scalar_values_for_callers_subset (node, known_csts, callers);
  find_more_contexts_for_caller_subset (node, &known_contexts, callers);
  aggvals = find_aggregate_values_for_callers_subset (node, callers);
  gcc_checking_assert (ipcp_val_agg_replacement_ok_p (aggvals, index,
						      offset, val->value));
  val->spec_node = create_specialized_node (node, known_csts, known_contexts,
					    aggvals, callers);
  overall_size += val->local_size_cost;

  /* TODO: If for some lattice there is only one other known value
     left, make a special node for it too. */

  return true;
}
/* Decide whether and what specialized clones of NODE should be created.
   Return true if any clone was made.  */

static bool
decide_whether_version_node (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);
  vec<tree> known_csts;
  vec<ipa_polymorphic_call_context> known_contexts;
  vec<ipa_agg_jump_function> known_aggs = vNULL;
  bool ret = false;

  if (count == 0)
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEvaluating opportunities for %s.\n",
	     node->dump_name ());

  /* Aggregates are only needed if we may clone for all contexts.  */
  gather_context_independent_values (info, &known_csts, &known_contexts,
				     info->do_clone_for_all_contexts ? &known_aggs
				     : NULL, NULL);

  for (i = 0; i < count;i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      ipcp_lattice<tree> *lat = &plats->itself;
      ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;

      /* Scalar constants; skip if the value is already context-independently
	 known (then it is in known_csts).  */
      if (!lat->bottom
	  && !known_csts[i])
	{
	  ipcp_value<tree> *val;
	  for (val = lat->values; val; val = val->next)
	    ret |= decide_about_value (node, i, -1, val, known_csts,
				       known_contexts);
	}

      /* Aggregate constants stored at known offsets of this parameter.  */
      if (!plats->aggs_bottom)
	{
	  struct ipcp_agg_lattice *aglat;
	  ipcp_value<tree> *val;
	  for (aglat = plats->aggs; aglat; aglat = aglat->next)
	    if (!aglat->bottom && aglat->values
		/* If the following is false, the one value is in
		   known_aggs.  */
		&& (plats->aggs_contain_variable
		    || !aglat->is_single_const ()))
	      for (val = aglat->values; val; val = val->next)
		ret |= decide_about_value (node, i, aglat->offset, val,
					   known_csts, known_contexts);
	}

      /* Polymorphic call contexts.  */
      if (!ctxlat->bottom
	  && known_contexts[i].useless_p ())
	{
	  ipcp_value<ipa_polymorphic_call_context> *val;
	  for (val = ctxlat->values; val; val = val->next)
	    ret |= decide_about_value (node, i, -1, val, known_csts,
				       known_contexts);
	}

      /* decide_about_value may have created clones, which can reallocate the
	 node-params vector; re-fetch INFO.  */
      info = IPA_NODE_REF (node);
    }

  if (info->do_clone_for_all_contexts)
    {
      struct cgraph_node *clone;
      vec<cgraph_edge *> callers;

      if (dump_file)
	fprintf (dump_file, " - Creating a specialized node of %s "
		 "for all known contexts.\n", node->dump_name ());

      callers = node->collect_callers ();

      if (!known_contexts_useful_p (known_contexts))
	{
	  known_contexts.release ();
	  known_contexts = vNULL;
	}
      clone = create_specialized_node (node, known_csts, known_contexts,
				       known_aggs_to_agg_replacement_list (known_aggs),
				       callers);
      /* Re-fetch again after cloning.  */
      info = IPA_NODE_REF (node);
      info->do_clone_for_all_contexts = false;
      IPA_NODE_REF (clone)->is_all_contexts_clone = true;
      for (i = 0; i < count; i++)
	vec_free (known_aggs[i].items);
      known_aggs.release ();
      ret = true;
    }
  else
    {
      /* The vectors were not consumed by create_specialized_node, so free
	 them here.  */
      known_csts.release ();
      known_contexts.release ();
    }

  return ret;
}
4745 /* Transitively mark all callees of NODE within the same SCC as not dead. */
4747 static void
4748 spread_undeadness (struct cgraph_node *node)
4750 struct cgraph_edge *cs;
4752 for (cs = node->callees; cs; cs = cs->next_callee)
4753 if (ipa_edge_within_scc (cs))
4755 struct cgraph_node *callee;
4756 struct ipa_node_params *info;
4758 callee = cs->callee->function_symbol (NULL);
4759 info = IPA_NODE_REF (callee);
4761 if (info->node_dead)
4763 info->node_dead = 0;
4764 spread_undeadness (callee);
4769 /* Return true if NODE has a caller from outside of its SCC that is not
4770 dead. Worker callback for cgraph_for_node_and_aliases. */
4772 static bool
4773 has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
4774 void *data ATTRIBUTE_UNUSED)
4776 struct cgraph_edge *cs;
4778 for (cs = node->callers; cs; cs = cs->next_caller)
4779 if (cs->caller->thunk.thunk_p
4780 && cs->caller->call_for_symbol_thunks_and_aliases
4781 (has_undead_caller_from_outside_scc_p, NULL, true))
4782 return true;
4783 else if (!ipa_edge_within_scc (cs)
4784 && !IPA_NODE_REF (cs->caller)->node_dead)
4785 return true;
4786 return false;
4790 /* Identify nodes within the same SCC as NODE which are no longer needed
4791 because of new clones and will be removed as unreachable. */
4793 static void
4794 identify_dead_nodes (struct cgraph_node *node)
4796 struct cgraph_node *v;
4797 for (v = node; v; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4798 if (v->local.local
4799 && !v->call_for_symbol_thunks_and_aliases
4800 (has_undead_caller_from_outside_scc_p, NULL, true))
4801 IPA_NODE_REF (v)->node_dead = 1;
4803 for (v = node; v; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4804 if (!IPA_NODE_REF (v)->node_dead)
4805 spread_undeadness (v);
4807 if (dump_file && (dump_flags & TDF_DETAILS))
4809 for (v = node; v; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4810 if (IPA_NODE_REF (v)->node_dead)
4811 fprintf (dump_file, " Marking node as dead: %s.\n", v->dump_name ());
4815 /* The decision stage. Iterate over the topological order of call graph nodes
4816 TOPO and make specialized clones if deemed beneficial. */
4818 static void
4819 ipcp_decision_stage (struct ipa_topo_info *topo)
4821 int i;
4823 if (dump_file)
4824 fprintf (dump_file, "\nIPA decision stage:\n\n");
4826 for (i = topo->nnodes - 1; i >= 0; i--)
4828 struct cgraph_node *node = topo->order[i];
4829 bool change = false, iterate = true;
4831 while (iterate)
4833 struct cgraph_node *v;
4834 iterate = false;
4835 for (v = node; v; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4836 if (v->has_gimple_body_p ()
4837 && ipcp_versionable_function_p (v))
4838 iterate |= decide_whether_version_node (v);
4840 change |= iterate;
4842 if (change)
4843 identify_dead_nodes (node);
/* Look up all the bits information that we have discovered and copy it over
   to the transformation summary.  */

static void
ipcp_store_bits_results (void)
{
  cgraph_node *node;

  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    {
      ipa_node_params *info = IPA_NODE_REF (node);
      bool dumped_sth = false;
      bool found_useful_result = false;

      if (!opt_for_fn (node->decl, flag_ipa_bit_cp))
	{
	  if (dump_file)
	    fprintf (dump_file, "Not considering %s for ipa bitwise propagation "
		     "; -fipa-bit-cp: disabled.\n",
		     node->name ());
	  continue;
	}

      /* For clones, the lattices were computed on the original node.  */
      if (info->ipcp_orig_node)
	info = IPA_NODE_REF (info->ipcp_orig_node);

      /* Cheap pre-scan: skip the node if no parameter has constant bits.  */
      unsigned count = ipa_get_param_count (info);
      for (unsigned i = 0; i < count; i++)
	{
	  ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
	  if (plats->bits_lattice.constant_p ())
	    {
	      found_useful_result = true;
	      break;
	    }
	}

      if (!found_useful_result)
	continue;

      ipcp_grow_transformations_if_necessary ();
      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_reserve_exact (ts->bits, count);

      /* Push one entry per parameter (NULL when nothing is known) so that
	 indices in ts->bits stay aligned with parameter indices.  */
      for (unsigned i = 0; i < count; i++)
	{
	  ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
	  ipa_bits *jfbits;

	  if (plats->bits_lattice.constant_p ())
	    jfbits
	      = ipa_get_ipa_bits_for_value (plats->bits_lattice.get_value (),
					    plats->bits_lattice.get_mask ());
	  else
	    jfbits = NULL;

	  ts->bits->quick_push (jfbits);
	  if (!dump_file || !jfbits)
	    continue;
	  /* Emit the per-function header only once, before the first
	     parameter that has useful bits.  */
	  if (!dumped_sth)
	    {
	      fprintf (dump_file, "Propagated bits info for function %s:\n",
		       node->dump_name ());
	      dumped_sth = true;
	    }
	  fprintf (dump_file, " param %i: value = ", i);
	  print_hex (jfbits->value, dump_file);
	  fprintf (dump_file, ", mask = ");
	  print_hex (jfbits->mask, dump_file);
	  fprintf (dump_file, "\n");
	}
    }
}
/* Look up all VR information that we have discovered and copy it over
   to the transformation summary.  */

static void
ipcp_store_vr_results (void)
{
  cgraph_node *node;

  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    {
      ipa_node_params *info = IPA_NODE_REF (node);
      bool found_useful_result = false;

      if (!opt_for_fn (node->decl, flag_ipa_vrp))
	{
	  if (dump_file)
	    fprintf (dump_file, "Not considering %s for VR discovery "
		     "and propagate; -fipa-ipa-vrp: disabled.\n",
		     node->name ());
	  continue;
	}

      /* For clones, the lattices were computed on the original node.  */
      if (info->ipcp_orig_node)
	info = IPA_NODE_REF (info->ipcp_orig_node);

      /* Cheap pre-scan: a value range is useful only if it is neither bottom
	 nor top.  */
      unsigned count = ipa_get_param_count (info);
      for (unsigned i = 0; i < count; i++)
	{
	  ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
	  if (!plats->m_value_range.bottom_p ()
	      && !plats->m_value_range.top_p ())
	    {
	      found_useful_result = true;
	      break;
	    }
	}
      if (!found_useful_result)
	continue;

      ipcp_grow_transformations_if_necessary ();
      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_reserve_exact (ts->m_vr, count);

      /* Push one entry per parameter so ts->m_vr indices stay aligned with
	 parameter indices; unknown ranges get a VR_VARYING placeholder.  */
      for (unsigned i = 0; i < count; i++)
	{
	  ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
	  ipa_vr vr;

	  if (!plats->m_value_range.bottom_p ()
	      && !plats->m_value_range.top_p ())
	    {
	      vr.known = true;
	      vr.type = plats->m_value_range.m_vr.type;
	      vr.min = plats->m_value_range.m_vr.min;
	      vr.max = plats->m_value_range.m_vr.max;
	    }
	  else
	    {
	      vr.known = false;
	      vr.type = VR_VARYING;
	      vr.min = vr.max = wi::zero (INT_TYPE_SIZE);
	    }
	  ts->m_vr->quick_push (vr);
	}
    }
}
/* The IPCP driver.  Runs propagation and the decision stage, stores the
   discovered bits/VR results into transformation summaries and releases all
   pass-local data.  Always returns 0 (no TODO flags).  */

static unsigned int
ipcp_driver (void)
{
  struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
  struct cgraph_edge_hook_list *edge_removal_hook_holder;
  struct ipa_topo_info topo;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  grow_edge_clone_vectors ();
  /* Register hooks so the edge-clone bookkeeping stays in sync with call
     graph changes made while the pass runs.  */
  edge_duplication_hook_holder
    = symtab->add_edge_duplication_hook (&ipcp_edge_duplication_hook, NULL);
  edge_removal_hook_holder
    = symtab->add_edge_removal_hook (&ipcp_edge_removal_hook, NULL);

  if (dump_file)
    {
      fprintf (dump_file, "\nIPA structures before propagation:\n");
      if (dump_flags & TDF_DETAILS)
	ipa_print_all_params (dump_file);
      ipa_print_all_jump_functions (dump_file);
    }

  /* Topological sort.  */
  build_toporder_info (&topo);
  /* Do the interprocedural propagation.  */
  ipcp_propagate_stage (&topo);
  /* Decide what constant propagation and cloning should be performed.  */
  ipcp_decision_stage (&topo);
  /* Store results of bits propagation.  */
  ipcp_store_bits_results ();
  /* Store results of value range propagation.  */
  ipcp_store_vr_results ();

  /* Free all IPCP structures.  */
  free_toporder_info (&topo);
  next_edge_clone.release ();
  prev_edge_clone.release ();
  /* Unregister the hooks before tearing down the structures they update.  */
  symtab->remove_edge_removal_hook (edge_removal_hook_holder);
  symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
  ipa_free_all_structures_after_ipa_cp ();
  if (dump_file)
    fprintf (dump_file, "\nIPA constant propagation end\n");
  return 0;
}
/* Initialization and computation of IPCP data structures.  This is the initial
   intraprocedural analysis of functions, which gathers information to be
   propagated later on.  */

static void
ipcp_generate_summary (void)
{
  struct cgraph_node *node;

  if (dump_file)
    fprintf (dump_file, "\nIPA constant propagation start:\n");
  ipa_register_cgraph_hooks ();

  /* Build parameter descriptors and jump functions for every function with a
     gimple body.  */
  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    ipa_analyze_node (node);
}
/* Write ipcp summary (the jump functions) for nodes in SET; LTO streaming
   hook.  */

static void
ipcp_write_summary (void)
{
  ipa_prop_write_jump_functions ();
}
/* Read ipcp summary (the jump functions); LTO streaming hook.  */

static void
ipcp_read_summary (void)
{
  ipa_prop_read_jump_functions ();
}
namespace {

/* Pass metadata for the IPA constant propagation pass.  */

const pass_data pass_data_ipa_cp =
{
  IPA_PASS, /* type */
  "cp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_CONSTANT_PROP, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab | TODO_remove_functions ), /* todo_flags_finish */
};

/* The IPA-CP pass object; wires summary generation, LTO streaming and the
   per-function transformation into the pass manager.  */

class pass_ipa_cp : public ipa_opt_pass_d
{
public:
  pass_ipa_cp (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cp, ctxt,
		      ipcp_generate_summary, /* generate_summary */
		      ipcp_write_summary, /* write_summary */
		      ipcp_read_summary, /* read_summary */
		      ipcp_write_transformation_summaries, /*
		      write_optimization_summary */
		      ipcp_read_transformation_summaries, /*
		      read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      ipcp_transform_function, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* FIXME: We should remove the optimize check after we ensure we never run
	 IPA passes when not optimizing.  */
      return (flag_ipa_cp && optimize) || in_lto_p;
    }

  virtual unsigned int execute (function *) { return ipcp_driver (); }

}; // class pass_ipa_cp

} // anon namespace
/* Create an instance of the IPA-CP pass for context CTXT; factory function
   called by the pass manager.  */

ipa_opt_pass_d *
make_pass_ipa_cp (gcc::context *ctxt)
{
  return new pass_ipa_cp (ctxt);
}
/* Reset all state within ipa-cp.c so that we can rerun the compiler
   within the same process.  For use by toplev::finalize.  */

void
ipa_cp_c_finalize (void)
{
  /* Reset the file-level accumulators used by the profitability and
     size-budget heuristics.  */
  max_count = profile_count::zero ();
  overall_size = 0;
  max_new_size = 0;
}