ipa-cp.c (propagate_bits_accross_jump_function): Introduce space between callee name...
[official-gcc.git] / gcc / ipa-cp.c
blob7380b2a34054225da1b43374bab96a9221366ea3
1 /* Interprocedural constant propagation
2 Copyright (C) 2005-2016 Free Software Foundation, Inc.
4 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
5 <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Interprocedural constant propagation (IPA-CP).
25 The goal of this transformation is to
27 1) discover functions which are always invoked with some arguments with the
28 same known constant values and modify the functions so that the
29 subsequent optimizations can take advantage of the knowledge, and
31 2) partial specialization - create specialized versions of functions
32 transformed in this way if some parameters are known constants only in
33 certain contexts but the estimated tradeoff between speedup and cost size
34 is deemed good.
36 The algorithm also propagates types and attempts to perform type based
37 devirtualization. Types are propagated much like constants.
39 The algorithm basically consists of three stages. In the first, functions
40 are analyzed one at a time and jump functions are constructed for all known
41 call-sites. In the second phase, the pass propagates information from the
42 jump functions across the call to reveal what values are available at what
43 call sites, performs estimations of effects of known values on functions and
44 their callees, and finally decides what specialized extra versions should be
45 created. In the third, the special versions materialize and appropriate
46 calls are redirected.
48 The algorithm used is to a certain extent based on "Interprocedural Constant
49 Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
50 Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
51 Cooper, Mary W. Hall, and Ken Kennedy.
54 First stage - intraprocedural analysis
55 =======================================
57 This phase computes jump_function and modification flags.
59 A jump function for a call-site represents the values passed as an actual
60 arguments of a given call-site. In principle, there are three types of
61 values:
63 Pass through - the caller's formal parameter is passed as an actual
64 argument, plus an operation on it can be performed.
65 Constant - a constant is passed as an actual argument.
66 Unknown - neither of the above.
68 All jump function types are described in detail in ipa-prop.h, together with
69 the data structures that represent them and methods of accessing them.
71 ipcp_generate_summary() is the main function of the first stage.
73 Second stage - interprocedural analysis
74 ========================================
76 This stage is itself divided into two phases. In the first, we propagate
77 known values over the call graph, in the second, we make cloning decisions.
78 It uses a different algorithm than the original Callahan's paper.
80 First, we traverse the functions topologically from callers to callees and,
81 for each strongly connected component (SCC), we propagate constants
82 according to previously computed jump functions. We also record what known
83 values depend on other known values and estimate local effects. Finally, we
84 propagate cumulative information about these effects from dependent values
85 to those on which they depend.
87 Second, we again traverse the call graph in the same topological order and
88 make clones for functions which we know are called with the same values in
89 all contexts and decide about extra specialized clones of functions just for
90 some contexts - these decisions are based on both local estimates and
91 cumulative estimates propagated from callees.
93    ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
94    second stage.
96 Third phase - materialization of clones, call statement updates.
97 ============================================
99 This stage is currently performed by call graph code (mainly in cgraphunit.c
100 and tree-inline.c) according to instructions inserted to the call graph by
101 the second stage. */
103 #include "config.h"
104 #include "system.h"
105 #include "coretypes.h"
106 #include "backend.h"
107 #include "tree.h"
108 #include "gimple-expr.h"
109 #include "predict.h"
110 #include "alloc-pool.h"
111 #include "tree-pass.h"
112 #include "cgraph.h"
113 #include "diagnostic.h"
114 #include "fold-const.h"
115 #include "gimple-fold.h"
116 #include "symbol-summary.h"
117 #include "tree-vrp.h"
118 #include "ipa-prop.h"
119 #include "tree-pretty-print.h"
120 #include "tree-inline.h"
121 #include "params.h"
122 #include "ipa-inline.h"
123 #include "ipa-utils.h"
124 #include "tree-ssa-ccp.h"
126 template <typename valtype> class ipcp_value;
128 /* Describes a particular source for an IPA-CP value. */
130 template <typename valtype>
131 class ipcp_value_source
133 public:
134 /* Aggregate offset of the source, negative if the source is scalar value of
135 the argument itself. */
136 HOST_WIDE_INT offset;
137 /* The incoming edge that brought the value. */
138 cgraph_edge *cs;
139 /* If the jump function that resulted in this value was a pass-through or an
140 ancestor, this is the ipcp_value of the caller from which the described
141 value has been derived. Otherwise it is NULL. */
142 ipcp_value<valtype> *val;
143 /* Next pointer in a linked list of sources of a value. */
144 ipcp_value_source *next;
145 /* If the jump function that resulted in this value was a pass-through or an
146 ancestor, this is the index of the parameter of the caller the jump
147 function references. */
148 int index;
151 /* Common ancestor for all ipcp_value instantiations. */
153 class ipcp_value_base
155 public:
156 /* Time benefit and size cost that specializing the function for this value
157 would bring about in this function alone. */
158 int local_time_benefit, local_size_cost;
159 /* Time benefit and size cost that specializing the function for this value
160 can bring about in its callees (transitively). */
161 int prop_time_benefit, prop_size_cost;
164 /* Describes one particular value stored in struct ipcp_lattice. */
166 template <typename valtype>
167 class ipcp_value : public ipcp_value_base
169 public:
170 /* The actual value for the given parameter. */
171 valtype value;
172 /* The list of sources from which this value originates. */
173 ipcp_value_source <valtype> *sources;
174 /* Next pointers in a linked list of all values in a lattice. */
175 ipcp_value *next;
176 /* Next pointers in a linked list of values in a strongly connected component
177 of values. */
178 ipcp_value *scc_next;
179 /* Next pointers in a linked list of SCCs of values sorted topologically
180 according their sources. */
181 ipcp_value *topo_next;
182 /* A specialized node created for this value, NULL if none has been (so far)
183 created. */
184 cgraph_node *spec_node;
185 /* Depth first search number and low link for topological sorting of
186 values. */
187 int dfs, low_link;
188 /* True if this value is currently on the topo-sort stack. */
189 bool on_stack;
191 void add_source (cgraph_edge *cs, ipcp_value *src_val, int src_idx,
192 HOST_WIDE_INT offset);
195 /* Lattice describing potential values of a formal parameter of a function, or
196 a part of an aggregate. TOP is represented by a lattice with zero values
197 and with contains_variable and bottom flags cleared. BOTTOM is represented
198 by a lattice with the bottom flag set. In that case, values and
199 contains_variable flag should be disregarded. */
201 template <typename valtype>
202 class ipcp_lattice
204 public:
205 /* The list of known values and types in this lattice. Note that values are
206 not deallocated if a lattice is set to bottom because there may be value
207 sources referencing them. */
208 ipcp_value<valtype> *values;
209 /* Number of known values and types in this lattice. */
210 int values_count;
211 /* The lattice contains a variable component (in addition to values). */
212 bool contains_variable;
213 /* The value of the lattice is bottom (i.e. variable and unusable for any
214 propagation). */
215 bool bottom;
217 inline bool is_single_const ();
218 inline bool set_to_bottom ();
219 inline bool set_contains_variable ();
220 bool add_value (valtype newval, cgraph_edge *cs,
221 ipcp_value<valtype> *src_val = NULL,
222 int src_idx = 0, HOST_WIDE_INT offset = -1);
223 void print (FILE * f, bool dump_sources, bool dump_benefits);
226 /* Lattice of tree values with an offset to describe a part of an
227 aggregate. */
229 class ipcp_agg_lattice : public ipcp_lattice<tree>
231 public:
232 /* Offset that is being described by this lattice. */
233 HOST_WIDE_INT offset;
234 /* Size so that we don't have to re-compute it every time we traverse the
235 list. Must correspond to TYPE_SIZE of all lat values. */
236 HOST_WIDE_INT size;
237 /* Next element of the linked list of aggregate lattices. */
238 struct ipcp_agg_lattice *next;
241 /* Lattice of pointer alignment. Unlike the previous types of lattices, this
242 one is only capable of holding one value. */
244 class ipcp_alignment_lattice
246 public:
247 /* If bottom and top are both false, these two fields hold values as given by
248 ptr_info_def and get_pointer_alignment_1. */
249 unsigned align;
/* MISALIGN is kept reduced modulo ALIGN by the meet operations below.  */
250 unsigned misalign;
252 inline bool bottom_p () const;
253 inline bool top_p () const;
254 inline bool set_to_bottom ();
255 bool meet_with (unsigned new_align, unsigned new_misalign);
256 bool meet_with (const ipcp_alignment_lattice &other, HOST_WIDE_INT offset);
257 void print (FILE * f);
258 private:
259 /* If set, this lattice is bottom and all other fields should be
260 disregarded. */
261 bool bottom;
262 /* If bottom and not_top are false, the lattice is TOP. If not_top is true,
263 the known alignment is stored in the fields align and misalign. The field
264 is negated so that memset to zero initializes the lattice to TOP
265 state. */
266 bool not_top;
268 bool meet_with_1 (unsigned new_align, unsigned new_misalign);
271 /* Lattice of known bits, only capable of holding one value.
272 Bitwise constant propagation propagates which bits of a
273 value are constant.
274 For eg:
275 int f(int x)
277 return some_op (x);
280 int f1(int y)
282 if (cond)
283 return f (y & 0xff);
284 else
285 return f (y & 0xf);
288 In the above case, the param 'x' will always have all
289 the bits (except the low 8 bits) set to 0.
290 Hence the mask of 'x' would be 0xff. The mask
291 reflects that the low 8 bits are unknown.
292 The actual propagated value is given by m_value & ~m_mask. */
294 class ipcp_bits_lattice
296 public:
297 bool bottom_p () { return m_lattice_val == IPA_BITS_VARYING; }
298 bool top_p () { return m_lattice_val == IPA_BITS_UNDEFINED; }
299 bool constant_p () { return m_lattice_val == IPA_BITS_CONSTANT; }
300 bool set_to_bottom ();
301 bool set_to_constant (widest_int, widest_int);
303 widest_int get_value () { return m_value; }
304 widest_int get_mask () { return m_mask; }
306 bool meet_with (ipcp_bits_lattice& other, unsigned, signop,
307 enum tree_code, tree);
309 bool meet_with (widest_int, widest_int, unsigned);
311 void print (FILE *);
313 private:
314 enum { IPA_BITS_UNDEFINED, IPA_BITS_CONSTANT, IPA_BITS_VARYING } m_lattice_val;
316 /* Similar to ccp_lattice_t, mask represents which bits of value are constant.
317 If a bit in mask is set to 0, then the corresponding bit in
318 value is known to be constant. */
319 widest_int m_value, m_mask;
321 bool meet_with_1 (widest_int, widest_int, unsigned);
322 void get_value_and_mask (tree, widest_int *, widest_int *);
325 /* Lattice of value ranges. */
327 class ipcp_vr_lattice
329 public:
/* The value range itself; its type/min/max fields are examined by the
   meet operations below.  TOP is VR_UNDEFINED, BOTTOM is VR_VARYING.  */
330 value_range m_vr;
332 inline bool bottom_p () const;
333 inline bool top_p () const;
334 inline bool set_to_bottom ();
335 bool meet_with (const value_range *p_vr);
336 bool meet_with (const ipcp_vr_lattice &other);
337 void init () { m_vr.type = VR_UNDEFINED; }
338 void print (FILE * f);
340 private:
341 bool meet_with_1 (const value_range *other_vr);
344 /* Structure containing lattices for a parameter itself and for pieces of
345 aggregates that are passed in the parameter or by a reference in a parameter
346 plus some other useful flags. */
348 class ipcp_param_lattices
350 public:
351 /* Lattice describing the value of the parameter itself. */
352 ipcp_lattice<tree> itself;
353 /* Lattice describing the polymorphic contexts of a parameter. */
354 ipcp_lattice<ipa_polymorphic_call_context> ctxlat;
355 /* Lattices describing aggregate parts. */
356 ipcp_agg_lattice *aggs;
357 /* Lattice describing known alignment. */
358 ipcp_alignment_lattice alignment;
359 /* Lattice describing known bits. */
360 ipcp_bits_lattice bits_lattice;
361 /* Lattice describing value range. */
362 ipcp_vr_lattice m_value_range;
363 /* Number of aggregate lattices. */
364 int aggs_count;
365 /* True if aggregate data were passed by reference (as opposed to by
366 value). */
367 bool aggs_by_ref;
368 /* All aggregate lattices contain a variable component (in addition to
369 values). */
370 bool aggs_contain_variable;
371 /* The value of all aggregate lattices is bottom (i.e. variable and unusable
372 for any propagation). */
373 bool aggs_bottom;
375 /* There is a virtual call based on this parameter. */
376 bool virt_call;
379 /* Allocation pools for values and their sources in ipa-cp. */
381 object_allocator<ipcp_value<tree> > ipcp_cst_values_pool
382 ("IPA-CP constant values");
384 object_allocator<ipcp_value<ipa_polymorphic_call_context> >
385 ipcp_poly_ctx_values_pool ("IPA-CP polymorphic contexts");
387 object_allocator<ipcp_value_source<tree> > ipcp_sources_pool
388 ("IPA-CP value sources");
390 object_allocator<ipcp_agg_lattice> ipcp_agg_lattice_pool
391 ("IPA_CP aggregate lattices");
393 /* Maximal count found in program. */
395 static gcov_type max_count;
397 /* Original overall size of the program. */
399 static long overall_size, max_new_size;
401 /* Return the param lattices structure corresponding to the Ith formal
402 parameter of the function described by INFO. */
403 static inline struct ipcp_param_lattices *
404 ipa_get_parm_lattices (struct ipa_node_params *info, int i)
406 gcc_assert (i >= 0 && i < ipa_get_param_count (info));
407 gcc_checking_assert (!info->ipcp_orig_node);
408 gcc_checking_assert (info->lattices);
409 return &(info->lattices[i]);
412 /* Return the lattice corresponding to the scalar value of the Ith formal
413 parameter of the function described by INFO. */
414 static inline ipcp_lattice<tree> *
415 ipa_get_scalar_lat (struct ipa_node_params *info, int i)
417 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
418 return &plats->itself;
421 /* Return the lattice corresponding to the scalar value of the Ith formal
422 parameter of the function described by INFO. */
423 static inline ipcp_lattice<ipa_polymorphic_call_context> *
424 ipa_get_poly_ctx_lat (struct ipa_node_params *info, int i)
426 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
427 return &plats->ctxlat;
430 /* Return the lattice corresponding to the value range of the Ith formal
431 parameter of the function described by INFO. */
433 static inline ipcp_vr_lattice *
434 ipa_get_vr_lat (struct ipa_node_params *info, int i)
436 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
437 return &plats->m_value_range;
440 /* Return whether LAT is a lattice with a single constant and without an
441 undefined value. */
443 template <typename valtype>
444 inline bool
445 ipcp_lattice<valtype>::is_single_const ()
447 if (bottom || contains_variable || values_count != 1)
448 return false;
449 else
450 return true;
453 /* Print V which is extracted from a value in a lattice to F. */
455 static void
456 print_ipcp_constant_value (FILE * f, tree v)
458 if (TREE_CODE (v) == ADDR_EXPR
459 && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
461 fprintf (f, "& ");
462 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)), 0);
464 else
465 print_generic_expr (f, v, 0);
468 /* Print V which is extracted from a value in a lattice to F. */
470 static void
471 print_ipcp_constant_value (FILE * f, ipa_polymorphic_call_context v)
473 v.dump(f, false);
476 /* Print a lattice LAT to F. */
478 template <typename valtype>
479 void
480 ipcp_lattice<valtype>::print (FILE * f, bool dump_sources, bool dump_benefits)
482 ipcp_value<valtype> *val;
483 bool prev = false;
/* BOTTOM subsumes everything else, print it alone.  */
485 if (bottom)
487 fprintf (f, "BOTTOM\n");
488 return;
/* TOP: nothing recorded yet, neither values nor a variable component.  */
491 if (!values_count && !contains_variable)
493 fprintf (f, "TOP\n");
494 return;
497 if (contains_variable)
499 fprintf (f, "VARIABLE");
500 prev = true;
501 if (dump_benefits)
502 fprintf (f, "\n");
/* Print the individual values; PREV tracks whether a separator (comma or
   indentation, depending on DUMP_BENEFITS) must precede the next one.  */
505 for (val = values; val; val = val->next)
507 if (dump_benefits && prev)
508 fprintf (f, " ")//;
509 else if (!dump_benefits && prev)
510 fprintf (f, ", ");
511 else
512 prev = true;
514 print_ipcp_constant_value (f, val->value);
516 if (dump_sources)
518 ipcp_value_source<valtype> *s;
520 fprintf (f, " [from:");
521 for (s = val->sources; s; s = s->next)
522 fprintf (f, " %i(%i)", s->cs->caller->order,
523 s->cs->frequency);
524 fprintf (f, "]");
527 if (dump_benefits)
528 fprintf (f, " [loc_time: %i, loc_size: %i, "
529 "prop_time: %i, prop_size: %i]\n",
530 val->local_time_benefit, val->local_size_cost,
531 val->prop_time_benefit, val->prop_size_cost);
533 if (!dump_benefits)
534 fprintf (f, "\n");
537 /* Print alignment lattice to F. */
539 void
540 ipcp_alignment_lattice::print (FILE * f)
542 if (top_p ())
543 fprintf (f, " Alignment unknown (TOP)\n");
544 else if (bottom_p ())
545 fprintf (f, " Alignment unusable (BOTTOM)\n");
546 else
547 fprintf (f, " Alignment %u, misalignment %u\n", align, misalign);
550 void
551 ipcp_bits_lattice::print (FILE *f)
553 if (top_p ())
554 fprintf (f, " Bits unknown (TOP)\n");
555 else if (bottom_p ())
556 fprintf (f, " Bits unusable (BOTTOM)\n");
557 else
559 fprintf (f, " Bits: value = "); print_hex (get_value (), f);
560 fprintf (f, ", mask = "); print_hex (get_mask (), f);
561 fprintf (f, "\n");
565 /* Print value range lattice to F. */
567 void
568 ipcp_vr_lattice::print (FILE * f)
570 dump_value_range (f, &m_vr);
573 /* Print all ipcp_lattices of all functions to F. */
575 static void
576 print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
578 struct cgraph_node *node;
579 int i, count;
581 fprintf (f, "\nLattices:\n");
582 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
584 struct ipa_node_params *info;
586 info = IPA_NODE_REF (node);
587 fprintf (f, " Node: %s/%i:\n", node->name (),
588 node->order);
589 count = ipa_get_param_count (info);
/* Dump every kind of lattice of each formal parameter in turn.  */
590 for (i = 0; i < count; i++)
592 struct ipcp_agg_lattice *aglat;
593 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
594 fprintf (f, " param [%d]: ", i);
595 plats->itself.print (f, dump_sources, dump_benefits);
596 fprintf (f, " ctxs: ");
597 plats->ctxlat.print (f, dump_sources, dump_benefits);
598 plats->alignment.print (f);
599 plats->bits_lattice.print (f);
600 fprintf (f, " ");
601 plats->m_value_range.print (f);
602 fprintf (f, "\n");
603 if (plats->virt_call)
604 fprintf (f, " virt_call flag set\n");
/* When the aggregate lattices are collectively BOTTOM, the individual
   ones are not printed.  */
606 if (plats->aggs_bottom)
608 fprintf (f, " AGGS BOTTOM\n");
609 continue;
611 if (plats->aggs_contain_variable)
612 fprintf (f, " AGGS VARIABLE\n");
613 for (aglat = plats->aggs; aglat; aglat = aglat->next)
615 fprintf (f, " %soffset " HOST_WIDE_INT_PRINT_DEC ": ",
616 plats->aggs_by_ref ? "ref " : "", aglat->offset)//;
617 aglat->print (f, dump_sources, dump_benefits);
623 /* Determine whether it is at all technically possible to create clones of NODE
624 and store this information in the ipa_node_params structure associated
625 with NODE. */
627 static void
628 determine_versionability (struct cgraph_node *node,
629 struct ipa_node_params *info)
631 const char *reason = NULL;
633 /* There are a number of generic reasons functions cannot be versioned. We
634 also cannot remove parameters if there are type attributes such as fnspec
635 present. */
636 if (node->alias || node->thunk.thunk_p)
637 reason = "alias or thunk";
638 else if (!node->local.versionable)
639 reason = "not a tree_versionable_function";
640 else if (node->get_availability () <= AVAIL_INTERPOSABLE)
641 reason = "insufficient body availability";
642 else if (!opt_for_fn (node->decl, optimize)
643 || !opt_for_fn (node->decl, flag_ipa_cp))
644 reason = "non-optimized function";
645 else if (lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (node->decl)))
647 /* Ideally we should clone the SIMD clones themselves and create
648 vector copies of them, so IPA-cp and SIMD clones can happily
649 coexist, but that may not be worth the effort. */
650 reason = "function has SIMD clones";
652 else if (lookup_attribute ("target_clones", DECL_ATTRIBUTES (node->decl)))
654 /* Ideally we should clone the target clones themselves and create
655 copies of them, so IPA-cp and target clones can happily
656 coexist, but that may not be worth the effort. */
657 reason = "function target_clones attribute";
659 /* Don't clone decls local to a comdat group; it breaks and for C++
660 decloned constructors, inlining is always better anyway. */
661 else if (node->comdat_local_p ())
662 reason = "comdat-local function";
/* Aliases and thunks always have a reason set, so the dump would only be
   noise for them; suppress it.  */
664 if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
665 fprintf (dump_file, "Function %s/%i is not versionable, reason: %s.\n",
666 node->name (), node->order, reason);
668 info->versionable = (reason == NULL);
671 /* Return true if it is at all technically possible to create clones of a
672 NODE. */
674 static bool
675 ipcp_versionable_function_p (struct cgraph_node *node)
677 return IPA_NODE_REF (node)->versionable;
680 /* Structure holding accumulated information about callers of a node. */
682 struct caller_statistics
/* Sum of profile counts of all non-thunk callers.  */
684 gcov_type count_sum;
/* Number of calls, number of hot calls and sum of estimated call
   frequencies, as accumulated by gather_caller_stats.  */
685 int n_calls, n_hot_calls, freq_sum;
688 /* Initialize fields of STAT to zeroes. */
690 static inline void
691 init_caller_stats (struct caller_statistics *stats)
693 stats->count_sum = 0;
694 stats->n_calls = 0;
695 stats->n_hot_calls = 0;
696 stats->freq_sum = 0;
699 /* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
700 non-thunk incoming edges to NODE. */
702 static bool
703 gather_caller_stats (struct cgraph_node *node, void *data)
705 struct caller_statistics *stats = (struct caller_statistics *) data;
706 struct cgraph_edge *cs;
708 for (cs = node->callers; cs; cs = cs->next_caller)
709 if (!cs->caller->thunk.thunk_p)
711 stats->count_sum += cs->count;
712 stats->freq_sum += cs->frequency;
713 stats->n_calls++;
714 if (cs->maybe_hot_p ())
715 stats->n_hot_calls ++;
717 return false;
721 /* Return true if this NODE is viable candidate for cloning. */
723 static bool
724 ipcp_cloning_candidate_p (struct cgraph_node *node)
726 struct caller_statistics stats;
728 gcc_checking_assert (node->has_gimple_body_p ());
730 if (!opt_for_fn (node->decl, flag_ipa_cp_clone))
732 if (dump_file)
733 fprintf (dump_file, "Not considering %s for cloning; "
734 "-fipa-cp-clone disabled.\n",
735 node->name ());
736 return false;
739 if (node->optimize_for_size_p ())
741 if (dump_file)
742 fprintf (dump_file, "Not considering %s for cloning; "
743 "optimizing it for size.\n",
744 node->name ());
745 return false;
748 init_caller_stats (&stats);
749 node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats, false);
/* If the body is smaller than the number of call sites, cloning (and the
   redirection it enables) may actually shrink overall code size.  */
751 if (inline_summaries->get (node)->self_size < stats.n_calls)
753 if (dump_file)
754 fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
755 node->name ());
756 return true;
759 /* When profile is available and function is hot, propagate into it even if
760 calls seems cold; constant propagation can improve function's speed
761 significantly. */
762 if (max_count)
764 if (stats.count_sum > node->count * 90 / 100)
766 if (dump_file)
767 fprintf (dump_file, "Considering %s for cloning; "
768 "usually called directly.\n",
769 node->name ());
770 return true;
773 if (!stats.n_hot_calls)
775 if (dump_file)
776 fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
777 node->name ());
778 return false;
780 if (dump_file)
781 fprintf (dump_file, "Considering %s for cloning.\n",
782 node->name ());
783 return true;
786 template <typename valtype>
787 class value_topo_info
789 public:
790 /* Head of the linked list of topologically sorted values. */
791 ipcp_value<valtype> *values_topo;
792 /* Stack for creating SCCs, represented by a linked list too. */
793 ipcp_value<valtype> *stack;
794 /* Counter driving the algorithm in add_val. */
795 int dfs_counter;
797 value_topo_info () : values_topo (NULL), stack (NULL), dfs_counter (0)
799 void add_val (ipcp_value<valtype> *cur_val);
800 void propagate_effects ();
803 /* Arrays representing a topological ordering of call graph nodes and a stack
804 of nodes used during constant propagation and also data required to perform
805 topological sort of values and propagation of benefits in the determined
806 order. */
808 class ipa_topo_info
810 public:
811 /* Array with obtained topological order of cgraph nodes. */
812 struct cgraph_node **order;
813 /* Stack of cgraph nodes used during propagation within SCC until all values
814 in the SCC stabilize. */
815 struct cgraph_node **stack;
816 int nnodes, stack_top;
/* Topological orderings of IPA-CP values: one for tree constants, one for
   polymorphic call contexts.  */
818 value_topo_info<tree> constants;
819 value_topo_info<ipa_polymorphic_call_context> contexts;
821 ipa_topo_info () : order(NULL), stack(NULL), nnodes(0), stack_top(0),
822 constants ()
826 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
828 static void
829 build_toporder_info (struct ipa_topo_info *topo)
831 topo->order = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
832 topo->stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
834 gcc_checking_assert (topo->stack_top == 0);
835 topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
838 /* Free information about strongly connected components and the arrays in
839 TOPO. */
841 static void
842 free_toporder_info (struct ipa_topo_info *topo)
844 ipa_free_postorder_info ();
845 free (topo->order);
846 free (topo->stack);
849 /* Add NODE to the stack in TOPO, unless it is already there. */
851 static inline void
852 push_node_to_stack (struct ipa_topo_info *topo, struct cgraph_node *node)
854 struct ipa_node_params *info = IPA_NODE_REF (node);
855 if (info->node_enqueued)
856 return;
857 info->node_enqueued = 1;
858 topo->stack[topo->stack_top++] = node;
861 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
862 is empty. */
864 static struct cgraph_node *
865 pop_node_from_stack (struct ipa_topo_info *topo)
867 if (topo->stack_top)
869 struct cgraph_node *node;
870 topo->stack_top--;
871 node = topo->stack[topo->stack_top];
872 IPA_NODE_REF (node)->node_enqueued = 0;
873 return node;
875 else
876 return NULL;
879 /* Set lattice LAT to bottom and return true if it previously was not set as
880 such. */
882 template <typename valtype>
883 inline bool
884 ipcp_lattice<valtype>::set_to_bottom ()
886 bool ret = !bottom;
887 bottom = true;
888 return ret;
891 /* Mark lattice as containing an unknown value and return true if it previously
892 was not marked as such. */
894 template <typename valtype>
895 inline bool
896 ipcp_lattice<valtype>::set_contains_variable ()
898 bool ret = !contains_variable;
899 contains_variable = true;
900 return ret;
903 /* Set all aggegate lattices in PLATS to bottom and return true if they were
904 not previously set as such. */
906 static inline bool
907 set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
909 bool ret = !plats->aggs_bottom;
910 plats->aggs_bottom = true;
911 return ret;
914 /* Mark all aggegate lattices in PLATS as containing an unknown value and
915 return true if they were not previously marked as such. */
917 static inline bool
918 set_agg_lats_contain_variable (struct ipcp_param_lattices *plats)
920 bool ret = !plats->aggs_contain_variable;
921 plats->aggs_contain_variable = true;
922 return ret;
925 /* Return true if alignment information in the lattice is yet unknown. */
927 bool
928 ipcp_alignment_lattice::top_p () const
930 return !bottom && !not_top;
933 /* Return true if alignment information in the lattice is known to be
934 unusable. */
936 bool
937 ipcp_alignment_lattice::bottom_p () const
939 return bottom;
942 /* Set alignment information in the lattice to bottom. Return true if it
943 previously was in a different state. */
945 bool
946 ipcp_alignment_lattice::set_to_bottom ()
948 if (bottom_p ())
949 return false;
950 bottom = true;
951 return true;
954 /* Meet the current value of the lattice with described by OTHER
955 lattice. */
957 bool
958 ipcp_vr_lattice::meet_with (const ipcp_vr_lattice &other)
960 return meet_with_1 (&other.m_vr);
963 /* Meet the current value of the lattice with value ranfge described by VR
964 lattice. */
966 bool
967 ipcp_vr_lattice::meet_with (const value_range *p_vr)
969 return meet_with_1 (p_vr);
972 /* Meet the current value of the lattice with value ranfge described by
973 OTHER_VR lattice. */
975 bool
976 ipcp_vr_lattice::meet_with_1 (const value_range *other_vr)
978 tree min = m_vr.min, max = m_vr.max;
979 value_range_type type = m_vr.type;
981 if (bottom_p ())
982 return false;
984 if (other_vr->type == VR_VARYING)
985 return set_to_bottom ();
987 vrp_meet (&m_vr, other_vr);
988 if (type != m_vr.type
989 || min != m_vr.min
990 || max != m_vr.max)
991 return true;
992 else
993 return false;
996 /* Return true if value range information in the lattice is yet unknown. */
998 bool
999 ipcp_vr_lattice::top_p () const
1001 return m_vr.type == VR_UNDEFINED;
1004 /* Return true if value range information in the lattice is known to be
1005 unusable. */
1007 bool
1008 ipcp_vr_lattice::bottom_p () const
1010 return m_vr.type == VR_VARYING;
1013 /* Set value range information in the lattice to bottom. Return true if it
1014 previously was in a different state. */
1016 bool
1017 ipcp_vr_lattice::set_to_bottom ()
1019 if (m_vr.type == VR_VARYING)
1020 return false;
1021 m_vr.type = VR_VARYING;
1022 return true;
1025 /* Meet the current value of the lattice with alignment described by NEW_ALIGN
1026 and NEW_MISALIGN, assuming that we know the current value is neither TOP nor
1027 BOTTOM. Return true if the value of lattice has changed. */
1029 bool
1030 ipcp_alignment_lattice::meet_with_1 (unsigned new_align, unsigned new_misalign)
1032 gcc_checking_assert (new_align != 0);
1033 if (align == new_align && misalign == new_misalign)
1034 return false;
1036 bool changed = false;
1037 if (align > new_align)
1039 align = new_align;
1040 misalign = misalign % new_align;
1041 changed = true;
1043 if (misalign != (new_misalign % align))
1045 int diff = abs ((int) misalign - (int) (new_misalign % align));
1046 align = least_bit_hwi (diff);
1047 if (align)
1048 misalign = misalign % align;
1049 else
1050 set_to_bottom ();
1051 changed = true;
1053 gcc_checking_assert (bottom_p () || align != 0);
1054 return changed;
1057 /* Meet the current value of the lattice with alignment described by NEW_ALIGN
1058 and NEW_MISALIGN. Return true if the value of lattice has changed. */
1060 bool
1061 ipcp_alignment_lattice::meet_with (unsigned new_align, unsigned new_misalign)
1063 gcc_assert (new_align != 0);
1064 if (bottom_p ())
1065 return false;
1066 if (top_p ())
1068 not_top = true;
1069 align = new_align;
1070 misalign = new_misalign;
1071 return true;
1073 return meet_with_1 (new_align, new_misalign);
1076 /* Meet the current value of the lattice with OTHER, taking into account that
1077 OFFSET has been added to the pointer value. Return true if the value of
1078 lattice has changed. */
1080 bool
1081 ipcp_alignment_lattice::meet_with (const ipcp_alignment_lattice &other,
1082 HOST_WIDE_INT offset)
1084 if (other.bottom_p ())
1085 return set_to_bottom ();
1086 if (bottom_p () || other.top_p ())
1087 return false;
1089 unsigned adjusted_misalign = (other.misalign + offset) % other.align;
1090 if (top_p ())
1092 not_top = true;
1093 align = other.align;
1094 misalign = adjusted_misalign;
1095 return true;
1098 return meet_with_1 (other.align, adjusted_misalign);
1101 /* Set lattice value to bottom, if it already isn't the case. */
1103 bool
1104 ipcp_bits_lattice::set_to_bottom ()
1106 if (bottom_p ())
1107 return false;
1108 m_lattice_val = IPA_BITS_VARYING;
1109 m_value = 0;
1110 m_mask = -1;
1111 return true;
1114 /* Set to constant if it isn't already. Only meant to be called
1115 when switching state from TOP. */
1117 bool
1118 ipcp_bits_lattice::set_to_constant (widest_int value, widest_int mask)
1120 gcc_assert (top_p ());
1121 m_lattice_val = IPA_BITS_CONSTANT;
1122 m_value = value;
1123 m_mask = mask;
1124 return true;
1127 /* Convert operand to value, mask form. */
1129 void
1130 ipcp_bits_lattice::get_value_and_mask (tree operand, widest_int *valuep, widest_int *maskp)
1132 wide_int get_nonzero_bits (const_tree);
1134 if (TREE_CODE (operand) == INTEGER_CST)
1136 *valuep = wi::to_widest (operand);
1137 *maskp = 0;
1139 else
1141 *valuep = 0;
1142 *maskp = -1;
1146 /* Meet operation, similar to ccp_lattice_meet, we xor values
1147 if this->value, value have different values at same bit positions, we want
1148 to drop that bit to varying. Return true if mask is changed.
1149 This function assumes that the lattice value is in CONSTANT state */
1151 bool
1152 ipcp_bits_lattice::meet_with_1 (widest_int value, widest_int mask,
1153 unsigned precision)
1155 gcc_assert (constant_p ());
1157 widest_int old_mask = m_mask;
1158 m_mask = (m_mask | mask) | (m_value ^ value);
1160 if (wi::sext (m_mask, precision) == -1)
1161 return set_to_bottom ();
1163 return m_mask != old_mask;
1166 /* Meet the bits lattice with operand
1167 described by <value, mask, sgn, precision. */
1169 bool
1170 ipcp_bits_lattice::meet_with (widest_int value, widest_int mask,
1171 unsigned precision)
1173 if (bottom_p ())
1174 return false;
1176 if (top_p ())
1178 if (wi::sext (mask, precision) == -1)
1179 return set_to_bottom ();
1180 return set_to_constant (value, mask);
1183 return meet_with_1 (value, mask, precision);
1186 /* Meet bits lattice with the result of bit_value_binop (other, operand)
1187 if code is binary operation or bit_value_unop (other) if code is unary op.
1188 In the case when code is nop_expr, no adjustment is required. */
1190 bool
1191 ipcp_bits_lattice::meet_with (ipcp_bits_lattice& other, unsigned precision,
1192 signop sgn, enum tree_code code, tree operand)
1194 if (other.bottom_p ())
1195 return set_to_bottom ();
1197 if (bottom_p () || other.top_p ())
1198 return false;
1200 widest_int adjusted_value, adjusted_mask;
1202 if (TREE_CODE_CLASS (code) == tcc_binary)
1204 tree type = TREE_TYPE (operand);
1205 gcc_assert (INTEGRAL_TYPE_P (type));
1206 widest_int o_value, o_mask;
1207 get_value_and_mask (operand, &o_value, &o_mask);
1209 bit_value_binop (code, sgn, precision, &adjusted_value, &adjusted_mask,
1210 sgn, precision, other.get_value (), other.get_mask (),
1211 TYPE_SIGN (type), TYPE_PRECISION (type), o_value, o_mask);
1213 if (wi::sext (adjusted_mask, precision) == -1)
1214 return set_to_bottom ();
1217 else if (TREE_CODE_CLASS (code) == tcc_unary)
1219 bit_value_unop (code, sgn, precision, &adjusted_value,
1220 &adjusted_mask, sgn, precision, other.get_value (),
1221 other.get_mask ());
1223 if (wi::sext (adjusted_mask, precision) == -1)
1224 return set_to_bottom ();
1227 else if (code == NOP_EXPR)
1229 adjusted_value = other.m_value;
1230 adjusted_mask = other.m_mask;
1233 else
1234 return set_to_bottom ();
1236 if (top_p ())
1238 if (wi::sext (adjusted_mask, precision) == -1)
1239 return set_to_bottom ();
1240 return set_to_constant (adjusted_value, adjusted_mask);
1242 else
1243 return meet_with_1 (adjusted_value, adjusted_mask, precision);
1246 /* Mark bot aggregate and scalar lattices as containing an unknown variable,
1247 return true is any of them has not been marked as such so far. */
1249 static inline bool
1250 set_all_contains_variable (struct ipcp_param_lattices *plats)
1252 bool ret;
1253 ret = plats->itself.set_contains_variable ();
1254 ret |= plats->ctxlat.set_contains_variable ();
1255 ret |= set_agg_lats_contain_variable (plats);
1256 ret |= plats->alignment.set_to_bottom ();
1257 ret |= plats->bits_lattice.set_to_bottom ();
1258 ret |= plats->m_value_range.set_to_bottom ();
1259 return ret;
1262 /* Worker of call_for_symbol_thunks_and_aliases, increment the integer DATA
1263 points to by the number of callers to NODE. */
1265 static bool
1266 count_callers (cgraph_node *node, void *data)
1268 int *caller_count = (int *) data;
1270 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
1271 /* Local thunks can be handled transparently, but if the thunk can not
1272 be optimized out, count it as a real use. */
1273 if (!cs->caller->thunk.thunk_p || !cs->caller->local.local)
1274 ++*caller_count;
1275 return false;
1278 /* Worker of call_for_symbol_thunks_and_aliases, it is supposed to be called on
1279 the one caller of some other node. Set the caller's corresponding flag. */
1281 static bool
1282 set_single_call_flag (cgraph_node *node, void *)
1284 cgraph_edge *cs = node->callers;
1285 /* Local thunks can be handled transparently, skip them. */
1286 while (cs && cs->caller->thunk.thunk_p && cs->caller->local.local)
1287 cs = cs->next_caller;
1288 if (cs)
1290 IPA_NODE_REF (cs->caller)->node_calling_single_call = true;
1291 return true;
1293 return false;
1296 /* Initialize ipcp_lattices. */
1298 static void
1299 initialize_node_lattices (struct cgraph_node *node)
1301 struct ipa_node_params *info = IPA_NODE_REF (node);
1302 struct cgraph_edge *ie;
1303 bool disable = false, variable = false;
1304 int i;
1306 gcc_checking_assert (node->has_gimple_body_p ());
1307 if (cgraph_local_p (node))
1309 int caller_count = 0;
1310 node->call_for_symbol_thunks_and_aliases (count_callers, &caller_count,
1311 true);
1312 gcc_checking_assert (caller_count > 0);
1313 if (caller_count == 1)
1314 node->call_for_symbol_thunks_and_aliases (set_single_call_flag,
1315 NULL, true);
1317 else
1319 /* When cloning is allowed, we can assume that externally visible
1320 functions are not called. We will compensate this by cloning
1321 later. */
1322 if (ipcp_versionable_function_p (node)
1323 && ipcp_cloning_candidate_p (node))
1324 variable = true;
1325 else
1326 disable = true;
1329 for (i = 0; i < ipa_get_param_count (info) ; i++)
1331 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1332 plats->m_value_range.init ();
1335 if (disable || variable)
1337 for (i = 0; i < ipa_get_param_count (info) ; i++)
1339 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1340 if (disable)
1342 plats->itself.set_to_bottom ();
1343 plats->ctxlat.set_to_bottom ();
1344 set_agg_lats_to_bottom (plats);
1345 plats->alignment.set_to_bottom ();
1346 plats->bits_lattice.set_to_bottom ();
1347 plats->m_value_range.set_to_bottom ();
1349 else
1350 set_all_contains_variable (plats);
1352 if (dump_file && (dump_flags & TDF_DETAILS)
1353 && !node->alias && !node->thunk.thunk_p)
1354 fprintf (dump_file, "Marking all lattices of %s/%i as %s\n",
1355 node->name (), node->order,
1356 disable ? "BOTTOM" : "VARIABLE");
1359 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1360 if (ie->indirect_info->polymorphic
1361 && ie->indirect_info->param_index >= 0)
1363 gcc_checking_assert (ie->indirect_info->param_index >= 0);
1364 ipa_get_parm_lattices (info,
1365 ie->indirect_info->param_index)->virt_call = 1;
1369 /* Return the result of a (possibly arithmetic) pass through jump function
1370 JFUNC on the constant value INPUT. Return NULL_TREE if that cannot be
1371 determined or be considered an interprocedural invariant. */
1373 static tree
1374 ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
1376 tree restype, res;
1378 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
1379 return input;
1380 if (!is_gimple_ip_invariant (input))
1381 return NULL_TREE;
1383 if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc))
1384 == tcc_comparison)
1385 restype = boolean_type_node;
1386 else
1387 restype = TREE_TYPE (input);
1388 res = fold_binary (ipa_get_jf_pass_through_operation (jfunc), restype,
1389 input, ipa_get_jf_pass_through_operand (jfunc));
1391 if (res && !is_gimple_ip_invariant (res))
1392 return NULL_TREE;
1394 return res;
1397 /* Return the result of an ancestor jump function JFUNC on the constant value
1398 INPUT. Return NULL_TREE if that cannot be determined. */
1400 static tree
1401 ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
1403 gcc_checking_assert (TREE_CODE (input) != TREE_BINFO);
1404 if (TREE_CODE (input) == ADDR_EXPR)
1406 tree t = TREE_OPERAND (input, 0);
1407 t = build_ref_for_offset (EXPR_LOCATION (t), t,
1408 ipa_get_jf_ancestor_offset (jfunc), false,
1409 ptr_type_node, NULL, false);
1410 return build_fold_addr_expr (t);
1412 else
1413 return NULL_TREE;
1416 /* Determine whether JFUNC evaluates to a single known constant value and if
1417 so, return it. Otherwise return NULL. INFO describes the caller node or
1418 the one it is inlined to, so that pass-through jump functions can be
1419 evaluated. */
1421 tree
1422 ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
1424 if (jfunc->type == IPA_JF_CONST)
1425 return ipa_get_jf_constant (jfunc);
1426 else if (jfunc->type == IPA_JF_PASS_THROUGH
1427 || jfunc->type == IPA_JF_ANCESTOR)
1429 tree input;
1430 int idx;
1432 if (jfunc->type == IPA_JF_PASS_THROUGH)
1433 idx = ipa_get_jf_pass_through_formal_id (jfunc);
1434 else
1435 idx = ipa_get_jf_ancestor_formal_id (jfunc);
1437 if (info->ipcp_orig_node)
1438 input = info->known_csts[idx];
1439 else
1441 ipcp_lattice<tree> *lat;
1443 if (!info->lattices
1444 || idx >= ipa_get_param_count (info))
1445 return NULL_TREE;
1446 lat = ipa_get_scalar_lat (info, idx);
1447 if (!lat->is_single_const ())
1448 return NULL_TREE;
1449 input = lat->values->value;
1452 if (!input)
1453 return NULL_TREE;
1455 if (jfunc->type == IPA_JF_PASS_THROUGH)
1456 return ipa_get_jf_pass_through_result (jfunc, input);
1457 else
1458 return ipa_get_jf_ancestor_result (jfunc, input);
1460 else
1461 return NULL_TREE;
1464 /* Determie whether JFUNC evaluates to single known polymorphic context, given
1465 that INFO describes the caller node or the one it is inlined to, CS is the
1466 call graph edge corresponding to JFUNC and CSIDX index of the described
1467 parameter. */
1469 ipa_polymorphic_call_context
1470 ipa_context_from_jfunc (ipa_node_params *info, cgraph_edge *cs, int csidx,
1471 ipa_jump_func *jfunc)
1473 ipa_edge_args *args = IPA_EDGE_REF (cs);
1474 ipa_polymorphic_call_context ctx;
1475 ipa_polymorphic_call_context *edge_ctx
1476 = cs ? ipa_get_ith_polymorhic_call_context (args, csidx) : NULL;
1478 if (edge_ctx && !edge_ctx->useless_p ())
1479 ctx = *edge_ctx;
1481 if (jfunc->type == IPA_JF_PASS_THROUGH
1482 || jfunc->type == IPA_JF_ANCESTOR)
1484 ipa_polymorphic_call_context srcctx;
1485 int srcidx;
1486 bool type_preserved = true;
1487 if (jfunc->type == IPA_JF_PASS_THROUGH)
1489 if (ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1490 return ctx;
1491 type_preserved = ipa_get_jf_pass_through_type_preserved (jfunc);
1492 srcidx = ipa_get_jf_pass_through_formal_id (jfunc);
1494 else
1496 type_preserved = ipa_get_jf_ancestor_type_preserved (jfunc);
1497 srcidx = ipa_get_jf_ancestor_formal_id (jfunc);
1499 if (info->ipcp_orig_node)
1501 if (info->known_contexts.exists ())
1502 srcctx = info->known_contexts[srcidx];
1504 else
1506 if (!info->lattices
1507 || srcidx >= ipa_get_param_count (info))
1508 return ctx;
1509 ipcp_lattice<ipa_polymorphic_call_context> *lat;
1510 lat = ipa_get_poly_ctx_lat (info, srcidx);
1511 if (!lat->is_single_const ())
1512 return ctx;
1513 srcctx = lat->values->value;
1515 if (srcctx.useless_p ())
1516 return ctx;
1517 if (jfunc->type == IPA_JF_ANCESTOR)
1518 srcctx.offset_by (ipa_get_jf_ancestor_offset (jfunc));
1519 if (!type_preserved)
1520 srcctx.possible_dynamic_type_change (cs->in_polymorphic_cdtor);
1521 srcctx.combine_with (ctx);
1522 return srcctx;
1525 return ctx;
1528 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
1529 bottom, not containing a variable component and without any known value at
1530 the same time. */
1532 DEBUG_FUNCTION void
1533 ipcp_verify_propagated_values (void)
1535 struct cgraph_node *node;
1537 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
1539 struct ipa_node_params *info = IPA_NODE_REF (node);
1540 int i, count = ipa_get_param_count (info);
1542 for (i = 0; i < count; i++)
1544 ipcp_lattice<tree> *lat = ipa_get_scalar_lat (info, i);
1546 if (!lat->bottom
1547 && !lat->contains_variable
1548 && lat->values_count == 0)
1550 if (dump_file)
1552 symtab_node::dump_table (dump_file);
1553 fprintf (dump_file, "\nIPA lattices after constant "
1554 "propagation, before gcc_unreachable:\n");
1555 print_all_lattices (dump_file, true, false);
1558 gcc_unreachable ();
1564 /* Return true iff X and Y should be considered equal values by IPA-CP. */
1566 static bool
1567 values_equal_for_ipcp_p (tree x, tree y)
1569 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
1571 if (x == y)
1572 return true;
1574 if (TREE_CODE (x) == ADDR_EXPR
1575 && TREE_CODE (y) == ADDR_EXPR
1576 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
1577 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
1578 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
1579 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
1580 else
1581 return operand_equal_p (x, y, 0);
1584 /* Return true iff X and Y should be considered equal contexts by IPA-CP. */
1586 static bool
1587 values_equal_for_ipcp_p (ipa_polymorphic_call_context x,
1588 ipa_polymorphic_call_context y)
1590 return x.equal_to (y);
1594 /* Add a new value source to the value represented by THIS, marking that a
1595 value comes from edge CS and (if the underlying jump function is a
1596 pass-through or an ancestor one) from a caller value SRC_VAL of a caller
1597 parameter described by SRC_INDEX. OFFSET is negative if the source was the
1598 scalar value of the parameter itself or the offset within an aggregate. */
1600 template <typename valtype>
1601 void
1602 ipcp_value<valtype>::add_source (cgraph_edge *cs, ipcp_value *src_val,
1603 int src_idx, HOST_WIDE_INT offset)
1605 ipcp_value_source<valtype> *src;
1607 src = new (ipcp_sources_pool.allocate ()) ipcp_value_source<valtype>;
1608 src->offset = offset;
1609 src->cs = cs;
1610 src->val = src_val;
1611 src->index = src_idx;
1613 src->next = sources;
1614 sources = src;
1617 /* Allocate a new ipcp_value holding a tree constant, initialize its value to
1618 SOURCE and clear all other fields. */
1620 static ipcp_value<tree> *
1621 allocate_and_init_ipcp_value (tree source)
1623 ipcp_value<tree> *val;
1625 val = ipcp_cst_values_pool.allocate ();
1626 memset (val, 0, sizeof (*val));
1627 val->value = source;
1628 return val;
1631 /* Allocate a new ipcp_value holding a polymorphic context, initialize its
1632 value to SOURCE and clear all other fields. */
1634 static ipcp_value<ipa_polymorphic_call_context> *
1635 allocate_and_init_ipcp_value (ipa_polymorphic_call_context source)
1637 ipcp_value<ipa_polymorphic_call_context> *val;
1639 // TODO
1640 val = ipcp_poly_ctx_values_pool.allocate ();
1641 memset (val, 0, sizeof (*val));
1642 val->value = source;
1643 return val;
1646 /* Try to add NEWVAL to LAT, potentially creating a new ipcp_value for it. CS,
1647 SRC_VAL SRC_INDEX and OFFSET are meant for add_source and have the same
1648 meaning. OFFSET -1 means the source is scalar and not a part of an
1649 aggregate. */
1651 template <typename valtype>
1652 bool
1653 ipcp_lattice<valtype>::add_value (valtype newval, cgraph_edge *cs,
1654 ipcp_value<valtype> *src_val,
1655 int src_idx, HOST_WIDE_INT offset)
1657 ipcp_value<valtype> *val;
1659 if (bottom)
1660 return false;
1662 for (val = values; val; val = val->next)
1663 if (values_equal_for_ipcp_p (val->value, newval))
1665 if (ipa_edge_within_scc (cs))
1667 ipcp_value_source<valtype> *s;
1668 for (s = val->sources; s ; s = s->next)
1669 if (s->cs == cs)
1670 break;
1671 if (s)
1672 return false;
1675 val->add_source (cs, src_val, src_idx, offset);
1676 return false;
1679 if (values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
1681 /* We can only free sources, not the values themselves, because sources
1682 of other values in this SCC might point to them. */
1683 for (val = values; val; val = val->next)
1685 while (val->sources)
1687 ipcp_value_source<valtype> *src = val->sources;
1688 val->sources = src->next;
1689 ipcp_sources_pool.remove ((ipcp_value_source<tree>*)src);
1693 values = NULL;
1694 return set_to_bottom ();
1697 values_count++;
1698 val = allocate_and_init_ipcp_value (newval);
1699 val->add_source (cs, src_val, src_idx, offset);
1700 val->next = values;
1701 values = val;
1702 return true;
1705 /* Propagate values through a pass-through jump function JFUNC associated with
1706 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1707 is the index of the source parameter. */
1709 static bool
1710 propagate_vals_accross_pass_through (cgraph_edge *cs,
1711 ipa_jump_func *jfunc,
1712 ipcp_lattice<tree> *src_lat,
1713 ipcp_lattice<tree> *dest_lat,
1714 int src_idx)
1716 ipcp_value<tree> *src_val;
1717 bool ret = false;
1719 /* Do not create new values when propagating within an SCC because if there
1720 are arithmetic functions with circular dependencies, there is infinite
1721 number of them and we would just make lattices bottom. */
1722 if ((ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1723 && ipa_edge_within_scc (cs))
1724 ret = dest_lat->set_contains_variable ();
1725 else
1726 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1728 tree cstval = ipa_get_jf_pass_through_result (jfunc, src_val->value);
1730 if (cstval)
1731 ret |= dest_lat->add_value (cstval, cs, src_val, src_idx);
1732 else
1733 ret |= dest_lat->set_contains_variable ();
1736 return ret;
1739 /* Propagate values through an ancestor jump function JFUNC associated with
1740 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1741 is the index of the source parameter. */
1743 static bool
1744 propagate_vals_accross_ancestor (struct cgraph_edge *cs,
1745 struct ipa_jump_func *jfunc,
1746 ipcp_lattice<tree> *src_lat,
1747 ipcp_lattice<tree> *dest_lat,
1748 int src_idx)
1750 ipcp_value<tree> *src_val;
1751 bool ret = false;
1753 if (ipa_edge_within_scc (cs))
1754 return dest_lat->set_contains_variable ();
1756 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1758 tree t = ipa_get_jf_ancestor_result (jfunc, src_val->value);
1760 if (t)
1761 ret |= dest_lat->add_value (t, cs, src_val, src_idx);
1762 else
1763 ret |= dest_lat->set_contains_variable ();
1766 return ret;
1769 /* Propagate scalar values across jump function JFUNC that is associated with
1770 edge CS and put the values into DEST_LAT. */
1772 static bool
1773 propagate_scalar_accross_jump_function (struct cgraph_edge *cs,
1774 struct ipa_jump_func *jfunc,
1775 ipcp_lattice<tree> *dest_lat)
1777 if (dest_lat->bottom)
1778 return false;
1780 if (jfunc->type == IPA_JF_CONST)
1782 tree val = ipa_get_jf_constant (jfunc);
1783 return dest_lat->add_value (val, cs, NULL, 0);
1785 else if (jfunc->type == IPA_JF_PASS_THROUGH
1786 || jfunc->type == IPA_JF_ANCESTOR)
1788 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1789 ipcp_lattice<tree> *src_lat;
1790 int src_idx;
1791 bool ret;
1793 if (jfunc->type == IPA_JF_PASS_THROUGH)
1794 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1795 else
1796 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1798 src_lat = ipa_get_scalar_lat (caller_info, src_idx);
1799 if (src_lat->bottom)
1800 return dest_lat->set_contains_variable ();
1802 /* If we would need to clone the caller and cannot, do not propagate. */
1803 if (!ipcp_versionable_function_p (cs->caller)
1804 && (src_lat->contains_variable
1805 || (src_lat->values_count > 1)))
1806 return dest_lat->set_contains_variable ();
1808 if (jfunc->type == IPA_JF_PASS_THROUGH)
1809 ret = propagate_vals_accross_pass_through (cs, jfunc, src_lat,
1810 dest_lat, src_idx);
1811 else
1812 ret = propagate_vals_accross_ancestor (cs, jfunc, src_lat, dest_lat,
1813 src_idx);
1815 if (src_lat->contains_variable)
1816 ret |= dest_lat->set_contains_variable ();
1818 return ret;
1821 /* TODO: We currently do not handle member method pointers in IPA-CP (we only
1822 use it for indirect inlining), we should propagate them too. */
1823 return dest_lat->set_contains_variable ();
1826 /* Propagate scalar values across jump function JFUNC that is associated with
1827 edge CS and describes argument IDX and put the values into DEST_LAT. */
1829 static bool
1830 propagate_context_accross_jump_function (cgraph_edge *cs,
1831 ipa_jump_func *jfunc, int idx,
1832 ipcp_lattice<ipa_polymorphic_call_context> *dest_lat)
1834 ipa_edge_args *args = IPA_EDGE_REF (cs);
1835 if (dest_lat->bottom)
1836 return false;
1837 bool ret = false;
1838 bool added_sth = false;
1839 bool type_preserved = true;
1841 ipa_polymorphic_call_context edge_ctx, *edge_ctx_ptr
1842 = ipa_get_ith_polymorhic_call_context (args, idx);
1844 if (edge_ctx_ptr)
1845 edge_ctx = *edge_ctx_ptr;
1847 if (jfunc->type == IPA_JF_PASS_THROUGH
1848 || jfunc->type == IPA_JF_ANCESTOR)
1850 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1851 int src_idx;
1852 ipcp_lattice<ipa_polymorphic_call_context> *src_lat;
1854 /* TODO: Once we figure out how to propagate speculations, it will
1855 probably be a good idea to switch to speculation if type_preserved is
1856 not set instead of punting. */
1857 if (jfunc->type == IPA_JF_PASS_THROUGH)
1859 if (ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1860 goto prop_fail;
1861 type_preserved = ipa_get_jf_pass_through_type_preserved (jfunc);
1862 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1864 else
1866 type_preserved = ipa_get_jf_ancestor_type_preserved (jfunc);
1867 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1870 src_lat = ipa_get_poly_ctx_lat (caller_info, src_idx);
1871 /* If we would need to clone the caller and cannot, do not propagate. */
1872 if (!ipcp_versionable_function_p (cs->caller)
1873 && (src_lat->contains_variable
1874 || (src_lat->values_count > 1)))
1875 goto prop_fail;
1877 ipcp_value<ipa_polymorphic_call_context> *src_val;
1878 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1880 ipa_polymorphic_call_context cur = src_val->value;
1882 if (!type_preserved)
1883 cur.possible_dynamic_type_change (cs->in_polymorphic_cdtor);
1884 if (jfunc->type == IPA_JF_ANCESTOR)
1885 cur.offset_by (ipa_get_jf_ancestor_offset (jfunc));
1886 /* TODO: In cases we know how the context is going to be used,
1887 we can improve the result by passing proper OTR_TYPE. */
1888 cur.combine_with (edge_ctx);
1889 if (!cur.useless_p ())
1891 if (src_lat->contains_variable
1892 && !edge_ctx.equal_to (cur))
1893 ret |= dest_lat->set_contains_variable ();
1894 ret |= dest_lat->add_value (cur, cs, src_val, src_idx);
1895 added_sth = true;
1901 prop_fail:
1902 if (!added_sth)
1904 if (!edge_ctx.useless_p ())
1905 ret |= dest_lat->add_value (edge_ctx, cs);
1906 else
1907 ret |= dest_lat->set_contains_variable ();
1910 return ret;
1913 /* Propagate alignments across jump function JFUNC that is associated with
1914 edge CS and update DEST_LAT accordingly. */
1916 static bool
1917 propagate_alignment_accross_jump_function (cgraph_edge *cs,
1918 ipa_jump_func *jfunc,
1919 ipcp_alignment_lattice *dest_lat)
1921 if (dest_lat->bottom_p ())
1922 return false;
1924 if (jfunc->type == IPA_JF_PASS_THROUGH
1925 || jfunc->type == IPA_JF_ANCESTOR)
1927 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1928 HOST_WIDE_INT offset = 0;
1929 int src_idx;
1931 if (jfunc->type == IPA_JF_PASS_THROUGH)
1933 enum tree_code op = ipa_get_jf_pass_through_operation (jfunc);
1934 if (op != NOP_EXPR)
1936 if (op != POINTER_PLUS_EXPR
1937 && op != PLUS_EXPR)
1938 return dest_lat->set_to_bottom ();
1939 tree operand = ipa_get_jf_pass_through_operand (jfunc);
1940 if (!tree_fits_shwi_p (operand))
1941 return dest_lat->set_to_bottom ();
1942 offset = tree_to_shwi (operand);
1944 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1946 else
1948 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1949 offset = ipa_get_jf_ancestor_offset (jfunc) / BITS_PER_UNIT;
1952 struct ipcp_param_lattices *src_lats;
1953 src_lats = ipa_get_parm_lattices (caller_info, src_idx);
1954 return dest_lat->meet_with (src_lats->alignment, offset);
1956 else
1958 if (jfunc->alignment.known)
1959 return dest_lat->meet_with (jfunc->alignment.align,
1960 jfunc->alignment.misalign);
1961 else
1962 return dest_lat->set_to_bottom ();
1966 /* Propagate bits across jfunc that is associated with
1967 edge cs and update dest_lattice accordingly. */
1969 bool
1970 propagate_bits_accross_jump_function (cgraph_edge *cs, int idx, ipa_jump_func *jfunc,
1971 ipcp_bits_lattice *dest_lattice)
1973 if (dest_lattice->bottom_p ())
1974 return false;
1976 enum availability availability;
1977 cgraph_node *callee = cs->callee->function_symbol (&availability);
1978 struct ipa_node_params *callee_info = IPA_NODE_REF (callee);
1979 tree parm_type = ipa_get_type (callee_info, idx);
1981 /* For K&R C programs, ipa_get_type() could return NULL_TREE.
1982 Avoid the transform for these cases. */
1983 if (!parm_type)
1985 if (dump_file && (dump_flags & TDF_DETAILS))
1986 fprintf (dump_file, "Setting dest_lattice to bottom, because"
1987 " param %i type is NULL for %s\n", idx,
1988 cs->callee->name ());
1990 return dest_lattice->set_to_bottom ();
1993 unsigned precision = TYPE_PRECISION (parm_type);
1994 signop sgn = TYPE_SIGN (parm_type);
1996 if (jfunc->type == IPA_JF_PASS_THROUGH)
1998 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1999 enum tree_code code = ipa_get_jf_pass_through_operation (jfunc);
2000 tree operand = NULL_TREE;
2002 if (code != NOP_EXPR)
2003 operand = ipa_get_jf_pass_through_operand (jfunc);
2005 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
2006 struct ipcp_param_lattices *src_lats
2007 = ipa_get_parm_lattices (caller_info, src_idx);
2009 /* Try to propagate bits if src_lattice is bottom, but jfunc is known.
2010 for eg consider:
2011 int f(int x)
2013 g (x & 0xff);
2015 Assume lattice for x is bottom, however we can still propagate
2016 result of x & 0xff == 0xff, which gets computed during ccp1 pass
2017 and we store it in jump function during analysis stage. */
2019 if (src_lats->bits_lattice.bottom_p ()
2020 && jfunc->bits.known)
2021 return dest_lattice->meet_with (jfunc->bits.value, jfunc->bits.mask,
2022 precision);
2023 else
2024 return dest_lattice->meet_with (src_lats->bits_lattice, precision, sgn,
2025 code, operand);
2028 else if (jfunc->type == IPA_JF_ANCESTOR)
2029 return dest_lattice->set_to_bottom ();
2031 else if (jfunc->bits.known)
2032 return dest_lattice->meet_with (jfunc->bits.value, jfunc->bits.mask, precision);
2034 else
2035 return dest_lattice->set_to_bottom ();
2038 /* Propagate value range across jump function JFUNC that is associated with
2039 edge CS and update DEST_PLATS accordingly. */
2041 static bool
2042 propagate_vr_accross_jump_function (cgraph_edge *cs,
2043 ipa_jump_func *jfunc,
2044 struct ipcp_param_lattices *dest_plats)
2046 struct ipcp_param_lattices *src_lats;
2047 ipcp_vr_lattice *dest_lat = &dest_plats->m_value_range;
2049 if (dest_lat->bottom_p ())
2050 return false;
2052 if (jfunc->type == IPA_JF_PASS_THROUGH)
2054 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2055 if (dest_lat->bottom_p ())
2056 return false;
2057 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
2058 src_lats = ipa_get_parm_lattices (caller_info, src_idx);
2060 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2061 return dest_lat->meet_with (src_lats->m_value_range);
2063 else if (jfunc->type == IPA_JF_CONST)
2065 tree val = ipa_get_jf_constant (jfunc);
2066 if (TREE_CODE (val) == INTEGER_CST)
2068 if (TREE_OVERFLOW_P (val))
2069 val = drop_tree_overflow (val);
2070 jfunc->vr_known = true;
2071 jfunc->m_vr.type = VR_RANGE;
2072 jfunc->m_vr.min = val;
2073 jfunc->m_vr.max = val;
2074 return dest_lat->meet_with (&jfunc->m_vr);
2078 if (jfunc->vr_known)
2079 return dest_lat->meet_with (&jfunc->m_vr);
2080 else
2081 return dest_lat->set_to_bottom ();
2084 /* If DEST_PLATS already has aggregate items, check that aggs_by_ref matches
2085 NEW_AGGS_BY_REF and if not, mark all aggs as bottoms and return true (in all
2086 other cases, return false). If there are no aggregate items, set
2087 aggs_by_ref to NEW_AGGS_BY_REF. */
2089 static bool
2090 set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
2091 bool new_aggs_by_ref)
2093 if (dest_plats->aggs)
2095 if (dest_plats->aggs_by_ref != new_aggs_by_ref)
2097 set_agg_lats_to_bottom (dest_plats);
2098 return true;
2101 else
2102 dest_plats->aggs_by_ref = new_aggs_by_ref;
2103 return false;
/* Walk aggregate lattices in DEST_PLATS from ***AGLAT on, until ***aglat is an
   already existing lattice for the given OFFSET and SIZE, marking all skipped
   lattices as containing variable and checking for overlaps.  If there is no
   already existing lattice for the OFFSET and VAL_SIZE, create one, initialize
   it with offset, size and contains_variable to PRE_EXISTING, and return true,
   unless there are too many already.  If there are too many, return false.  If
   there are overlaps turn whole DEST_PLATS to bottom and return false.  If any
   skipped lattices were newly marked as containing variable, set *CHANGE to
   true.

   Note that AGLAT is a pointer to a pointer to the 'next' link, which is why
   three levels of indirection appear below: it lets the caller resume the
   walk from where this call stopped.  */

static bool
merge_agg_lats_step (struct ipcp_param_lattices *dest_plats,
		     HOST_WIDE_INT offset, HOST_WIDE_INT val_size,
		     struct ipcp_agg_lattice ***aglat,
		     bool pre_existing, bool *change)
{
  gcc_checking_assert (offset >= 0);

  /* Skip all lattices that lie entirely before OFFSET, marking them as
     containing a variable value; a partial overlap degrades everything
     to bottom.  */
  while (**aglat && (**aglat)->offset < offset)
    {
      if ((**aglat)->offset + (**aglat)->size > offset)
	{
	  set_agg_lats_to_bottom (dest_plats);
	  return false;
	}
      *change |= (**aglat)->set_contains_variable ();
      *aglat = &(**aglat)->next;
    }

  if (**aglat && (**aglat)->offset == offset)
    {
      /* An existing lattice at exactly OFFSET must match in size and must
	 not be overlapped by its successor, otherwise give up.  */
      if ((**aglat)->size != val_size
	  || ((**aglat)->next
	      && (**aglat)->next->offset < offset + val_size))
	{
	  set_agg_lats_to_bottom (dest_plats);
	  return false;
	}
      gcc_checking_assert (!(**aglat)->next
			   || (**aglat)->next->offset >= offset + val_size);
      return true;
    }
  else
    {
      struct ipcp_agg_lattice *new_al;

      /* A following lattice starting inside [OFFSET, OFFSET+VAL_SIZE) is an
	 overlap; refuse to create more lattices than the parameter-tunable
	 limit allows.  */
      if (**aglat && (**aglat)->offset < offset + val_size)
	{
	  set_agg_lats_to_bottom (dest_plats);
	  return false;
	}
      if (dest_plats->aggs_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	return false;
      dest_plats->aggs_count++;
      new_al = ipcp_agg_lattice_pool.allocate ();
      memset (new_al, 0, sizeof (*new_al));

      new_al->offset = offset;
      new_al->size = val_size;
      /* A lattice created only now, into a set that pre-existed, missed
	 earlier propagation rounds and so must start as variable.  */
      new_al->contains_variable = pre_existing;

      /* Link the new lattice in before the current position.  */
      new_al->next = **aglat;
      **aglat = new_al;
      return true;
    }
}
2173 /* Set all AGLAT and all other aggregate lattices reachable by next pointers as
2174 containing an unknown value. */
2176 static bool
2177 set_chain_of_aglats_contains_variable (struct ipcp_agg_lattice *aglat)
2179 bool ret = false;
2180 while (aglat)
2182 ret |= aglat->set_contains_variable ();
2183 aglat = aglat->next;
2185 return ret;
/* Merge existing aggregate lattices in SRC_PLATS to DEST_PLATS, subtracting
   OFFSET_DELTA.  CS is the call graph edge and SRC_IDX the index of the source
   parameter used for lattice value sources.  Return true if DEST_PLATS changed
   in any way.  */

static bool
merge_aggregate_lattices (struct cgraph_edge *cs,
			  struct ipcp_param_lattices *dest_plats,
			  struct ipcp_param_lattices *src_plats,
			  int src_idx, HOST_WIDE_INT offset_delta)
{
  /* Whether DEST_PLATS already had lattices matters for newly created ones
     in merge_agg_lats_step (they must start as variable).  */
  bool pre_existing = dest_plats->aggs != NULL;
  struct ipcp_agg_lattice **dst_aglat;
  bool ret = false;

  /* A by-ref/by-value mismatch degrades everything to bottom.  */
  if (set_check_aggs_by_ref (dest_plats, src_plats->aggs_by_ref))
    return true;
  if (src_plats->aggs_bottom)
    return set_agg_lats_contain_variable (dest_plats);
  if (src_plats->aggs_contain_variable)
    ret |= set_agg_lats_contain_variable (dest_plats);
  dst_aglat = &dest_plats->aggs;

  for (struct ipcp_agg_lattice *src_aglat = src_plats->aggs;
       src_aglat;
       src_aglat = src_aglat->next)
    {
      HOST_WIDE_INT new_offset = src_aglat->offset - offset_delta;

      /* Parts of the source aggregate before the ancestor offset have no
	 counterpart in the destination.  */
      if (new_offset < 0)
	continue;
      if (merge_agg_lats_step (dest_plats, new_offset, src_aglat->size,
			       &dst_aglat, pre_existing, &ret))
	{
	  struct ipcp_agg_lattice *new_al = *dst_aglat;

	  dst_aglat = &(*dst_aglat)->next;
	  if (src_aglat->bottom)
	    {
	      ret |= new_al->set_contains_variable ();
	      continue;
	    }
	  if (src_aglat->contains_variable)
	    ret |= new_al->set_contains_variable ();
	  /* Propagate each known value, recording CS/SRC_IDX/offset as the
	     value source so clones can be wired up later.  */
	  for (ipcp_value<tree> *val = src_aglat->values;
	       val;
	       val = val->next)
	    ret |= new_al->add_value (val->value, cs, val, src_idx,
				      src_aglat->offset);
	}
      else if (dest_plats->aggs_bottom)
	return true;
    }
  /* Anything in the destination past the last merged lattice received no
     information and becomes variable.  */
  ret |= set_chain_of_aglats_contains_variable (*dst_aglat);
  return ret;
}
2245 /* Determine whether there is anything to propagate FROM SRC_PLATS through a
2246 pass-through JFUNC and if so, whether it has conform and conforms to the
2247 rules about propagating values passed by reference. */
2249 static bool
2250 agg_pass_through_permissible_p (struct ipcp_param_lattices *src_plats,
2251 struct ipa_jump_func *jfunc)
2253 return src_plats->aggs
2254 && (!src_plats->aggs_by_ref
2255 || ipa_get_jf_pass_through_agg_preserved (jfunc));
/* Propagate aggregate values across jump function JFUNC that is associated
   with edge CS and put the values into DEST_PLATS.  Return true if anything
   in DEST_PLATS changed.  (The original comment said "scalar values", but
   this function handles the aggregate lattices.)  */

static bool
propagate_aggs_accross_jump_function (struct cgraph_edge *cs,
				      struct ipa_jump_func *jfunc,
				      struct ipcp_param_lattices *dest_plats)
{
  bool ret = false;

  /* Bottom can never be improved, nothing to do.  */
  if (dest_plats->aggs_bottom)
    return false;

  if (jfunc->type == IPA_JF_PASS_THROUGH
      && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
    {
      /* Simple pass-through: merge the caller's lattices for the source
	 parameter directly, with no offset adjustment.  */
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
      struct ipcp_param_lattices *src_plats;

      src_plats = ipa_get_parm_lattices (caller_info, src_idx);
      if (agg_pass_through_permissible_p (src_plats, jfunc))
	{
	  /* Currently we do not produce clobber aggregate jump
	     functions, replace with merging when we do.  */
	  gcc_assert (!jfunc->agg.items);
	  ret |= merge_aggregate_lattices (cs, dest_plats, src_plats,
					   src_idx, 0);
	}
      else
	ret |= set_agg_lats_contain_variable (dest_plats);
    }
  else if (jfunc->type == IPA_JF_ANCESTOR
	   && ipa_get_jf_ancestor_agg_preserved (jfunc))
    {
      /* Ancestor jump function: merge with the ancestor offset subtracted
	 from the source lattices' offsets.  */
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
      struct ipcp_param_lattices *src_plats;

      src_plats = ipa_get_parm_lattices (caller_info, src_idx);
      if (src_plats->aggs && src_plats->aggs_by_ref)
	{
	  /* Currently we do not produce clobber aggregate jump
	     functions, replace with merging when we do.  */
	  gcc_assert (!jfunc->agg.items);
	  ret |= merge_aggregate_lattices (cs, dest_plats, src_plats, src_idx,
					   ipa_get_jf_ancestor_offset (jfunc));
	}
      else if (!src_plats->aggs_by_ref)
	/* An ancestor adjustment of a by-value aggregate makes no sense.  */
	ret |= set_agg_lats_to_bottom (dest_plats);
      else
	ret |= set_agg_lats_contain_variable (dest_plats);
    }
  else if (jfunc->agg.items)
    {
      /* The jump function carries explicit known constants at known
	 offsets; feed each into a matching (possibly new) lattice.  */
      bool pre_existing = dest_plats->aggs != NULL;
      struct ipcp_agg_lattice **aglat = &dest_plats->aggs;
      struct ipa_agg_jf_item *item;
      int i;

      if (set_check_aggs_by_ref (dest_plats, jfunc->agg.by_ref))
	return true;

      FOR_EACH_VEC_ELT (*jfunc->agg.items, i, item)
	{
	  HOST_WIDE_INT val_size;

	  if (item->offset < 0)
	    continue;
	  gcc_checking_assert (is_gimple_ip_invariant (item->value));
	  val_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (item->value)));

	  if (merge_agg_lats_step (dest_plats, item->offset, val_size,
				   &aglat, pre_existing, &ret))
	    {
	      ret |= (*aglat)->add_value (item->value, cs, NULL, 0, 0);
	      aglat = &(*aglat)->next;
	    }
	  else if (dest_plats->aggs_bottom)
	    return true;
	}

      /* Lattices past the last item got no information.  */
      ret |= set_chain_of_aglats_contains_variable (*aglat);
    }
  else
    ret |= set_agg_lats_contain_variable (dest_plats);

  return ret;
}
2348 /* Return true if on the way cfrom CS->caller to the final (non-alias and
2349 non-thunk) destination, the call passes through a thunk. */
2351 static bool
2352 call_passes_through_thunk_p (cgraph_edge *cs)
2354 cgraph_node *alias_or_thunk = cs->callee;
2355 while (alias_or_thunk->alias)
2356 alias_or_thunk = alias_or_thunk->get_alias_target ();
2357 return alias_or_thunk->thunk.thunk_p;
/* Propagate constants from the caller to the callee of CS.  Return true if
   any lattice of the callee changed.  */

static bool
propagate_constants_accross_call (struct cgraph_edge *cs)
{
  struct ipa_node_params *callee_info;
  enum availability availability;
  cgraph_node *callee;
  struct ipa_edge_args *args;
  bool ret = false;
  int i, args_count, parms_count;

  callee = cs->callee->function_symbol (&availability);
  if (!callee->definition)
    return false;
  gcc_checking_assert (callee->has_gimple_body_p ());
  callee_info = IPA_NODE_REF (callee);

  args = IPA_EDGE_REF (cs);
  args_count = ipa_get_cs_argument_count (args);
  parms_count = ipa_get_param_count (callee_info);
  if (parms_count == 0)
    return false;

  /* No propagation through instrumentation thunks is available yet.
     It should be possible with proper mapping of call args and
     instrumented callee params in the propagation loop below.  But
     this case mostly occurs when legacy code calls instrumented code
     and it is not a primary target for optimizations.
     We detect instrumentation thunks in aliases and thunks chain by
     checking instrumentation_clone flag for chain source and target.
     Going through instrumentation thunks we always have it changed
     from 0 to 1 and all other nodes do not change it.  */
  if (!cs->callee->instrumentation_clone
      && callee->instrumentation_clone)
    {
      /* Conservatively drop every callee lattice to variable.  */
      for (i = 0; i < parms_count; i++)
	ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
								 i));
      return ret;
    }

  /* If this call goes through a thunk we must not propagate to the first (0th)
     parameter.  However, we might need to uncover a thunk from below a series
     of aliases first.  */
  if (call_passes_through_thunk_p (cs))
    {
      ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
							       0));
      i = 1;
    }
  else
    i = 0;

  /* Run every kind of lattice propagation for each argument/parameter
     pair that exists on both sides.  */
  for (; (i < args_count) && (i < parms_count); i++)
    {
      struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
      struct ipcp_param_lattices *dest_plats;

      dest_plats = ipa_get_parm_lattices (callee_info, i);
      if (availability == AVAIL_INTERPOSABLE)
	/* An interposable callee may be replaced at link time; nothing we
	   derive from this body is trustworthy.  */
	ret |= set_all_contains_variable (dest_plats);
      else
	{
	  ret |= propagate_scalar_accross_jump_function (cs, jump_func,
							 &dest_plats->itself);
	  ret |= propagate_context_accross_jump_function (cs, jump_func, i,
							  &dest_plats->ctxlat);
	  ret |= propagate_alignment_accross_jump_function (cs, jump_func,
						    &dest_plats->alignment);
	  ret |= propagate_bits_accross_jump_function (cs, i, jump_func,
						   &dest_plats->bits_lattice);
	  ret |= propagate_aggs_accross_jump_function (cs, jump_func,
						       dest_plats);
	  if (opt_for_fn (callee->decl, flag_ipa_vrp))
	    ret |= propagate_vr_accross_jump_function (cs,
						       jump_func, dest_plats);
	  else
	    ret |= dest_plats->m_value_range.set_to_bottom ();
	}
    }

  /* Parameters with no corresponding argument get no information.  */
  for (; i < parms_count; i++)
    ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info, i));

  return ret;
}
/* If an indirect edge IE can be turned into a direct one based on KNOWN_CSTS,
   KNOWN_CONTEXTS, KNOWN_AGGS or AGG_REPS return the destination.  The latter
   three can be NULL.  If AGG_REPS is not NULL, KNOWN_AGGS is ignored.  On
   return, *SPECULATIVE indicates whether the returned target is only a
   speculation (e.g. because the vtable pointer may have changed).  */

static tree
ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
				vec<tree> known_csts,
				vec<ipa_polymorphic_call_context> known_contexts,
				vec<ipa_agg_jump_function_p> known_aggs,
				struct ipa_agg_replacement_value *agg_reps,
				bool *speculative)
{
  int param_index = ie->indirect_info->param_index;
  HOST_WIDE_INT anc_offset;
  tree t;
  tree target = NULL;

  *speculative = false;

  /* Without a known parameter feeding the call there is nothing to do.  */
  if (param_index == -1
      || known_csts.length () <= (unsigned int) param_index)
    return NULL_TREE;

  if (!ie->indirect_info->polymorphic)
    {
      tree t;

      if (ie->indirect_info->agg_contents)
	{
	  /* The callee address is loaded from an aggregate; look it up
	     first in the aggregate replacements, then in known aggregate
	     jump functions.  */
	  t = NULL;
	  if (agg_reps && ie->indirect_info->guaranteed_unmodified)
	    {
	      while (agg_reps)
		{
		  if (agg_reps->index == param_index
		      && agg_reps->offset == ie->indirect_info->offset
		      && agg_reps->by_ref == ie->indirect_info->by_ref)
		    {
		      t = agg_reps->value;
		      break;
		    }
		  agg_reps = agg_reps->next;
		}
	    }
	  if (!t)
	    {
	      struct ipa_agg_jump_function *agg;
	      if (known_aggs.length () > (unsigned int) param_index)
		agg = known_aggs[param_index];
	      else
		agg = NULL;
	      bool from_global_constant;
	      t = ipa_find_agg_cst_for_param (agg, known_csts[param_index],
					      ie->indirect_info->offset,
					      ie->indirect_info->by_ref,
					      &from_global_constant);
	      /* A value found in a possibly-modified location cannot be
		 trusted unless it came from a global constant.  */
	      if (t
		  && !from_global_constant
		  && !ie->indirect_info->guaranteed_unmodified)
		t = NULL_TREE;
	    }
	}
      else
	t = known_csts[param_index];

      /* A direct target exists only if we ended up with the address of a
	 function.  */
      if (t &&
	  TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
	return TREE_OPERAND (t, 0);
      else
	return NULL_TREE;
    }

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL_TREE;

  gcc_assert (!ie->indirect_info->agg_contents);
  anc_offset = ie->indirect_info->offset;

  t = NULL;

  /* Try to work out value of virtual table pointer value in replacements.  */
  if (!t && agg_reps && !ie->indirect_info->by_ref)
    {
      while (agg_reps)
	{
	  if (agg_reps->index == param_index
	      && agg_reps->offset == ie->indirect_info->offset
	      && agg_reps->by_ref)
	    {
	      t = agg_reps->value;
	      break;
	    }
	  agg_reps = agg_reps->next;
	}
    }

  /* Try to work out value of virtual table pointer value in known
     aggregate values.  */
  if (!t && known_aggs.length () > (unsigned int) param_index
      && !ie->indirect_info->by_ref)
    {
      struct ipa_agg_jump_function *agg;
      agg = known_aggs[param_index];
      t = ipa_find_agg_cst_for_param (agg, known_csts[param_index],
				      ie->indirect_info->offset,
				      true);
    }

  /* If we found the virtual table pointer, lookup the target.  */
  if (t)
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      if (vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						      vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!target
		  || (TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
		      && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (target)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (ie->indirect_info->vptr_changed)
		    return NULL;
		  target = ipa_impossible_devirt_target (ie, target);
		}
	      *speculative = ie->indirect_info->vptr_changed;
	      if (!*speculative)
		return target;
	    }
	}
    }

  /* Do we know the constant value of pointer?  */
  if (!t)
    t = known_csts[param_index];

  gcc_checking_assert (!t || TREE_CODE (t) != TREE_BINFO);

  /* Combine whatever polymorphic context information is available from the
     caller with the context derived from the known pointer value.  */
  ipa_polymorphic_call_context context;
  if (known_contexts.length () > (unsigned int) param_index)
    {
      context = known_contexts[param_index];
      context.offset_by (anc_offset);
      if (ie->indirect_info->vptr_changed)
	context.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
					      ie->indirect_info->otr_type);
      if (t)
	{
	  ipa_polymorphic_call_context ctx2 = ipa_polymorphic_call_context
	    (t, ie->indirect_info->otr_type, anc_offset);
	  if (!ctx2.useless_p ())
	    context.combine_with (ctx2, ie->indirect_info->otr_type);
	}
    }
  else if (t)
    {
      context = ipa_polymorphic_call_context (t, ie->indirect_info->otr_type,
					      anc_offset);
      if (ie->indirect_info->vptr_changed)
	context.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
					      ie->indirect_info->otr_type);
    }
  else
    return NULL_TREE;

  vec <cgraph_node *>targets;
  bool final;

  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     context, &final);
  if (!final || targets.length () > 1)
    {
      /* The target set is not conclusive; fall back to speculative
	 devirtualization if the edge is hot enough.  */
      struct cgraph_node *node;
      if (*speculative)
	return target;
      if (!opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	  || ie->speculative || !ie->maybe_hot_p ())
	return NULL;
      node = try_speculative_devirtualization (ie->indirect_info->otr_type,
					       ie->indirect_info->otr_token,
					       context);
      if (node)
	{
	  *speculative = true;
	  target = node->decl;
	}
      else
	return NULL;
    }
  else
    {
      *speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }

  if (target && !possible_polymorphic_call_target_p (ie,
						     cgraph_node::get (target)))
    {
      if (*speculative)
	return NULL;
      target = ipa_impossible_devirt_target (ie, target);
    }

  return target;
}
/* If an indirect edge IE can be turned into a direct one based on KNOWN_CSTS,
   KNOWN_CONTEXTS (which can be vNULL) or KNOWN_AGGS (which also can be vNULL)
   return the destination.  Thin wrapper that invokes the worker without any
   aggregate replacement values.  */

tree
ipa_get_indirect_edge_target (struct cgraph_edge *ie,
			      vec<tree> known_csts,
			      vec<ipa_polymorphic_call_context> known_contexts,
			      vec<ipa_agg_jump_function_p> known_aggs,
			      bool *speculative)
{
  return ipa_get_indirect_edge_target_1 (ie, known_csts, known_contexts,
					 known_aggs, NULL, speculative);
}
2682 /* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
2683 and KNOWN_CONTEXTS. */
2685 static int
2686 devirtualization_time_bonus (struct cgraph_node *node,
2687 vec<tree> known_csts,
2688 vec<ipa_polymorphic_call_context> known_contexts,
2689 vec<ipa_agg_jump_function_p> known_aggs)
2691 struct cgraph_edge *ie;
2692 int res = 0;
2694 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
2696 struct cgraph_node *callee;
2697 struct inline_summary *isummary;
2698 enum availability avail;
2699 tree target;
2700 bool speculative;
2702 target = ipa_get_indirect_edge_target (ie, known_csts, known_contexts,
2703 known_aggs, &speculative);
2704 if (!target)
2705 continue;
2707 /* Only bare minimum benefit for clearly un-inlineable targets. */
2708 res += 1;
2709 callee = cgraph_node::get (target);
2710 if (!callee || !callee->definition)
2711 continue;
2712 callee = callee->function_symbol (&avail);
2713 if (avail < AVAIL_AVAILABLE)
2714 continue;
2715 isummary = inline_summaries->get (callee);
2716 if (!isummary->inlinable)
2717 continue;
2719 /* FIXME: The values below need re-considering and perhaps also
2720 integrating into the cost metrics, at lest in some very basic way. */
2721 if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
2722 res += 31 / ((int)speculative + 1);
2723 else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
2724 res += 15 / ((int)speculative + 1);
2725 else if (isummary->size <= MAX_INLINE_INSNS_AUTO
2726 || DECL_DECLARED_INLINE_P (callee->decl))
2727 res += 7 / ((int)speculative + 1);
2730 return res;
2733 /* Return time bonus incurred because of HINTS. */
2735 static int
2736 hint_time_bonus (inline_hints hints)
2738 int result = 0;
2739 if (hints & (INLINE_HINT_loop_iterations | INLINE_HINT_loop_stride))
2740 result += PARAM_VALUE (PARAM_IPA_CP_LOOP_HINT_BONUS);
2741 if (hints & INLINE_HINT_array_index)
2742 result += PARAM_VALUE (PARAM_IPA_CP_ARRAY_INDEX_HINT_BONUS);
2743 return result;
2746 /* If there is a reason to penalize the function described by INFO in the
2747 cloning goodness evaluation, do so. */
2749 static inline int64_t
2750 incorporate_penalties (ipa_node_params *info, int64_t evaluation)
2752 if (info->node_within_scc)
2753 evaluation = (evaluation
2754 * (100 - PARAM_VALUE (PARAM_IPA_CP_RECURSION_PENALTY))) / 100;
2756 if (info->node_calling_single_call)
2757 evaluation = (evaluation
2758 * (100 - PARAM_VALUE (PARAM_IPA_CP_SINGLE_CALL_PENALTY)))
2759 / 100;
2761 return evaluation;
/* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
   and SIZE_COST and with the sum of frequencies of incoming edges to the
   potential new clone in FREQ_SUM (and of profile counts in COUNT_SUM).  The
   two branches differ only in whether the evaluation is scaled by profile
   counts or by frequencies.  */

static bool
good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
			    int freq_sum, gcov_type count_sum, int size_cost)
{
  if (time_benefit == 0
      || !opt_for_fn (node->decl, flag_ipa_cp_clone)
      || node->optimize_for_size_p ())
    return false;

  gcc_assert (size_cost > 0);

  struct ipa_node_params *info = IPA_NODE_REF (node);
  if (max_count)
    {
      /* Profile feedback available: weight the benefit by the fraction of
	 the hottest count the clone would receive (in permille).  */
      int factor = (count_sum * 1000) / max_count;
      int64_t evaluation = (((int64_t) time_benefit * factor)
			    / size_cost);
      evaluation = incorporate_penalties (info, evaluation);

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "     good_cloning_opportunity_p (time: %i, "
		 "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
		 "%s%s) -> evaluation: " "%" PRId64
		 ", threshold: %i\n",
		 time_benefit, size_cost, (HOST_WIDE_INT) count_sum,
		 info->node_within_scc ? ", scc" : "",
		 info->node_calling_single_call ? ", single_call" : "",
		 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));

      return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
    }
  else
    {
      /* No profile: weight the benefit by estimated call frequencies.  */
      int64_t evaluation = (((int64_t) time_benefit * freq_sum)
			    / size_cost);
      evaluation = incorporate_penalties (info, evaluation);

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "     good_cloning_opportunity_p (time: %i, "
		 "size: %i, freq_sum: %i%s%s) -> evaluation: "
		 "%" PRId64 ", threshold: %i\n",
		 time_benefit, size_cost, freq_sum,
		 info->node_within_scc ? ", scc" : "",
		 info->node_calling_single_call ? ", single_call" : "",
		 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));

      return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
    }
}
2818 /* Return all context independent values from aggregate lattices in PLATS in a
2819 vector. Return NULL if there are none. */
2821 static vec<ipa_agg_jf_item, va_gc> *
2822 context_independent_aggregate_values (struct ipcp_param_lattices *plats)
2824 vec<ipa_agg_jf_item, va_gc> *res = NULL;
2826 if (plats->aggs_bottom
2827 || plats->aggs_contain_variable
2828 || plats->aggs_count == 0)
2829 return NULL;
2831 for (struct ipcp_agg_lattice *aglat = plats->aggs;
2832 aglat;
2833 aglat = aglat->next)
2834 if (aglat->is_single_const ())
2836 struct ipa_agg_jf_item item;
2837 item.offset = aglat->offset;
2838 item.value = aglat->values->value;
2839 vec_safe_push (res, item);
2841 return res;
/* Allocate KNOWN_CSTS, KNOWN_CONTEXTS and, if non-NULL, KNOWN_AGGS and
   populate them with values of parameters that are known independent of the
   context.  INFO describes the function.  If REMOVABLE_PARAMS_COST is
   non-NULL, the movement cost of all removable parameters will be stored in
   it.  Return true if any context-independent scalar constant or aggregate
   item was found.  */

static bool
gather_context_independent_values (struct ipa_node_params *info,
				   vec<tree> *known_csts,
				   vec<ipa_polymorphic_call_context>
				   *known_contexts,
				   vec<ipa_agg_jump_function> *known_aggs,
				   int *removable_params_cost)
{
  int i, count = ipa_get_param_count (info);
  bool ret = false;

  known_csts->create (0);
  known_contexts->create (0);
  known_csts->safe_grow_cleared (count);
  known_contexts->safe_grow_cleared (count);
  if (known_aggs)
    {
      known_aggs->create (0);
      known_aggs->safe_grow_cleared (count);
    }

  if (removable_params_cost)
    *removable_params_cost = 0;

  for (i = 0; i < count ; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      ipcp_lattice<tree> *lat = &plats->itself;

      if (lat->is_single_const ())
	{
	  /* The scalar lattice has exactly one value: a constant known in
	     every context.  Removing such a parameter saves its move cost.  */
	  ipcp_value<tree> *val = lat->values;
	  gcc_checking_assert (TREE_CODE (val->value) != TREE_BINFO);
	  (*known_csts)[i] = val->value;
	  if (removable_params_cost)
	    *removable_params_cost
	      += estimate_move_cost (TREE_TYPE (val->value), false);
	  ret = true;
	}
      else if (removable_params_cost
	       && !ipa_is_param_used (info, i))
	/* An unused parameter is removable even without a known value.  */
	*removable_params_cost
	  += ipa_get_param_move_cost (info, i);

      if (!ipa_is_param_used (info, i))
	continue;

      ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
      /* Do not account known context as reason for cloning.  We can see
	 if it permits devirtualization.  */
      if (ctxlat->is_single_const ())
	(*known_contexts)[i] = ctxlat->values->value;

      if (known_aggs)
	{
	  vec<ipa_agg_jf_item, va_gc> *agg_items;
	  struct ipa_agg_jump_function *ajf;

	  agg_items = context_independent_aggregate_values (plats);
	  ajf = &(*known_aggs)[i];
	  ajf->items = agg_items;
	  ajf->by_ref = plats->aggs_by_ref;
	  ret |= agg_items != NULL;
	}
    }

  return ret;
}
2919 /* The current interface in ipa-inline-analysis requires a pointer vector.
2920 Create it.
2922 FIXME: That interface should be re-worked, this is slightly silly. Still,
2923 I'd like to discuss how to change it first and this demonstrates the
2924 issue. */
2926 static vec<ipa_agg_jump_function_p>
2927 agg_jmp_p_vec_for_t_vec (vec<ipa_agg_jump_function> known_aggs)
2929 vec<ipa_agg_jump_function_p> ret;
2930 struct ipa_agg_jump_function *ajf;
2931 int i;
2933 ret.create (known_aggs.length ());
2934 FOR_EACH_VEC_ELT (known_aggs, i, ajf)
2935 ret.quick_push (ajf);
2936 return ret;
2939 /* Perform time and size measurement of NODE with the context given in
2940 KNOWN_CSTS, KNOWN_CONTEXTS and KNOWN_AGGS, calculate the benefit and cost
2941 given BASE_TIME of the node without specialization, REMOVABLE_PARAMS_COST of
2942 all context-independent removable parameters and EST_MOVE_COST of estimated
2943 movement of the considered parameter and store it into VAL. */
2945 static void
2946 perform_estimation_of_a_value (cgraph_node *node, vec<tree> known_csts,
2947 vec<ipa_polymorphic_call_context> known_contexts,
2948 vec<ipa_agg_jump_function_p> known_aggs_ptrs,
2949 int base_time, int removable_params_cost,
2950 int est_move_cost, ipcp_value_base *val)
2952 int time, size, time_benefit;
2953 inline_hints hints;
2955 estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
2956 known_aggs_ptrs, &size, &time,
2957 &hints);
2958 time_benefit = base_time - time
2959 + devirtualization_time_bonus (node, known_csts, known_contexts,
2960 known_aggs_ptrs)
2961 + hint_time_bonus (hints)
2962 + removable_params_cost + est_move_cost;
2964 gcc_checking_assert (size >=0);
2965 /* The inliner-heuristics based estimates may think that in certain
2966 contexts some functions do not have any size at all but we want
2967 all specializations to have at least a tiny cost, not least not to
2968 divide by zero. */
2969 if (size == 0)
2970 size = 1;
2972 val->local_time_benefit = time_benefit;
2973 val->local_size_cost = size;
/* Iterate over known values of parameters of NODE and estimate the local
   effects in terms of time and size they have.  Considers, in order: cloning
   for all known contexts at once, each scalar constant, each polymorphic
   context and each aggregate constant individually.  */

static void
estimate_local_effects (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);
  vec<tree> known_csts;
  vec<ipa_polymorphic_call_context> known_contexts;
  vec<ipa_agg_jump_function> known_aggs;
  vec<ipa_agg_jump_function_p> known_aggs_ptrs;
  bool always_const;
  int base_time = inline_summaries->get (node)->time;
  int removable_params_cost;

  if (!count || !ipcp_versionable_function_p (node))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEstimating effects for %s/%i, base_time: %i.\n",
	     node->name (), node->order, base_time);

  always_const = gather_context_independent_values (info, &known_csts,
						    &known_contexts, &known_aggs,
						    &removable_params_cost);
  known_aggs_ptrs = agg_jmp_p_vec_for_t_vec (known_aggs);
  int devirt_bonus = devirtualization_time_bonus (node, known_csts,
					   known_contexts, known_aggs_ptrs);

  /* First consider specializing for all contexts at once, which is
     worthwhile if there are context-independent values, devirtualization
     opportunities, or removable parameters.  */
  if (always_const || devirt_bonus
      || (removable_params_cost && node->local.can_change_signature))
    {
      struct caller_statistics stats;
      inline_hints hints;
      int time, size;

      init_caller_stats (&stats);
      node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
						false);
      estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
					 known_aggs_ptrs, &size, &time, &hints);
      time -= devirt_bonus;
      time -= hint_time_bonus (hints);
      time -= removable_params_cost;
      size -= stats.n_calls * removable_params_cost;

      if (dump_file)
	fprintf (dump_file, " - context independent values, size: %i, "
		 "time_benefit: %i\n", size, base_time - time);

      if (size <= 0 || node->local.local)
	{
	  info->do_clone_for_all_contexts = true;
	  base_time = time;

	  if (dump_file)
	    fprintf (dump_file, "     Decided to specialize for all "
		     "known contexts, code not going to grow.\n");
	}
      else if (good_cloning_opportunity_p (node, base_time - time,
					   stats.freq_sum, stats.count_sum,
					   size))
	{
	  if (size + overall_size <= max_new_size)
	    {
	      info->do_clone_for_all_contexts = true;
	      base_time = time;
	      overall_size += size;

	      if (dump_file)
		fprintf (dump_file, "     Decided to specialize for all "
			 "known contexts, growth deemed beneficial.\n");
	    }
	  else if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "   Not cloning for all contexts because "
		     "max_new_size would be reached with %li.\n",
		     size + overall_size);
	}
      else if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "   Not cloning for all contexts because "
		 "!good_cloning_opportunity_p.\n");
    }

  /* Estimate each candidate scalar constant by temporarily pretending it is
     known (stored into known_csts[i]) and measuring the clone.  */
  for (i = 0; i < count ; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      ipcp_lattice<tree> *lat = &plats->itself;
      ipcp_value<tree> *val;

      if (lat->bottom
	  || !lat->values
	  || known_csts[i])
	continue;

      for (val = lat->values; val; val = val->next)
	{
	  gcc_checking_assert (TREE_CODE (val->value) != TREE_BINFO);
	  known_csts[i] = val->value;

	  int emc = estimate_move_cost (TREE_TYPE (val->value), true);
	  perform_estimation_of_a_value (node, known_csts, known_contexts,
					 known_aggs_ptrs, base_time,
					 removable_params_cost, emc, val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " - estimates for value ");
	      print_ipcp_constant_value (dump_file, val->value);
	      fprintf (dump_file, " for ");
	      ipa_dump_param (dump_file, info, i);
	      fprintf (dump_file, ": time_benefit: %i, size: %i\n",
		       val->local_time_benefit, val->local_size_cost);
	    }
	}
      /* Restore the slot so the next parameter's evaluation is clean.  */
      known_csts[i] = NULL_TREE;
    }

  /* Likewise for each candidate polymorphic context.  */
  for (i = 0; i < count; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);

      if (!plats->virt_call)
	continue;

      ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
      ipcp_value<ipa_polymorphic_call_context> *val;

      if (ctxlat->bottom
	  || !ctxlat->values
	  || !known_contexts[i].useless_p ())
	continue;

      for (val = ctxlat->values; val; val = val->next)
	{
	  known_contexts[i] = val->value;
	  perform_estimation_of_a_value (node, known_csts, known_contexts,
					 known_aggs_ptrs, base_time,
					 removable_params_cost, 0, val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " - estimates for polymorphic context ");
	      print_ipcp_constant_value (dump_file, val->value);
	      fprintf (dump_file, " for ");
	      ipa_dump_param (dump_file, info, i);
	      fprintf (dump_file, ": time_benefit: %i, size: %i\n",
		       val->local_time_benefit, val->local_size_cost);
	    }
	}
      known_contexts[i] = ipa_polymorphic_call_context ();
    }

  /* Likewise for each candidate aggregate constant, pushed temporarily onto
     the known-aggregate item vector and popped afterwards.  */
  for (i = 0; i < count ; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      struct ipa_agg_jump_function *ajf;
      struct ipcp_agg_lattice *aglat;

      if (plats->aggs_bottom || !plats->aggs)
	continue;

      ajf = &known_aggs[i];
      for (aglat = plats->aggs; aglat; aglat = aglat->next)
	{
	  ipcp_value<tree> *val;
	  if (aglat->bottom || !aglat->values
	      /* If the following is true, the one value is in known_aggs.  */
	      || (!plats->aggs_contain_variable
		  && aglat->is_single_const ()))
	    continue;

	  for (val = aglat->values; val; val = val->next)
	    {
	      struct ipa_agg_jf_item item;

	      item.offset = aglat->offset;
	      item.value = val->value;
	      vec_safe_push (ajf->items, item);

	      perform_estimation_of_a_value (node, known_csts, known_contexts,
					     known_aggs_ptrs, base_time,
					     removable_params_cost, 0, val);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, " - estimates for value ");
		  print_ipcp_constant_value (dump_file, val->value);
		  fprintf (dump_file, " for ");
		  ipa_dump_param (dump_file, info, i);
		  fprintf (dump_file, "[%soffset: " HOST_WIDE_INT_PRINT_DEC
			   "]: time_benefit: %i, size: %i\n",
			   plats->aggs_by_ref ? "ref " : "",
			   aglat->offset,
			   val->local_time_benefit, val->local_size_cost);
		}

	      ajf->items->pop ();
	    }
	}
    }

  for (i = 0; i < count ; i++)
    vec_free (known_aggs[i].items);

  known_csts.release ();
  known_contexts.release ();
  known_aggs.release ();
  known_aggs_ptrs.release ();
}
/* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
   topological sort of values.  This is Tarjan's strongly connected components
   algorithm: dfs/low_link discover SCCs, members of one SCC are chained via
   scc_next, and SCC representatives are prepended to values_topo.  */

template <typename valtype>
void
value_topo_info<valtype>::add_val (ipcp_value<valtype> *cur_val)
{
  ipcp_value_source<valtype> *src;

  /* Non-zero dfs means the value has been visited already.  */
  if (cur_val->dfs)
    return;

  dfs_counter++;
  cur_val->dfs = dfs_counter;
  cur_val->low_link = dfs_counter;

  cur_val->topo_next = stack;
  stack = cur_val;
  cur_val->on_stack = true;

  /* Recurse into the values this one was derived from.  */
  for (src = cur_val->sources; src; src = src->next)
    if (src->val)
      {
	if (src->val->dfs == 0)
	  {
	    add_val (src->val);
	    if (src->val->low_link < cur_val->low_link)
	      cur_val->low_link = src->val->low_link;
	  }
	else if (src->val->on_stack
		 && src->val->dfs < cur_val->low_link)
	  cur_val->low_link = src->val->dfs;
      }

  /* CUR_VAL is the root of an SCC: pop its members off the stack, link them
     into an scc_next chain and record the SCC in the topological order.  */
  if (cur_val->dfs == cur_val->low_link)
    {
      ipcp_value<valtype> *v, *scc_list = NULL;

      do
	{
	  v = stack;
	  stack = v->topo_next;
	  v->on_stack = false;

	  v->scc_next = scc_list;
	  scc_list = v;
	}
      while (v != cur_val);

      cur_val->topo_next = values_topo;
      values_topo = cur_val;
    }
}
3242 /* Add all values in lattices associated with NODE to the topological sort if
3243 they are not there yet. */
3245 static void
3246 add_all_node_vals_to_toposort (cgraph_node *node, ipa_topo_info *topo)
3248 struct ipa_node_params *info = IPA_NODE_REF (node);
3249 int i, count = ipa_get_param_count (info)
/* Iterate over all formal parameters of NODE.  */;
3251 for (i = 0; i < count ; i++)
3253 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
3254 ipcp_lattice<tree> *lat = &plats->itself;
3255 struct ipcp_agg_lattice *aglat;
/* Scalar constant values of this parameter.  */
3257 if (!lat->bottom)
3259 ipcp_value<tree> *val;
3260 for (val = lat->values; val; val = val->next)
3261 topo->constants.add_val (val);
/* Values of aggregate lattices (known parts of passed aggregates).  */
3264 if (!plats->aggs_bottom)
3265 for (aglat = plats->aggs; aglat; aglat = aglat->next)
3266 if (!aglat->bottom)
3268 ipcp_value<tree> *val;
3269 for (val = aglat->values; val; val = val->next)
3270 topo->constants.add_val (val);
/* Known polymorphic call contexts for this parameter.  */
3273 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
3274 if (!ctxlat->bottom)
3276 ipcp_value<ipa_polymorphic_call_context> *ctxval;
3277 for (ctxval = ctxlat->values; ctxval; ctxval = ctxval->next)
3278 topo->contexts.add_val (ctxval);
3283 /* One pass of constants propagation along the call graph edges, from callers
3284 to callees (requires topological ordering in TOPO), iterate over strongly
3285 connected components. */
3287 static void
3288 propagate_constants_topo (struct ipa_topo_info *topo)
3290 int i;
/* Walk the topological order backwards so callers are processed before
   their callees.  */
3292 for (i = topo->nnodes - 1; i >= 0; i--)
3294 unsigned j;
3295 struct cgraph_node *v, *node = topo->order[i];
3296 vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
3298 /* First, iteratively propagate within the strongly connected component
3299 until all lattices stabilize. */
3300 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
3301 if (v->has_gimple_body_p ())
3302 push_node_to_stack (topo, v);
/* Worklist loop: re-push a callee whenever propagation changed one of its
   lattices, until a fixed point is reached within the SCC.  */
3304 v = pop_node_from_stack (topo);
3305 while (v)
3307 struct cgraph_edge *cs;
3309 for (cs = v->callees; cs; cs = cs->next_callee)
3310 if (ipa_edge_within_scc (cs))
3312 IPA_NODE_REF (v)->node_within_scc = true;
3313 if (propagate_constants_accross_call (cs))
3314 push_node_to_stack (topo, cs->callee->function_symbol ());
3316 v = pop_node_from_stack (topo);
3319 /* Afterwards, propagate along edges leading out of the SCC, calculate
3320 the local effects of the discovered constants and add all valid values
3321 to their topological sort. */
3322 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
3323 if (v->has_gimple_body_p ())
3325 struct cgraph_edge *cs;
3327 estimate_local_effects (v);
3328 add_all_node_vals_to_toposort (v, topo);
3329 for (cs = v->callees; cs; cs = cs->next_callee)
3330 if (!ipa_edge_within_scc (cs))
3331 propagate_constants_accross_call (cs);
3333 cycle_nodes.release ();
/* Return the sum of A and B, saturating rather than overflowing: if either
   operand is bigger than INT_MAX/2 the exact sum might not fit in an int,
   so the bigger of the two operands is returned instead.  */

static int
safe_add (int a, int b)
{
  /* Both halves small enough -- the exact sum cannot overflow.  */
  if (a <= INT_MAX / 2 && b <= INT_MAX / 2)
    return a + b;
  return a > b ? a : b;
}
3351 /* Propagate the estimated effects of individual values along the topological
3352 order, from the dependent values to those they depend on. */
3354 template <typename valtype>
3355 void
3356 value_topo_info<valtype>::propagate_effects ()
3358 ipcp_value<valtype> *base;
/* values_topo chains SCCs in topological order; members of one SCC are
   chained through scc_next.  */
3360 for (base = values_topo; base; base = base->topo_next)
3362 ipcp_value_source<valtype> *src;
3363 ipcp_value<valtype> *val;
3364 int time = 0, size = 0;
/* Accumulate the local and already-propagated effects of the whole SCC,
   using safe_add to saturate instead of overflowing.  */
3366 for (val = base; val; val = val->scc_next)
3368 time = safe_add (time,
3369 val->local_time_benefit + val->prop_time_benefit);
3370 size = safe_add (size, val->local_size_cost + val->prop_size_cost);
/* Credit the accumulated effects to every value feeding into the SCC
   through an edge that may be hot.  */
3373 for (val = base; val; val = val->scc_next)
3374 for (src = val->sources; src; src = src->next)
3375 if (src->val
3376 && src->cs->maybe_hot_p ())
3378 src->val->prop_time_benefit = safe_add (time,
3379 src->val->prop_time_benefit);
3380 src->val->prop_size_cost = safe_add (size,
3381 src->val->prop_size_cost);
3387 /* Propagate constants, polymorphic contexts and their effects from the
3388 summaries interprocedurally. */
3390 static void
3391 ipcp_propagate_stage (struct ipa_topo_info *topo)
3393 struct cgraph_node *node;
3395 if (dump_file)
3396 fprintf (dump_file, "\n Propagating constants:\n\n");
3398 if (in_lto_p)
3399 ipa_update_after_lto_read ();
/* Initialize per-node lattices and gather unit-wide size/count statistics.  */
3402 FOR_EACH_DEFINED_FUNCTION (node)
3404 struct ipa_node_params *info = IPA_NODE_REF (node);
3406 /* In LTO we do not have PARM_DECLs but we would still like to be able to
3407 look at types of parameters. */
3408 if (in_lto_p)
3410 tree t = TYPE_ARG_TYPES (TREE_TYPE (node->decl));
3411 for (int k = 0; k < ipa_get_param_count (info) && t; k++)
3413 gcc_assert (t != void_list_node);
3414 info->descriptors[k].decl_or_type = TREE_VALUE (t);
3415 t = t ? TREE_CHAIN (t) : NULL;
3419 determine_versionability (node, info);
3420 if (node->has_gimple_body_p ())
3422 info->lattices = XCNEWVEC (struct ipcp_param_lattices,
3423 ipa_get_param_count (info));
3424 initialize_node_lattices (node);
3426 if (node->definition && !node->alias)
3427 overall_size += inline_summaries->get (node)->self_size;
3428 if (node->count > max_count)
3429 max_count = node->count;
/* Compute the unit growth budget for cloning from the current overall size
   and the --param knobs.  */
3432 max_new_size = overall_size;
3433 if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
3434 max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
3435 max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;
3437 if (dump_file)
3438 fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
3439 overall_size, max_new_size);
/* Do the actual interprocedural propagation, then propagate the estimated
   effects of values along the topological order of values.  */
3441 propagate_constants_topo (topo);
3442 if (flag_checking)
3443 ipcp_verify_propagated_values ();
3444 topo->constants.propagate_effects ();
3445 topo->contexts.propagate_effects ();
3447 if (dump_file)
3449 fprintf (dump_file, "\nIPA lattices after all propagation:\n");
3450 print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
3454 /* Discover newly direct outgoing edges from NODE which is a new clone with
3455 known KNOWN_CSTS and make them direct. */
3457 static void
3458 ipcp_discover_new_direct_edges (struct cgraph_node *node,
3459 vec<tree> known_csts,
3460 vec<ipa_polymorphic_call_context>
3461 known_contexts,
3462 struct ipa_agg_replacement_value *aggvals)
3464 struct cgraph_edge *ie, *next_ie;
3465 bool found = false;
/* next_ie is remembered up front because turning IE direct unlinks it from
   the indirect call list.  */
3467 for (ie = node->indirect_calls; ie; ie = next_ie)
3469 tree target;
3470 bool speculative;
3472 next_ie = ie->next_callee;
3473 target = ipa_get_indirect_edge_target_1 (ie, known_csts, known_contexts,
3474 vNULL, aggvals, &speculative);
3475 if (target)
3477 bool agg_contents = ie->indirect_info->agg_contents;
3478 bool polymorphic = ie->indirect_info->polymorphic;
3479 int param_index = ie->indirect_info->param_index;
3480 struct cgraph_edge *cs = ipa_make_edge_direct_to_target (ie, target,
3481 speculative);
3482 found = true;
/* A devirtualized simple indirect call means one fewer controlled use of
   the parameter; when the count drops to zero the cloning-created
   reference can go away too.  */
3484 if (cs && !agg_contents && !polymorphic)
3486 struct ipa_node_params *info = IPA_NODE_REF (node);
3487 int c = ipa_get_controlled_uses (info, param_index);
3488 if (c != IPA_UNDESCRIBED_USE)
3490 struct ipa_ref *to_del;
3492 c--;
3493 ipa_set_controlled_uses (info, param_index, c);
3494 if (dump_file && (dump_flags & TDF_DETAILS))
3495 fprintf (dump_file, " controlled uses count of param "
3496 "%i bumped down to %i\n", param_index, c);
3497 if (c == 0
3498 && (to_del = node->find_reference (cs->callee, NULL, 0)))
3500 if (dump_file && (dump_flags & TDF_DETAILS))
3501 fprintf (dump_file, " and even removing its "
3502 "cloning-created reference\n");
3503 to_del->remove_reference ();
3509 /* Turning calls to direct calls will improve overall summary. */
3510 if (found)
3511 inline_update_overall_summary (node);
3514 /* Vectors of pointers that form linked lists of clones of an original cgraph
3515 edge. */
3517 static vec<cgraph_edge *> next_edge_clone;
3518 static vec<cgraph_edge *> prev_edge_clone;
3520 static inline void
3521 grow_edge_clone_vectors (void)
3523 if (next_edge_clone.length ()
3524 <= (unsigned) symtab->edges_max_uid)
3525 next_edge_clone.safe_grow_cleared (symtab->edges_max_uid + 1);
3526 if (prev_edge_clone.length ()
3527 <= (unsigned) symtab->edges_max_uid)
3528 prev_edge_clone.safe_grow_cleared (symtab->edges_max_uid + 1);
3531 /* Edge duplication hook to grow the appropriate linked list in
3532 next_edge_clone. */
3534 static void
3535 ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3536 void *)
3538 grow_edge_clone_vectors ();
/* Splice DST into the doubly-linked clone list right after SRC.  */
3540 struct cgraph_edge *old_next = next_edge_clone[src->uid];
3541 if (old_next)
3542 prev_edge_clone[old_next->uid] = dst;
3543 prev_edge_clone[dst->uid] = src;
3545 next_edge_clone[dst->uid] = old_next;
3546 next_edge_clone[src->uid] = dst;
3549 /* Hook that is called by cgraph.c when an edge is removed. */
3551 static void
3552 ipcp_edge_removal_hook (struct cgraph_edge *cs, void *)
3554 grow_edge_clone_vectors ();
/* Unlink CS from the doubly-linked list of edge clones by connecting its
   neighbors to each other.  */
3556 struct cgraph_edge *prev = prev_edge_clone[cs->uid];
3557 struct cgraph_edge *next = next_edge_clone[cs->uid];
3558 if (prev)
3559 next_edge_clone[prev->uid] = next;
3560 if (next)
3561 prev_edge_clone[next->uid] = prev;
3564 /* See if NODE is a clone with a known aggregate value at a given OFFSET of a
3565 parameter with the given INDEX. */
3567 static tree
3568 get_clone_agg_value (struct cgraph_node *node, HOST_WIDE_INT offset,
3569 int index)
3571 struct ipa_agg_replacement_value *aggval;
3573 aggval = ipa_get_agg_replacements_for_node (node);
3574 while (aggval)
3576 if (aggval->offset == offset
3577 && aggval->index == index)
3578 return aggval->value;
3579 aggval = aggval->next;
3581 return NULL_TREE;
3584 /* Return true is NODE is DEST or its clone for all contexts. */
3586 static bool
3587 same_node_or_its_all_contexts_clone_p (cgraph_node *node, cgraph_node *dest)
3589 if (node == dest)
3590 return true;
3592 struct ipa_node_params *info = IPA_NODE_REF (node);
3593 return info->is_all_contexts_clone && info->ipcp_orig_node == dest;
3596 /* Return true if edge CS does bring about the value described by SRC to node
3597 DEST or its clone for all contexts. */
3599 static bool
3600 cgraph_edge_brings_value_p (cgraph_edge *cs, ipcp_value_source<tree> *src,
3601 cgraph_node *dest)
3603 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
3604 enum availability availability;
3605 cgraph_node *real_dest = cs->callee->function_symbol (&availability);
/* The edge is irrelevant if it does not lead to DEST (or its clone), the
   callee can be interposed, or the caller is already dead.  */
3607 if (!same_node_or_its_all_contexts_clone_p (real_dest, dest)
3608 || availability <= AVAIL_INTERPOSABLE
3609 || caller_info->node_dead)
3610 return false;
/* A NULL val describes "any value"; every surviving edge brings it.  */
3611 if (!src->val)
3612 return true;
/* For a caller that is itself an IPA-CP clone, consult the constants the
   clone was created with; otherwise consult the caller's lattices.  */
3614 if (caller_info->ipcp_orig_node)
3616 tree t;
3617 if (src->offset == -1)
3618 t = caller_info->known_csts[src->index];
3619 else
3620 t = get_clone_agg_value (cs->caller, src->offset, src->index);
3621 return (t != NULL_TREE
3622 && values_equal_for_ipcp_p (src->val->value, t));
3624 else
3626 struct ipcp_agg_lattice *aglat;
3627 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
3628 src->index);
/* offset == -1 denotes a scalar value; otherwise look for the aggregate
   lattice at the matching offset.  */
3629 if (src->offset == -1)
3630 return (plats->itself.is_single_const ()
3631 && values_equal_for_ipcp_p (src->val->value,
3632 plats->itself.values->value));
3633 else
3635 if (plats->aggs_bottom || plats->aggs_contain_variable)
3636 return false;
3637 for (aglat = plats->aggs; aglat; aglat = aglat->next)
3638 if (aglat->offset == src->offset)
3639 return (aglat->is_single_const ()
3640 && values_equal_for_ipcp_p (src->val->value,
3641 aglat->values->value));
3643 return false;
3647 /* Return true if edge CS does bring about the value described by SRC to node
3648 DEST or its clone for all contexts. */
/* Overload for polymorphic call contexts rather than scalar constants.  */
3650 static bool
3651 cgraph_edge_brings_value_p (cgraph_edge *cs,
3652 ipcp_value_source<ipa_polymorphic_call_context> *src,
3653 cgraph_node *dest)
3655 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
3656 cgraph_node *real_dest = cs->callee->function_symbol ();
3658 if (!same_node_or_its_all_contexts_clone_p (real_dest, dest)
3659 || caller_info->node_dead)
3660 return false;
/* A NULL val describes "any value"; every surviving edge brings it.  */
3661 if (!src->val)
3662 return true;
/* IPA-CP clones record the contexts they were created with; other callers
   are consulted through their context lattices.  */
3664 if (caller_info->ipcp_orig_node)
3665 return (caller_info->known_contexts.length () > (unsigned) src->index)
3666 && values_equal_for_ipcp_p (src->val->value,
3667 caller_info->known_contexts[src->index]);
3669 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
3670 src->index);
3671 return plats->ctxlat.is_single_const ()
3672 && values_equal_for_ipcp_p (src->val->value,
3673 plats->ctxlat.values->value);
3676 /* Get the next clone in the linked list of clones of an edge. */
/* Returns NULL at the end of the chain (slots are zero-initialized when the
   vectors are grown).  */
3678 static inline struct cgraph_edge *
3679 get_next_cgraph_edge_clone (struct cgraph_edge *cs)
3681 return next_edge_clone[cs->uid];
3684 /* Given VAL that is intended for DEST, iterate over all its sources and if
3685 they still hold, add their edge frequency and their number into *FREQUENCY
3686 and *CALLER_COUNT respectively. */
/* Returns true iff at least one of the edges that bring VAL may be hot.
   *COUNT_SUM accumulates the profile counts of those edges as well.  */
3688 template <typename valtype>
3689 static bool
3690 get_info_about_necessary_edges (ipcp_value<valtype> *val, cgraph_node *dest,
3691 int *freq_sum,
3692 gcov_type *count_sum, int *caller_count)
3694 ipcp_value_source<valtype> *src;
3695 int freq = 0, count = 0;
3696 gcov_type cnt = 0;
3697 bool hot = false;
/* For each source, walk its whole chain of edge clones.  */
3699 for (src = val->sources; src; src = src->next)
3701 struct cgraph_edge *cs = src->cs;
3702 while (cs)
3704 if (cgraph_edge_brings_value_p (cs, src, dest))
3706 count++;
3707 freq += cs->frequency;
3708 cnt += cs->count;
3709 hot |= cs->maybe_hot_p ();
3711 cs = get_next_cgraph_edge_clone (cs);
3715 *freq_sum = freq;
3716 *count_sum = cnt;
3717 *caller_count = count;
3718 return hot;
3721 /* Return a vector of incoming edges that do bring value VAL to node DEST. It
3722 is assumed their number is known and equal to CALLER_COUNT. */
3724 template <typename valtype>
3725 static vec<cgraph_edge *>
3726 gather_edges_for_value (ipcp_value<valtype> *val, cgraph_node *dest,
3727 int caller_count)
3729 ipcp_value_source<valtype> *src;
3730 vec<cgraph_edge *> ret;
/* CALLER_COUNT must match what get_info_about_necessary_edges computed, as
   quick_push does not grow the vector.  */
3732 ret.create (caller_count);
3733 for (src = val->sources; src; src = src->next)
3735 struct cgraph_edge *cs = src->cs;
3736 while (cs)
3738 if (cgraph_edge_brings_value_p (cs, src, dest))
3739 ret.quick_push (cs);
3740 cs = get_next_cgraph_edge_clone (cs);
3744 return ret;
3747 /* Construct a replacement map for a known VALUE for a formal parameter
3748 PARM_NUM. Return it or NULL if for some reason it cannot be created. */
3750 static struct ipa_replace_map *
3751 get_replacement_map (struct ipa_node_params *info, tree value, int parm_num)
3753 struct ipa_replace_map *replace_map;
3756 replace_map = ggc_alloc<ipa_replace_map> ();
3757 if (dump_file)
3759 fprintf (dump_file, " replacing ");
3760 ipa_dump_param (dump_file, info, parm_num);
3762 fprintf (dump_file, " with const ");
3763 print_generic_expr (dump_file, value, 0);
3764 fprintf (dump_file, "\n");
/* The parameter is identified by its number, so old_tree stays NULL.  */
3766 replace_map->old_tree = NULL;
3767 replace_map->parm_num = parm_num;
3768 replace_map->new_tree = value;
3769 replace_map->replace_p = true;
3770 replace_map->ref_p = false;
3772 return replace_map;
3775 /* Dump new profiling counts of ORIG_NODE, NEW_NODE and their outgoing edges
   to dump_file.  Must only be called when dump_file is open. */
3777 static void
3778 dump_profile_updates (struct cgraph_node *orig_node,
3779 struct cgraph_node *new_node)
3781 struct cgraph_edge *cs;
3783 fprintf (dump_file, " setting count of the specialized node to "
3784 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) new_node->count);
3785 for (cs = new_node->callees; cs ; cs = cs->next_callee)
3786 fprintf (dump_file, " edge to %s has count "
3787 HOST_WIDE_INT_PRINT_DEC "\n",
3788 cs->callee->name (), (HOST_WIDE_INT) cs->count);
3790 fprintf (dump_file, " setting count of the original node to "
3791 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) orig_node->count);
3792 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
3793 fprintf (dump_file, " edge to %s is left with "
3794 HOST_WIDE_INT_PRINT_DEC "\n",
3795 cs->callee->name (), (HOST_WIDE_INT) cs->count);
3798 /* After a specialized NEW_NODE version of ORIG_NODE has been created, update
3799 their profile information to reflect this. */
3801 static void
3802 update_profiling_info (struct cgraph_node *orig_node,
3803 struct cgraph_node *new_node)
3805 struct cgraph_edge *cs;
3806 struct caller_statistics stats;
3807 gcov_type new_sum, orig_sum;
3808 gcov_type remainder, orig_node_count = orig_node->count;
3810 if (orig_node_count == 0)
3811 return;
/* Sum the incoming counts of both nodes; edges redirected to NEW_NODE no
   longer contribute to ORIG_NODE's incoming sum.  */
3813 init_caller_stats (&stats);
3814 orig_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
3815 false);
3816 orig_sum = stats.count_sum;
3817 init_caller_stats (&stats);
3818 new_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
3819 false);
3820 new_sum = stats.count_sum;
/* Guard against inconsistent profiles where the node count is lower than
   the sum of incoming counts; pretend the count was 20% above the sum.  */
3822 if (orig_node_count < orig_sum + new_sum)
3824 if (dump_file)
3825 fprintf (dump_file, " Problem: node %s/%i has too low count "
3826 HOST_WIDE_INT_PRINT_DEC " while the sum of incoming "
3827 "counts is " HOST_WIDE_INT_PRINT_DEC "\n",
3828 orig_node->name (), orig_node->order,
3829 (HOST_WIDE_INT) orig_node_count,
3830 (HOST_WIDE_INT) (orig_sum + new_sum));
3832 orig_node_count = (orig_sum + new_sum) * 12 / 10;
3833 if (dump_file)
3834 fprintf (dump_file, " proceeding by pretending it was "
3835 HOST_WIDE_INT_PRINT_DEC "\n",
3836 (HOST_WIDE_INT) orig_node_count);
/* NEW_NODE gets the incoming count of its callers; ORIG_NODE keeps the
   rest.  Outgoing edge counts are scaled proportionally.  */
3839 new_node->count = new_sum;
3840 remainder = orig_node_count - new_sum;
3841 orig_node->count = remainder;
3843 for (cs = new_node->callees; cs ; cs = cs->next_callee)
3844 if (cs->frequency)
3845 cs->count = apply_probability (cs->count,
3846 GCOV_COMPUTE_SCALE (new_sum,
3847 orig_node_count));
3848 else
3849 cs->count = 0;
3851 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
3852 cs->count = apply_probability (cs->count,
3853 GCOV_COMPUTE_SCALE (remainder,
3854 orig_node_count));
3856 if (dump_file)
3857 dump_profile_updates (orig_node, new_node);
3860 /* Update the respective profile of specialized NEW_NODE and the original
3861 ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
3862 have been redirected to the specialized version. */
3864 static void
3865 update_specialized_profile (struct cgraph_node *new_node,
3866 struct cgraph_node *orig_node,
3867 gcov_type redirected_sum)
3869 struct cgraph_edge *cs;
3870 gcov_type new_node_count, orig_node_count = orig_node->count;
3872 if (dump_file)
3873 fprintf (dump_file, " the sum of counts of redirected edges is "
3874 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) redirected_sum);
3875 if (orig_node_count == 0)
3876 return;
3878 gcc_assert (orig_node_count >= redirected_sum);
/* Move the redirected count from the original node to the clone and scale
   the clone's outgoing edges up by the relative increase.  */
3880 new_node_count = new_node->count;
3881 new_node->count += redirected_sum;
3882 orig_node->count -= redirected_sum;
3884 for (cs = new_node->callees; cs ; cs = cs->next_callee)
3885 if (cs->frequency)
3886 cs->count += apply_probability (cs->count,
3887 GCOV_COMPUTE_SCALE (redirected_sum,
3888 new_node_count));
3889 else
3890 cs->count = 0;
/* Decrease the counts of the original node's outgoing edges, clamping at
   zero to avoid negative counts.  */
3892 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
3894 gcov_type dec = apply_probability (cs->count,
3895 GCOV_COMPUTE_SCALE (redirected_sum,
3896 orig_node_count));
3897 if (dec < cs->count)
3898 cs->count -= dec;
3899 else
3900 cs->count = 0;
3903 if (dump_file)
3904 dump_profile_updates (orig_node, new_node);
3907 /* Create a specialized version of NODE with known constants in KNOWN_CSTS,
3908 known contexts in KNOWN_CONTEXTS and known aggregate values in AGGVALS and
3909 redirect all edges in CALLERS to it.  Takes ownership of CALLERS (released
   before returning) and of the KNOWN_CSTS/KNOWN_CONTEXTS vectors, which are
   stored in the new node's ipa_node_params. */
3911 static struct cgraph_node *
3912 create_specialized_node (struct cgraph_node *node,
3913 vec<tree> known_csts,
3914 vec<ipa_polymorphic_call_context> known_contexts,
3915 struct ipa_agg_replacement_value *aggvals,
3916 vec<cgraph_edge *> callers)
3918 struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
3919 vec<ipa_replace_map *, va_gc> *replace_trees = NULL;
3920 struct ipa_agg_replacement_value *av;
3921 struct cgraph_node *new_node;
3922 int i, count = ipa_get_param_count (info);
3923 bitmap args_to_skip;
/* NODE must be an original node, never a clone of a clone.  */
3925 gcc_assert (!info->ipcp_orig_node);
/* Parameters that are known constants or unused can be dropped from the
   clone's signature -- but only if the signature may be changed at all.  */
3927 if (node->local.can_change_signature)
3929 args_to_skip = BITMAP_GGC_ALLOC ();
3930 for (i = 0; i < count; i++)
3932 tree t = known_csts[i];
3934 if (t || !ipa_is_param_used (info, i))
3935 bitmap_set_bit (args_to_skip, i);
3938 else
3940 args_to_skip = NULL;
3941 if (dump_file && (dump_flags & TDF_DETAILS))
3942 fprintf (dump_file, " cannot change function signature\n");
/* Build replacement maps for all parameters with known constants.  */
3945 for (i = 0; i < count ; i++)
3947 tree t = known_csts[i];
3948 if (t)
3950 struct ipa_replace_map *replace_map;
3952 gcc_checking_assert (TREE_CODE (t) != TREE_BINFO);
3953 replace_map = get_replacement_map (info, t, i);
3954 if (replace_map)
3955 vec_safe_push (replace_trees, replace_map);
3959 new_node = node->create_virtual_clone (callers, replace_trees,
3960 args_to_skip, "constprop");
3961 ipa_set_node_agg_value_chain (new_node, aggvals);
3962 for (av = aggvals; av; av = av->next)
3963 new_node->maybe_create_reference (av->value, IPA_REF_ADDR, NULL);
3965 if (dump_file && (dump_flags & TDF_DETAILS))
3967 fprintf (dump_file, " the new node is %s/%i.\n",
3968 new_node->name (), new_node->order);
3969 if (known_contexts.exists ())
3971 for (i = 0; i < count ; i++)
3972 if (!known_contexts[i].useless_p ())
3974 fprintf (dump_file, " known ctx %i is ", i);
3975 known_contexts[i].dump (dump_file);
3978 if (aggvals)
3979 ipa_dump_agg_replacement_values (dump_file, aggvals);
/* Record the clone's origin and known values, fix up its profile and turn
   any newly-resolvable indirect calls into direct ones.  */
3981 ipa_check_create_node_params ();
3982 update_profiling_info (node, new_node);
3983 new_info = IPA_NODE_REF (new_node);
3984 new_info->ipcp_orig_node = node;
3985 new_info->known_csts = known_csts;
3986 new_info->known_contexts = known_contexts;
3988 ipcp_discover_new_direct_edges (new_node, known_csts, known_contexts, aggvals);
3990 callers.release ();
3991 return new_node;
3994 /* Given a NODE, and a subset of its CALLERS, try to populate blanks slots in
3995 KNOWN_CSTS with constants that are also known for all of the CALLERS. */
3997 static void
3998 find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
3999 vec<tree> known_csts,
4000 vec<cgraph_edge *> callers)
4002 struct ipa_node_params *info = IPA_NODE_REF (node);
4003 int i, count = ipa_get_param_count (info);
4005 for (i = 0; i < count ; i++)
4007 struct cgraph_edge *cs;
4008 tree newval = NULL_TREE;
4009 int j;
4010 bool first = true;
/* Skip parameters whose lattice is bottom or that already have a value.  */
4012 if (ipa_get_scalar_lat (info, i)->bottom || known_csts[i])
4013 continue;
/* All callers must agree on the same constant, and each must actually
   describe this argument; thunks and instrumentation mismatches make the
   argument unusable.  */
4015 FOR_EACH_VEC_ELT (callers, j, cs)
4017 struct ipa_jump_func *jump_func;
4018 tree t;
4020 if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
4021 || (i == 0
4022 && call_passes_through_thunk_p (cs))
4023 || (!cs->callee->instrumentation_clone
4024 && cs->callee->function_symbol ()->instrumentation_clone))
4026 newval = NULL_TREE;
4027 break;
4029 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
4030 t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func);
4031 if (!t
4032 || (newval
4033 && !values_equal_for_ipcp_p (t, newval))
4034 || (!first && !newval))
4036 newval = NULL_TREE;
4037 break;
4039 else
4040 newval = t;
4041 first = false;
4044 if (newval)
4046 if (dump_file && (dump_flags & TDF_DETAILS))
4048 fprintf (dump_file, " adding an extra known scalar value ");
4049 print_ipcp_constant_value (dump_file, newval);
4050 fprintf (dump_file, " for ");
4051 ipa_dump_param (dump_file, info, i);
4052 fprintf (dump_file, "\n");
4055 known_csts[i] = newval;
4060 /* Given a NODE and a subset of its CALLERS, try to populate blank slots in
4061 KNOWN_CONTEXTS with polymorphic contexts that are also known for all of the
4062 CALLERS. */
4064 static void
4065 find_more_contexts_for_caller_subset (cgraph_node *node,
4066 vec<ipa_polymorphic_call_context>
4067 *known_contexts,
4068 vec<cgraph_edge *> callers)
4070 ipa_node_params *info = IPA_NODE_REF (node);
4071 int i, count = ipa_get_param_count (info);
4073 for (i = 0; i < count ; i++)
4075 cgraph_edge *cs;
/* Skip parameters whose context lattice is bottom or that already have a
   useful context recorded.  */
4077 if (ipa_get_poly_ctx_lat (info, i)->bottom
4078 || (known_contexts->exists ()
4079 && !(*known_contexts)[i].useless_p ()))
4080 continue;
4082 ipa_polymorphic_call_context newval;
4083 bool first = true;
4084 int j;
/* Meet the contexts coming from all callers; stop early once the meet
   becomes useless.  */
4086 FOR_EACH_VEC_ELT (callers, j, cs)
4088 if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
4089 return;
4090 ipa_jump_func *jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs),
4092 ipa_polymorphic_call_context ctx;
4093 ctx = ipa_context_from_jfunc (IPA_NODE_REF (cs->caller), cs, i,
4094 jfunc);
4095 if (first)
4097 newval = ctx;
4098 first = false;
4100 else
4101 newval.meet_with (ctx);
4102 if (newval.useless_p ())
4103 break;
4106 if (!newval.useless_p ())
4108 if (dump_file && (dump_flags & TDF_DETAILS))
4110 fprintf (dump_file, " adding an extra known polymorphic "
4111 "context ");
4112 print_ipcp_constant_value (dump_file, newval);
4113 fprintf (dump_file, " for ");
4114 ipa_dump_param (dump_file, info, i);
4115 fprintf (dump_file, "\n");
/* Allocate the result vector lazily, only once something useful is
   actually found.  */
4118 if (!known_contexts->exists ())
4119 known_contexts->safe_grow_cleared (ipa_get_param_count (info));
4120 (*known_contexts)[i] = newval;
4126 /* Go through PLATS and create a vector of values consisting of values and
4127 offsets (minus OFFSET) of lattices that contain only a single value. */
4129 static vec<ipa_agg_jf_item>
4130 copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
4132 vec<ipa_agg_jf_item> res = vNULL;
/* Nothing usable if there are no aggregate lattices or they are unusable.  */
4134 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
4135 return vNULL;
4137 for (struct ipcp_agg_lattice *aglat = plats->aggs; aglat; aglat = aglat->next)
4138 if (aglat->is_single_const ())
4140 struct ipa_agg_jf_item ti;
4141 ti.offset = aglat->offset - offset;
4142 ti.value = aglat->values->value;
4143 res.safe_push (ti);
4145 return res;
4148 /* Intersect all values in INTER with single value lattices in PLATS (while
4149 subtracting OFFSET).  Items with no matching single-value lattice have
   their value cleared to NULL_TREE. */
4151 static void
4152 intersect_with_plats (struct ipcp_param_lattices *plats,
4153 vec<ipa_agg_jf_item> *inter,
4154 HOST_WIDE_INT offset)
4156 struct ipcp_agg_lattice *aglat;
4157 struct ipa_agg_jf_item *item;
4158 int k;
/* With no usable aggregate lattices the intersection is empty.  */
4160 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
4162 inter->release ();
4163 return;
/* Both INTER and the lattice list are sorted by offset, so the lattice
   cursor only ever moves forward.  */
4166 aglat = plats->aggs;
4167 FOR_EACH_VEC_ELT (*inter, k, item)
4169 bool found = false;
4170 if (!item->value)
4171 continue;
4172 while (aglat)
4174 if (aglat->offset - offset > item->offset)
4175 break;
4176 if (aglat->offset - offset == item->offset)
4178 gcc_checking_assert (item->value);
4179 if (values_equal_for_ipcp_p (item->value, aglat->values->value))
4180 found = true;
4181 break;
4183 aglat = aglat->next;
4185 if (!found)
4186 item->value = NULL_TREE;
4190 /* Copy aggregate replacement values of NODE (which is an IPA-CP clone) to the
4191 vector result while subtracting OFFSET from the individual value offsets. */
4193 static vec<ipa_agg_jf_item>
4194 agg_replacements_to_vector (struct cgraph_node *node, int index,
4195 HOST_WIDE_INT offset)
4197 struct ipa_agg_replacement_value *av;
4198 vec<ipa_agg_jf_item> res = vNULL;
/* Only replacements for parameter INDEX whose adjusted offset is still
   non-negative are copied.  */
4200 for (av = ipa_get_agg_replacements_for_node (node); av; av = av->next)
4201 if (av->index == index
4202 && (av->offset - offset) >= 0)
4204 struct ipa_agg_jf_item item;
4205 gcc_checking_assert (av->value);
4206 item.offset = av->offset - offset;
4207 item.value = av->value;
4208 res.safe_push (item);
4211 return res;
4214 /* Intersect all values in INTER with those that we have already scheduled to
4215 be replaced in parameter number INDEX of NODE, which is an IPA-CP clone
4216 (while subtracting OFFSET).  Items without a matching replacement have
   their value cleared to NULL_TREE. */
4218 static void
4219 intersect_with_agg_replacements (struct cgraph_node *node, int index,
4220 vec<ipa_agg_jf_item> *inter,
4221 HOST_WIDE_INT offset)
4223 struct ipa_agg_replacement_value *srcvals;
4224 struct ipa_agg_jf_item *item;
4225 int i;
4227 srcvals = ipa_get_agg_replacements_for_node (node);
/* A clone with no scheduled replacements yields an empty intersection.  */
4228 if (!srcvals)
4230 inter->release ();
4231 return;
4234 FOR_EACH_VEC_ELT (*inter, i, item)
4236 struct ipa_agg_replacement_value *av;
4237 bool found = false;
4238 if (!item->value)
4239 continue;
4240 for (av = srcvals; av; av = av->next)
4242 gcc_checking_assert (av->value);
4243 if (av->index == index
4244 && av->offset - offset == item->offset)
4246 if (values_equal_for_ipcp_p (item->value, av->value))
4247 found = true;
4248 break;
4251 if (!found)
4252 item->value = NULL_TREE;
4256 /* Intersect values in INTER with aggregate values that come along edge CS to
4257 parameter number INDEX and return it. If INTER does not actually exist yet,
4258 copy all incoming values to it. If we determine we ended up with no values
4259 whatsoever, return a released vector. */
4261 static vec<ipa_agg_jf_item>
4262 intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
4263 vec<ipa_agg_jf_item> inter)
4265 struct ipa_jump_func *jfunc;
4266 jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), index);
/* Case 1: the argument is a pass-through of one of the caller's own
   parameters; consult the caller's replacements (if it is itself a clone)
   or its lattices.  */
4267 if (jfunc->type == IPA_JF_PASS_THROUGH
4268 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
4270 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
4271 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
4273 if (caller_info->ipcp_orig_node)
4275 struct cgraph_node *orig_node = caller_info->ipcp_orig_node;
4276 struct ipcp_param_lattices *orig_plats;
4277 orig_plats = ipa_get_parm_lattices (IPA_NODE_REF (orig_node),
4278 src_idx);
4279 if (agg_pass_through_permissible_p (orig_plats, jfunc))
4281 if (!inter.exists ())
4282 inter = agg_replacements_to_vector (cs->caller, src_idx, 0);
4283 else
4284 intersect_with_agg_replacements (cs->caller, src_idx,
4285 &inter, 0);
4287 else
4289 inter.release ();
4290 return vNULL;
4293 else
4295 struct ipcp_param_lattices *src_plats;
4296 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
4297 if (agg_pass_through_permissible_p (src_plats, jfunc))
4299 /* Currently we do not produce clobber aggregate jump
4300 functions, adjust when we do. */
4301 gcc_checking_assert (!jfunc->agg.items);
4302 if (!inter.exists ())
4303 inter = copy_plats_to_inter (src_plats, 0);
4304 else
4305 intersect_with_plats (src_plats, &inter, 0);
4307 else
4309 inter.release ();
4310 return vNULL;
/* Case 2: the argument is an ancestor (offset adjustment) of a caller
   parameter with aggregate contents preserved; same as above but offsets
   must be shifted by the ancestor delta.  */
4314 else if (jfunc->type == IPA_JF_ANCESTOR
4315 && ipa_get_jf_ancestor_agg_preserved (jfunc))
4317 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
4318 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
4319 struct ipcp_param_lattices *src_plats;
4320 HOST_WIDE_INT delta = ipa_get_jf_ancestor_offset (jfunc);
4322 if (caller_info->ipcp_orig_node)
4324 if (!inter.exists ())
4325 inter = agg_replacements_to_vector (cs->caller, src_idx, delta);
4326 else
4327 intersect_with_agg_replacements (cs->caller, src_idx, &inter,
4328 delta);
4330 else
4332 src_plats = ipa_get_parm_lattices (caller_info, src_idx);;
4333 /* Currently we do not produce clobber aggregate jump
4334 functions, adjust when we do. */
4335 gcc_checking_assert (!src_plats->aggs || !jfunc->agg.items);
4336 if (!inter.exists ())
4337 inter = copy_plats_to_inter (src_plats, delta);
4338 else
4339 intersect_with_plats (src_plats, &inter, delta);
/* Case 3: the jump function itself carries known aggregate items; merge
   them, relying on both lists being sorted by offset.  */
4342 else if (jfunc->agg.items)
4344 struct ipa_agg_jf_item *item;
4345 int k;
4347 if (!inter.exists ())
4348 for (unsigned i = 0; i < jfunc->agg.items->length (); i++)
4349 inter.safe_push ((*jfunc->agg.items)[i]);
4350 else
4351 FOR_EACH_VEC_ELT (inter, k, item)
4353 int l = 0;
4354 bool found = false;;
4356 if (!item->value)
4357 continue;
4359 while ((unsigned) l < jfunc->agg.items->length ())
4361 struct ipa_agg_jf_item *ti;
4362 ti = &(*jfunc->agg.items)[l];
4363 if (ti->offset > item->offset)
4364 break;
4365 if (ti->offset == item->offset)
4367 gcc_checking_assert (ti->value);
4368 if (values_equal_for_ipcp_p (item->value,
4369 ti->value))
4370 found = true;
4371 break;
4373 l++;
4375 if (!found)
4376 item->value = NULL;
/* Case 4: nothing known about the argument's aggregate contents.  */
4379 else
4381 inter.release ();
4382 return vec<ipa_agg_jf_item>();
4384 return inter;
4387 /* Look at edges in CALLERS and collect all known aggregate values that arrive
4388 from all of them.  Returns a newly allocated GC'd linked list of
   ipa_agg_replacement_value entries. */
4390 static struct ipa_agg_replacement_value *
4391 find_aggregate_values_for_callers_subset (struct cgraph_node *node,
4392 vec<cgraph_edge *> callers)
4394 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
4395 struct ipa_agg_replacement_value *res;
4396 struct ipa_agg_replacement_value **tail = &res;
4397 struct cgraph_edge *cs;
4398 int i, j, count = ipa_get_param_count (dest_info);
/* Only consider parameters for which every caller actually passes an
   argument.  */
4400 FOR_EACH_VEC_ELT (callers, j, cs)
4402 int c = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
4403 if (c < count)
4404 count = c;
4407 for (i = 0; i < count ; i++)
4409 struct cgraph_edge *cs;
4410 vec<ipa_agg_jf_item> inter = vNULL;
4411 struct ipa_agg_jf_item *item;
4412 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (dest_info, i);
4413 int j;
4415 /* Among other things, the following check should deal with all by_ref
4416 mismatches. */
4417 if (plats->aggs_bottom)
4418 continue;
/* Intersect the aggregate values coming along every caller edge; an empty
   intersection means nothing is known for this parameter.  */
4420 FOR_EACH_VEC_ELT (callers, j, cs)
4422 inter = intersect_aggregates_with_edge (cs, i, inter);
4424 if (!inter.exists ())
4425 goto next_param;
/* Turn the surviving items into replacement-value list entries.  */
4428 FOR_EACH_VEC_ELT (inter, j, item)
4430 struct ipa_agg_replacement_value *v;
4432 if (!item->value)
4433 continue;
4435 v = ggc_alloc<ipa_agg_replacement_value> ();
4436 v->index = i;
4437 v->offset = item->offset;
4438 v->value = item->value;
4439 v->by_ref = plats->aggs_by_ref;
4440 *tail = v;
4441 tail = &v->next;
4444 next_param:
4445 if (inter.exists ())
4446 inter.release ();
4448 *tail = NULL;
4449 return res;
4452 /* Turn KNOWN_AGGS into a list of aggreate replacement values. */
4454 static struct ipa_agg_replacement_value *
4455 known_aggs_to_agg_replacement_list (vec<ipa_agg_jump_function> known_aggs)
4457 struct ipa_agg_replacement_value *res;
4458 struct ipa_agg_replacement_value **tail = &res;
4459 struct ipa_agg_jump_function *aggjf;
4460 struct ipa_agg_jf_item *item;
4461 int i, j;
4463 FOR_EACH_VEC_ELT (known_aggs, i, aggjf)
4464 FOR_EACH_VEC_SAFE_ELT (aggjf->items, j, item)
4466 struct ipa_agg_replacement_value *v;
4467 v = ggc_alloc<ipa_agg_replacement_value> ();
4468 v->index = i;
4469 v->offset = item->offset;
4470 v->value = item->value;
4471 v->by_ref = aggjf->by_ref;
4472 *tail = v;
4473 tail = &v->next;
4475 *tail = NULL;
4476 return res;
4479 /* Determine whether CS also brings all scalar values that the NODE is
4480 specialized for. */
4482 static bool
4483 cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
4484 struct cgraph_node *node)
4486 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
4487 int count = ipa_get_param_count (dest_info);
4488 struct ipa_node_params *caller_info;
4489 struct ipa_edge_args *args;
4490 int i;
4492 caller_info = IPA_NODE_REF (cs->caller);
4493 args = IPA_EDGE_REF (cs);
4494 for (i = 0; i < count; i++)
4496 struct ipa_jump_func *jump_func;
4497 tree val, t;
4499 val = dest_info->known_csts[i];
4500 if (!val)
4501 continue;
4503 if (i >= ipa_get_cs_argument_count (args))
4504 return false;
4505 jump_func = ipa_get_ith_jump_func (args, i);
4506 t = ipa_value_from_jfunc (caller_info, jump_func);
4507 if (!t || !values_equal_for_ipcp_p (val, t))
4508 return false;
4510 return true;
4513 /* Determine whether CS also brings all aggregate values that NODE is
4514 specialized for. */
4515 static bool
4516 cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
4517 struct cgraph_node *node)
4519 struct ipa_node_params *orig_caller_info = IPA_NODE_REF (cs->caller);
4520 struct ipa_node_params *orig_node_info;
4521 struct ipa_agg_replacement_value *aggval;
4522 int i, ec, count;
4524 aggval = ipa_get_agg_replacements_for_node (node);
4525 if (!aggval)
4526 return true;
4528 count = ipa_get_param_count (IPA_NODE_REF (node));
4529 ec = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
4530 if (ec < count)
4531 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
4532 if (aggval->index >= ec)
4533 return false;
4535 orig_node_info = IPA_NODE_REF (IPA_NODE_REF (node)->ipcp_orig_node);
4536 if (orig_caller_info->ipcp_orig_node)
4537 orig_caller_info = IPA_NODE_REF (orig_caller_info->ipcp_orig_node);
4539 for (i = 0; i < count; i++)
4541 static vec<ipa_agg_jf_item> values = vec<ipa_agg_jf_item>();
4542 struct ipcp_param_lattices *plats;
4543 bool interesting = false;
4544 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
4545 if (aggval->index == i)
4547 interesting = true;
4548 break;
4550 if (!interesting)
4551 continue;
4553 plats = ipa_get_parm_lattices (orig_node_info, aggval->index);
4554 if (plats->aggs_bottom)
4555 return false;
4557 values = intersect_aggregates_with_edge (cs, i, values);
4558 if (!values.exists ())
4559 return false;
4561 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
4562 if (aggval->index == i)
4564 struct ipa_agg_jf_item *item;
4565 int j;
4566 bool found = false;
4567 FOR_EACH_VEC_ELT (values, j, item)
4568 if (item->value
4569 && item->offset == av->offset
4570 && values_equal_for_ipcp_p (item->value, av->value))
4572 found = true;
4573 break;
4575 if (!found)
4577 values.release ();
4578 return false;
4582 return true;
4585 /* Given an original NODE and a VAL for which we have already created a
4586 specialized clone, look whether there are incoming edges that still lead
4587 into the old node but now also bring the requested value and also conform to
4588 all other criteria such that they can be redirected the special node.
4589 This function can therefore redirect the final edge in a SCC. */
4591 template <typename valtype>
4592 static void
4593 perhaps_add_new_callers (cgraph_node *node, ipcp_value<valtype> *val)
4595 ipcp_value_source<valtype> *src;
4596 gcov_type redirected_sum = 0;
4598 for (src = val->sources; src; src = src->next)
4600 struct cgraph_edge *cs = src->cs;
4601 while (cs)
4603 if (cgraph_edge_brings_value_p (cs, src, node)
4604 && cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
4605 && cgraph_edge_brings_all_agg_vals_for_node (cs, val->spec_node))
4607 if (dump_file)
4608 fprintf (dump_file, " - adding an extra caller %s/%i"
4609 " of %s/%i\n",
4610 xstrdup_for_dump (cs->caller->name ()),
4611 cs->caller->order,
4612 xstrdup_for_dump (val->spec_node->name ()),
4613 val->spec_node->order);
4615 cs->redirect_callee_duplicating_thunks (val->spec_node);
4616 val->spec_node->expand_all_artificial_thunks ();
4617 redirected_sum += cs->count;
4619 cs = get_next_cgraph_edge_clone (cs);
4623 if (redirected_sum)
4624 update_specialized_profile (val->spec_node, node, redirected_sum);
4627 /* Return true if KNOWN_CONTEXTS contain at least one useful context. */
4629 static bool
4630 known_contexts_useful_p (vec<ipa_polymorphic_call_context> known_contexts)
4632 ipa_polymorphic_call_context *ctx;
4633 int i;
4635 FOR_EACH_VEC_ELT (known_contexts, i, ctx)
4636 if (!ctx->useless_p ())
4637 return true;
4638 return false;
4641 /* Return a copy of KNOWN_CSTS if it is not empty, otherwise return vNULL. */
4643 static vec<ipa_polymorphic_call_context>
4644 copy_useful_known_contexts (vec<ipa_polymorphic_call_context> known_contexts)
4646 if (known_contexts_useful_p (known_contexts))
4647 return known_contexts.copy ();
4648 else
4649 return vNULL;
4652 /* Copy KNOWN_CSTS and modify the copy according to VAL and INDEX. If
4653 non-empty, replace KNOWN_CONTEXTS with its copy too. */
4655 static void
4656 modify_known_vectors_with_val (vec<tree> *known_csts,
4657 vec<ipa_polymorphic_call_context> *known_contexts,
4658 ipcp_value<tree> *val,
4659 int index)
4661 *known_csts = known_csts->copy ();
4662 *known_contexts = copy_useful_known_contexts (*known_contexts);
4663 (*known_csts)[index] = val->value;
4666 /* Replace KNOWN_CSTS with its copy. Also copy KNOWN_CONTEXTS and modify the
4667 copy according to VAL and INDEX. */
4669 static void
4670 modify_known_vectors_with_val (vec<tree> *known_csts,
4671 vec<ipa_polymorphic_call_context> *known_contexts,
4672 ipcp_value<ipa_polymorphic_call_context> *val,
4673 int index)
4675 *known_csts = known_csts->copy ();
4676 *known_contexts = known_contexts->copy ();
4677 (*known_contexts)[index] = val->value;
4680 /* Return true if OFFSET indicates this was not an aggregate value or there is
4681 a replacement equivalent to VALUE, INDEX and OFFSET among those in the
4682 AGGVALS list. */
4684 DEBUG_FUNCTION bool
4685 ipcp_val_agg_replacement_ok_p (ipa_agg_replacement_value *aggvals,
4686 int index, HOST_WIDE_INT offset, tree value)
4688 if (offset == -1)
4689 return true;
4691 while (aggvals)
4693 if (aggvals->index == index
4694 && aggvals->offset == offset
4695 && values_equal_for_ipcp_p (aggvals->value, value))
4696 return true;
4697 aggvals = aggvals->next;
4699 return false;
4702 /* Return true if offset is minus one because source of a polymorphic contect
4703 cannot be an aggregate value. */
4705 DEBUG_FUNCTION bool
4706 ipcp_val_agg_replacement_ok_p (ipa_agg_replacement_value *,
4707 int , HOST_WIDE_INT offset,
4708 ipa_polymorphic_call_context)
4710 return offset == -1;
4713 /* Decide wheter to create a special version of NODE for value VAL of parameter
4714 at the given INDEX. If OFFSET is -1, the value is for the parameter itself,
4715 otherwise it is stored at the given OFFSET of the parameter. KNOWN_CSTS,
4716 KNOWN_CONTEXTS and KNOWN_AGGS describe the other already known values. */
4718 template <typename valtype>
4719 static bool
4720 decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
4721 ipcp_value<valtype> *val, vec<tree> known_csts,
4722 vec<ipa_polymorphic_call_context> known_contexts)
4724 struct ipa_agg_replacement_value *aggvals;
4725 int freq_sum, caller_count;
4726 gcov_type count_sum;
4727 vec<cgraph_edge *> callers;
4729 if (val->spec_node)
4731 perhaps_add_new_callers (node, val);
4732 return false;
4734 else if (val->local_size_cost + overall_size > max_new_size)
4736 if (dump_file && (dump_flags & TDF_DETAILS))
4737 fprintf (dump_file, " Ignoring candidate value because "
4738 "max_new_size would be reached with %li.\n",
4739 val->local_size_cost + overall_size);
4740 return false;
4742 else if (!get_info_about_necessary_edges (val, node, &freq_sum, &count_sum,
4743 &caller_count))
4744 return false;
4746 if (dump_file && (dump_flags & TDF_DETAILS))
4748 fprintf (dump_file, " - considering value ");
4749 print_ipcp_constant_value (dump_file, val->value);
4750 fprintf (dump_file, " for ");
4751 ipa_dump_param (dump_file, IPA_NODE_REF (node), index);
4752 if (offset != -1)
4753 fprintf (dump_file, ", offset: " HOST_WIDE_INT_PRINT_DEC, offset);
4754 fprintf (dump_file, " (caller_count: %i)\n", caller_count);
4757 if (!good_cloning_opportunity_p (node, val->local_time_benefit,
4758 freq_sum, count_sum,
4759 val->local_size_cost)
4760 && !good_cloning_opportunity_p (node,
4761 val->local_time_benefit
4762 + val->prop_time_benefit,
4763 freq_sum, count_sum,
4764 val->local_size_cost
4765 + val->prop_size_cost))
4766 return false;
4768 if (dump_file)
4769 fprintf (dump_file, " Creating a specialized node of %s/%i.\n",
4770 node->name (), node->order);
4772 callers = gather_edges_for_value (val, node, caller_count);
4773 if (offset == -1)
4774 modify_known_vectors_with_val (&known_csts, &known_contexts, val, index);
4775 else
4777 known_csts = known_csts.copy ();
4778 known_contexts = copy_useful_known_contexts (known_contexts);
4780 find_more_scalar_values_for_callers_subset (node, known_csts, callers);
4781 find_more_contexts_for_caller_subset (node, &known_contexts, callers);
4782 aggvals = find_aggregate_values_for_callers_subset (node, callers);
4783 gcc_checking_assert (ipcp_val_agg_replacement_ok_p (aggvals, index,
4784 offset, val->value));
4785 val->spec_node = create_specialized_node (node, known_csts, known_contexts,
4786 aggvals, callers);
4787 overall_size += val->local_size_cost;
4789 /* TODO: If for some lattice there is only one other known value
4790 left, make a special node for it too. */
4792 return true;
4795 /* Decide whether and what specialized clones of NODE should be created. */
4797 static bool
4798 decide_whether_version_node (struct cgraph_node *node)
4800 struct ipa_node_params *info = IPA_NODE_REF (node);
4801 int i, count = ipa_get_param_count (info);
4802 vec<tree> known_csts;
4803 vec<ipa_polymorphic_call_context> known_contexts;
4804 vec<ipa_agg_jump_function> known_aggs = vNULL;
4805 bool ret = false;
4807 if (count == 0)
4808 return false;
4810 if (dump_file && (dump_flags & TDF_DETAILS))
4811 fprintf (dump_file, "\nEvaluating opportunities for %s/%i.\n",
4812 node->name (), node->order);
4814 gather_context_independent_values (info, &known_csts, &known_contexts,
4815 info->do_clone_for_all_contexts ? &known_aggs
4816 : NULL, NULL);
4818 for (i = 0; i < count ;i++)
4820 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
4821 ipcp_lattice<tree> *lat = &plats->itself;
4822 ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
4824 if (!lat->bottom
4825 && !known_csts[i])
4827 ipcp_value<tree> *val;
4828 for (val = lat->values; val; val = val->next)
4829 ret |= decide_about_value (node, i, -1, val, known_csts,
4830 known_contexts);
4833 if (!plats->aggs_bottom)
4835 struct ipcp_agg_lattice *aglat;
4836 ipcp_value<tree> *val;
4837 for (aglat = plats->aggs; aglat; aglat = aglat->next)
4838 if (!aglat->bottom && aglat->values
4839 /* If the following is false, the one value is in
4840 known_aggs. */
4841 && (plats->aggs_contain_variable
4842 || !aglat->is_single_const ()))
4843 for (val = aglat->values; val; val = val->next)
4844 ret |= decide_about_value (node, i, aglat->offset, val,
4845 known_csts, known_contexts);
4848 if (!ctxlat->bottom
4849 && known_contexts[i].useless_p ())
4851 ipcp_value<ipa_polymorphic_call_context> *val;
4852 for (val = ctxlat->values; val; val = val->next)
4853 ret |= decide_about_value (node, i, -1, val, known_csts,
4854 known_contexts);
4857 info = IPA_NODE_REF (node);
4860 if (info->do_clone_for_all_contexts)
4862 struct cgraph_node *clone;
4863 vec<cgraph_edge *> callers;
4865 if (dump_file)
4866 fprintf (dump_file, " - Creating a specialized node of %s/%i "
4867 "for all known contexts.\n", node->name (),
4868 node->order);
4870 callers = node->collect_callers ();
4872 if (!known_contexts_useful_p (known_contexts))
4874 known_contexts.release ();
4875 known_contexts = vNULL;
4877 clone = create_specialized_node (node, known_csts, known_contexts,
4878 known_aggs_to_agg_replacement_list (known_aggs),
4879 callers);
4880 info = IPA_NODE_REF (node);
4881 info->do_clone_for_all_contexts = false;
4882 IPA_NODE_REF (clone)->is_all_contexts_clone = true;
4883 for (i = 0; i < count ; i++)
4884 vec_free (known_aggs[i].items);
4885 known_aggs.release ();
4886 ret = true;
4888 else
4890 known_csts.release ();
4891 known_contexts.release ();
4894 return ret;
4897 /* Transitively mark all callees of NODE within the same SCC as not dead. */
4899 static void
4900 spread_undeadness (struct cgraph_node *node)
4902 struct cgraph_edge *cs;
4904 for (cs = node->callees; cs; cs = cs->next_callee)
4905 if (ipa_edge_within_scc (cs))
4907 struct cgraph_node *callee;
4908 struct ipa_node_params *info;
4910 callee = cs->callee->function_symbol (NULL);
4911 info = IPA_NODE_REF (callee);
4913 if (info->node_dead)
4915 info->node_dead = 0;
4916 spread_undeadness (callee);
4921 /* Return true if NODE has a caller from outside of its SCC that is not
4922 dead. Worker callback for cgraph_for_node_and_aliases. */
4924 static bool
4925 has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
4926 void *data ATTRIBUTE_UNUSED)
4928 struct cgraph_edge *cs;
4930 for (cs = node->callers; cs; cs = cs->next_caller)
4931 if (cs->caller->thunk.thunk_p
4932 && cs->caller->call_for_symbol_thunks_and_aliases
4933 (has_undead_caller_from_outside_scc_p, NULL, true))
4934 return true;
4935 else if (!ipa_edge_within_scc (cs)
4936 && !IPA_NODE_REF (cs->caller)->node_dead)
4937 return true;
4938 return false;
4942 /* Identify nodes within the same SCC as NODE which are no longer needed
4943 because of new clones and will be removed as unreachable. */
4945 static void
4946 identify_dead_nodes (struct cgraph_node *node)
4948 struct cgraph_node *v;
4949 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4950 if (v->local.local
4951 && !v->call_for_symbol_thunks_and_aliases
4952 (has_undead_caller_from_outside_scc_p, NULL, true))
4953 IPA_NODE_REF (v)->node_dead = 1;
4955 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4956 if (!IPA_NODE_REF (v)->node_dead)
4957 spread_undeadness (v);
4959 if (dump_file && (dump_flags & TDF_DETAILS))
4961 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4962 if (IPA_NODE_REF (v)->node_dead)
4963 fprintf (dump_file, " Marking node as dead: %s/%i.\n",
4964 v->name (), v->order);
4968 /* The decision stage. Iterate over the topological order of call graph nodes
4969 TOPO and make specialized clones if deemed beneficial. */
4971 static void
4972 ipcp_decision_stage (struct ipa_topo_info *topo)
4974 int i;
4976 if (dump_file)
4977 fprintf (dump_file, "\nIPA decision stage:\n\n");
4979 for (i = topo->nnodes - 1; i >= 0; i--)
4981 struct cgraph_node *node = topo->order[i];
4982 bool change = false, iterate = true;
4984 while (iterate)
4986 struct cgraph_node *v;
4987 iterate = false;
4988 for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
4989 if (v->has_gimple_body_p ()
4990 && ipcp_versionable_function_p (v))
4991 iterate |= decide_whether_version_node (v);
4993 change |= iterate;
4995 if (change)
4996 identify_dead_nodes (node);
5000 /* Look up all alignment information that we have discovered and copy it over
5001 to the transformation summary. */
5003 static void
5004 ipcp_store_alignment_results (void)
5006 cgraph_node *node;
5008 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
5010 ipa_node_params *info = IPA_NODE_REF (node);
5011 bool dumped_sth = false;
5012 bool found_useful_result = false;
5014 if (!opt_for_fn (node->decl, flag_ipa_cp_alignment))
5016 if (dump_file)
5017 fprintf (dump_file, "Not considering %s for alignment discovery "
5018 "and propagate; -fipa-cp-alignment: disabled.\n",
5019 node->name ());
5020 continue;
5023 if (info->ipcp_orig_node)
5024 info = IPA_NODE_REF (info->ipcp_orig_node);
5026 unsigned count = ipa_get_param_count (info);
5027 for (unsigned i = 0; i < count ; i++)
5029 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
5030 if (!plats->alignment.bottom_p ()
5031 && !plats->alignment.top_p ())
5033 gcc_checking_assert (plats->alignment.align > 0);
5034 found_useful_result = true;
5035 break;
5038 if (!found_useful_result)
5039 continue;
5041 ipcp_grow_transformations_if_necessary ();
5042 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5043 vec_safe_reserve_exact (ts->alignments, count);
5045 for (unsigned i = 0; i < count ; i++)
5047 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
5048 ipa_alignment al;
5050 if (!plats->alignment.bottom_p ()
5051 && !plats->alignment.top_p ())
5053 al.known = true;
5054 al.align = plats->alignment.align;
5055 al.misalign = plats->alignment.misalign;
5057 else
5058 al.known = false;
5060 ts->alignments->quick_push (al);
5061 if (!dump_file || !al.known)
5062 continue;
5063 if (!dumped_sth)
5065 fprintf (dump_file, "Propagated alignment info for function %s/%i:\n",
5066 node->name (), node->order);
5067 dumped_sth = true;
5069 fprintf (dump_file, " param %i: align: %u, misalign: %u\n",
5070 i, al.align, al.misalign);
5075 /* Look up all the bits information that we have discovered and copy it over
5076 to the transformation summary. */
5078 static void
5079 ipcp_store_bits_results (void)
5081 cgraph_node *node;
5083 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
5085 ipa_node_params *info = IPA_NODE_REF (node);
5086 bool dumped_sth = false;
5087 bool found_useful_result = false;
5089 if (!opt_for_fn (node->decl, flag_ipa_bit_cp))
5091 if (dump_file)
5092 fprintf (dump_file, "Not considering %s for ipa bitwise propagation "
5093 "; -fipa-bit-cp: disabled.\n",
5094 node->name ());
5095 continue;
5098 if (info->ipcp_orig_node)
5099 info = IPA_NODE_REF (info->ipcp_orig_node);
5101 unsigned count = ipa_get_param_count (info);
5102 for (unsigned i = 0; i < count; i++)
5104 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
5105 if (plats->bits_lattice.constant_p ())
5107 found_useful_result = true;
5108 break;
5112 if (!found_useful_result)
5113 continue;
5115 ipcp_grow_transformations_if_necessary ();
5116 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5117 vec_safe_reserve_exact (ts->bits, count);
5119 for (unsigned i = 0; i < count; i++)
5121 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
5122 ipa_bits bits_jfunc;
5124 if (plats->bits_lattice.constant_p ())
5126 bits_jfunc.known = true;
5127 bits_jfunc.value = plats->bits_lattice.get_value ();
5128 bits_jfunc.mask = plats->bits_lattice.get_mask ();
5130 else
5131 bits_jfunc.known = false;
5133 ts->bits->quick_push (bits_jfunc);
5134 if (!dump_file || !bits_jfunc.known)
5135 continue;
5136 if (!dumped_sth)
5138 fprintf (dump_file, "Propagated bits info for function %s/%i:\n",
5139 node->name (), node->order);
5140 dumped_sth = true;
5142 fprintf (dump_file, " param %i: value = ", i);
5143 print_hex (bits_jfunc.value, dump_file);
5144 fprintf (dump_file, ", mask = ");
5145 print_hex (bits_jfunc.mask, dump_file);
5146 fprintf (dump_file, "\n");
5151 /* Look up all VR information that we have discovered and copy it over
5152 to the transformation summary. */
5154 static void
5155 ipcp_store_vr_results (void)
5157 cgraph_node *node;
5159 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
5161 ipa_node_params *info = IPA_NODE_REF (node);
5162 bool found_useful_result = false;
5164 if (!opt_for_fn (node->decl, flag_ipa_vrp))
5166 if (dump_file)
5167 fprintf (dump_file, "Not considering %s for VR discovery "
5168 "and propagate; -fipa-ipa-vrp: disabled.\n",
5169 node->name ());
5170 continue;
5173 if (info->ipcp_orig_node)
5174 info = IPA_NODE_REF (info->ipcp_orig_node);
5176 unsigned count = ipa_get_param_count (info);
5177 for (unsigned i = 0; i < count ; i++)
5179 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
5180 if (!plats->m_value_range.bottom_p ()
5181 && !plats->m_value_range.top_p ())
5183 found_useful_result = true;
5184 break;
5187 if (!found_useful_result)
5188 continue;
5190 ipcp_grow_transformations_if_necessary ();
5191 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5192 vec_safe_reserve_exact (ts->m_vr, count);
5194 for (unsigned i = 0; i < count ; i++)
5196 ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
5197 ipa_vr vr;
5199 if (!plats->m_value_range.bottom_p ()
5200 && !plats->m_value_range.top_p ())
5202 vr.known = true;
5203 vr.type = plats->m_value_range.m_vr.type;
5204 vr.min = plats->m_value_range.m_vr.min;
5205 vr.max = plats->m_value_range.m_vr.max;
5207 else
5209 vr.known = false;
5210 vr.type = VR_VARYING;
5211 vr.min = vr.max = wi::zero (INT_TYPE_SIZE);
5213 ts->m_vr->quick_push (vr);
5218 /* The IPCP driver. */
5220 static unsigned int
5221 ipcp_driver (void)
5223 struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
5224 struct cgraph_edge_hook_list *edge_removal_hook_holder;
5225 struct ipa_topo_info topo;
5227 ipa_check_create_node_params ();
5228 ipa_check_create_edge_args ();
5229 grow_edge_clone_vectors ();
5230 edge_duplication_hook_holder =
5231 symtab->add_edge_duplication_hook (&ipcp_edge_duplication_hook, NULL);
5232 edge_removal_hook_holder =
5233 symtab->add_edge_removal_hook (&ipcp_edge_removal_hook, NULL);
5235 if (dump_file)
5237 fprintf (dump_file, "\nIPA structures before propagation:\n");
5238 if (dump_flags & TDF_DETAILS)
5239 ipa_print_all_params (dump_file);
5240 ipa_print_all_jump_functions (dump_file);
5243 /* Topological sort. */
5244 build_toporder_info (&topo);
5245 /* Do the interprocedural propagation. */
5246 ipcp_propagate_stage (&topo);
5247 /* Decide what constant propagation and cloning should be performed. */
5248 ipcp_decision_stage (&topo);
5249 /* Store results of alignment propagation. */
5250 ipcp_store_alignment_results ();
5251 /* Store results of bits propagation. */
5252 ipcp_store_bits_results ();
5253 /* Store results of value range propagation. */
5254 ipcp_store_vr_results ();
5256 /* Free all IPCP structures. */
5257 free_toporder_info (&topo);
5258 next_edge_clone.release ();
5259 prev_edge_clone.release ();
5260 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
5261 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
5262 ipa_free_all_structures_after_ipa_cp ();
5263 if (dump_file)
5264 fprintf (dump_file, "\nIPA constant propagation end\n");
5265 return 0;
5268 /* Initialization and computation of IPCP data structures. This is the initial
5269 intraprocedural analysis of functions, which gathers information to be
5270 propagated later on. */
5272 static void
5273 ipcp_generate_summary (void)
5275 struct cgraph_node *node;
5277 if (dump_file)
5278 fprintf (dump_file, "\nIPA constant propagation start:\n");
5279 ipa_register_cgraph_hooks ();
5281 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
5282 ipa_analyze_node (node);
5285 /* Write ipcp summary for nodes in SET. */
5287 static void
5288 ipcp_write_summary (void)
5290 ipa_prop_write_jump_functions ();
5293 /* Read ipcp summary. */
5295 static void
5296 ipcp_read_summary (void)
5298 ipa_prop_read_jump_functions ();
5301 namespace {
5303 const pass_data pass_data_ipa_cp =
5305 IPA_PASS, /* type */
5306 "cp", /* name */
5307 OPTGROUP_NONE, /* optinfo_flags */
5308 TV_IPA_CONSTANT_PROP, /* tv_id */
5309 0, /* properties_required */
5310 0, /* properties_provided */
5311 0, /* properties_destroyed */
5312 0, /* todo_flags_start */
5313 ( TODO_dump_symtab | TODO_remove_functions ), /* todo_flags_finish */
5316 class pass_ipa_cp : public ipa_opt_pass_d
5318 public:
5319 pass_ipa_cp (gcc::context *ctxt)
5320 : ipa_opt_pass_d (pass_data_ipa_cp, ctxt,
5321 ipcp_generate_summary, /* generate_summary */
5322 ipcp_write_summary, /* write_summary */
5323 ipcp_read_summary, /* read_summary */
5324 ipcp_write_transformation_summaries, /*
5325 write_optimization_summary */
5326 ipcp_read_transformation_summaries, /*
5327 read_optimization_summary */
5328 NULL, /* stmt_fixup */
5329 0, /* function_transform_todo_flags_start */
5330 ipcp_transform_function, /* function_transform */
5331 NULL) /* variable_transform */
5334 /* opt_pass methods: */
5335 virtual bool gate (function *)
5337 /* FIXME: We should remove the optimize check after we ensure we never run
5338 IPA passes when not optimizing. */
5339 return (flag_ipa_cp && optimize) || in_lto_p;
5342 virtual unsigned int execute (function *) { return ipcp_driver (); }
5344 }; // class pass_ipa_cp
5346 } // anon namespace
5348 ipa_opt_pass_d *
5349 make_pass_ipa_cp (gcc::context *ctxt)
5351 return new pass_ipa_cp (ctxt);
5354 /* Reset all state within ipa-cp.c so that we can rerun the compiler
5355 within the same process. For use by toplev::finalize. */
5357 void
5358 ipa_cp_c_finalize (void)
5360 max_count = 0;
5361 overall_size = 0;
5362 max_new_size = 0;