1 /* Interprocedural constant propagation
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
4 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
5 <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Interprocedural constant propagation (IPA-CP).
25 The goal of this transformation is to
27 1) discover functions which are always invoked with some arguments with the
28 same known constant values and modify the functions so that the
29 subsequent optimizations can take advantage of the knowledge, and
31 2) partial specialization - create specialized versions of functions
32 transformed in this way if some parameters are known constants only in
33 certain contexts but the estimated tradeoff between the speedup and the
34 code size cost is deemed good (see the example below).
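   For example, consider this purely illustrative fragment (the function
   names are made up and do not appear in GCC):

     static int
     add_one (int x)
     {
       return x + 1;
     }

     int
     user (void)
     {
       return add_one (7) + add_one (7);
     }

   IPA-CP can discover that the formal parameter X of add_one always has the
   value 7 and substitute the constant into its body (goal 1).  If add_one
   also had callers passing unknown values, a specialized clone could instead
   be created just for the calls that pass 7 (goal 2).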
36 The algorithm also propagates types and attempts to perform type based
37 devirtualization. Types are propagated much like constants.
39 The algorithm basically consists of three stages. In the first, functions
40 are analyzed one at a time and jump functions are constructed for all known
41 call-sites. In the second phase, the pass propagates information from the
42 jump functions across the calls to reveal what values are available at what
43 call sites, performs estimations of effects of known values on functions and
44 their callees, and finally decides what specialized extra versions should be
45 created. In the third, the special versions materialize and appropriate
46 calls are redirected.
48 The algorithm used is to a certain extent based on "Interprocedural Constant
49 Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
50 Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
51 Cooper, Mary W. Hall, and Ken Kennedy.
54 First stage - intraprocedural analysis
55 =======================================
57 This phase computes jump_function and modification flags.
59 A jump function for a call-site represents the values passed as actual
60 arguments of a given call-site. In principle, there are three types of
61 values:
63 Pass through - the caller's formal parameter is passed as an actual
64 argument, plus an operation on it can be performed.
65 Constant - a constant is passed as an actual argument.
66 Unknown - neither of the above.
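   As an illustration (made-up code, not taken from any real testcase), for a
   call such as

     extern int get_rand (void);
     extern void callee (int a, int b, int c);

     void
     caller (int i)
     {
       callee (i + 4, 16, get_rand ());
     }

   the jump function for the first argument is a pass-through of the caller's
   parameter I with operation PLUS_EXPR and operand 4, the jump function for
   the second argument is the constant 16, and the one for the third argument
   is unknown.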
68 All jump function types are described in detail in ipa-prop.h, together with
69 the data structures that represent them and methods of accessing them.
71 ipcp_generate_summary() is the main function of the first stage.
73 Second stage - interprocedural analysis
74 ========================================
76 This stage is itself divided into two phases. In the first, we propagate
77 known values over the call graph, in the second, we make cloning decisions.
78 It uses a different algorithm than the one in the original paper by Callahan et al.
80 First, we traverse the functions topologically from callers to callees and,
81 for each strongly connected component (SCC), we propagate constants
82 according to previously computed jump functions. We also record what known
83 values depend on other known values and estimate local effects. Finally, we
84 propagate cumulative information about these effects from dependent values
85 to those on which they depend.
87 Second, we again traverse the call graph in the same topological order and
88 make clones for functions which we know are called with the same values in
89 all contexts and decide about extra specialized clones of functions just for
90 some contexts - these decisions are based on both local estimates and
91 cumulative estimates propagated from callees.
93 ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
94 second stage.
96 Third phase - materialization of clones, call statement updates.
97 ============================================
99 This stage is currently performed by call graph code (mainly in cgraphunit.c
100 and tree-inline.c) according to instructions inserted to the call graph by
101 the second stage. */
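/* A rough sketch of how the stages above fit together, simplified for
   orientation only (the real driver code appears further down in this file):

     stage 1: ipcp_generate_summary ();      analyze bodies, build jump
                                             functions
     stage 2: ipcp_propagate_stage (&topo);  propagate values and estimates
              ipcp_decision_stage (&topo);   decide about clones
     stage 3: materialization of clones and redirection of calls, performed
              by the call graph code (cgraphunit.c, tree-inline.c).  */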
103 #include "config.h"
104 #include "system.h"
105 #include "coretypes.h"
106 #include "tree.h"
107 #include "target.h"
108 #include "ipa-prop.h"
109 #include "bitmap.h"
110 #include "tree-pass.h"
111 #include "flags.h"
112 #include "diagnostic.h"
113 #include "tree-pretty-print.h"
114 #include "tree-inline.h"
115 #include "params.h"
116 #include "ipa-inline.h"
117 #include "ipa-utils.h"
119 struct ipcp_value;
121 /* Describes a particular source for an IPA-CP value. */
123 struct ipcp_value_source
125 /* Aggregate offset of the source, negative if the source is the scalar value of
126 the argument itself. */
127 HOST_WIDE_INT offset;
128 /* The incoming edge that brought the value. */
129 struct cgraph_edge *cs;
130 /* If the jump function that resulted in this value was a pass-through or an
131 ancestor, this is the ipcp_value of the caller from which the described
132 value has been derived. Otherwise it is NULL. */
133 struct ipcp_value *val;
134 /* Next pointer in a linked list of sources of a value. */
135 struct ipcp_value_source *next;
136 /* If the jump function that resulted in this value was a pass-through or an
137 ancestor, this is the index of the parameter of the caller the jump
138 function references. */
139 int index;
142 /* Describes one particular value stored in struct ipcp_lattice. */
144 struct ipcp_value
146 /* The actual value for the given parameter. This is either an IPA invariant
147 or a TREE_BINFO describing a type that can be used for
148 devirtualization. */
149 tree value;
150 /* The list of sources from which this value originates. */
151 struct ipcp_value_source *sources;
152 /* Next pointers in a linked list of all values in a lattice. */
153 struct ipcp_value *next;
154 /* Next pointers in a linked list of values in a strongly connected component
155 of values. */
156 struct ipcp_value *scc_next;
157 /* Next pointers in a linked list of SCCs of values sorted topologically
158 according their sources. */
159 struct ipcp_value *topo_next;
160 /* A specialized node created for this value, NULL if none has been (so far)
161 created. */
162 struct cgraph_node *spec_node;
163 /* Depth first search number and low link for topological sorting of
164 values. */
165 int dfs, low_link;
166 /* Time benefit and size cost that specializing the function for this value
167 would bring about in this function alone. */
168 int local_time_benefit, local_size_cost;
169 /* Time benefit and size cost that specializing the function for this value
170 can bring about in its callees (transitively). */
171 int prop_time_benefit, prop_size_cost;
172 /* True if this value is currently on the topo-sort stack. */
173 bool on_stack;
176 /* Lattice describing potential values of a formal parameter of a function, or
177 a part of an aggregate. TOP is represented by a lattice with zero values
178 and with contains_variable and bottom flags cleared. BOTTOM is represented
179 by a lattice with the bottom flag set. In that case, values and
180 contains_variable flag should be disregarded. */
182 struct ipcp_lattice
184 /* The list of known values and types in this lattice. Note that values are
185 not deallocated if a lattice is set to bottom because there may be value
186 sources referencing them. */
187 struct ipcp_value *values;
188 /* Number of known values and types in this lattice. */
189 int values_count;
190 /* The lattice contains a variable component (in addition to values). */
191 bool contains_variable;
192 /* The value of the lattice is bottom (i.e. variable and unusable for any
193 propagation). */
194 bool bottom;
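/* As an informal illustration of the states described above, a hypothetical
   classification helper (not part of this file) could read:

     static const char *
     classify_lattice (struct ipcp_lattice *lat)
     {
       if (lat->bottom)
         return "BOTTOM";
       else if (!lat->values_count && !lat->contains_variable)
         return "TOP";
       else if (lat->contains_variable)
         return "VARIABLE, possibly alongside known values";
       else
         return "one or more known constants or types";
     }  */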
197 /* Lattice with an offset to describe a part of an aggregate. */
199 struct ipcp_agg_lattice : public ipcp_lattice
201 /* Offset that is being described by this lattice. */
202 HOST_WIDE_INT offset;
203 /* Size so that we don't have to re-compute it every time we traverse the
204 list. Must correspond to TYPE_SIZE of all lat values. */
205 HOST_WIDE_INT size;
206 /* Next element of the linked list. */
207 struct ipcp_agg_lattice *next;
210 /* Structure containing lattices for a parameter itself and for pieces of
211 aggregates that are passed in the parameter or by a reference through the
212 parameter, plus some other useful flags. */
214 struct ipcp_param_lattices
216 /* Lattice describing the value of the parameter itself. */
217 struct ipcp_lattice itself;
218 /* Lattices describing aggregate parts. */
219 struct ipcp_agg_lattice *aggs;
221 /* Number of aggregate lattices. */
221 int aggs_count;
222 /* True if aggregate data were passed by reference (as opposed to by
223 value). */
224 bool aggs_by_ref;
225 /* All aggregate lattices contain a variable component (in addition to
226 values). */
227 bool aggs_contain_variable;
228 /* The value of all aggregate lattices is bottom (i.e. variable and unusable
229 for any propagation). */
230 bool aggs_bottom;
232 /* There is a virtual call based on this parameter. */
233 bool virt_call;
236 /* Allocation pools for values and their sources in ipa-cp. */
238 alloc_pool ipcp_values_pool;
239 alloc_pool ipcp_sources_pool;
240 alloc_pool ipcp_agg_lattice_pool;
242 /* Maximal count found in program. */
244 static gcov_type max_count;
246 /* Original overall size of the program. */
248 static long overall_size, max_new_size;
250 /* Head of the linked list of topologically sorted values. */
252 static struct ipcp_value *values_topo;
254 /* Return the param lattices structure corresponding to the Ith formal
255 parameter of the function described by INFO. */
256 static inline struct ipcp_param_lattices *
257 ipa_get_parm_lattices (struct ipa_node_params *info, int i)
259 gcc_assert (i >= 0 && i < ipa_get_param_count (info));
260 gcc_checking_assert (!info->ipcp_orig_node);
261 gcc_checking_assert (info->lattices);
262 return &(info->lattices[i]);
265 /* Return the lattice corresponding to the scalar value of the Ith formal
266 parameter of the function described by INFO. */
267 static inline struct ipcp_lattice *
268 ipa_get_scalar_lat (struct ipa_node_params *info, int i)
270 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
271 return &plats->itself;
274 /* Return whether LAT is a lattice with a single constant and without an
275 undefined value. */
277 static inline bool
278 ipa_lat_is_single_const (struct ipcp_lattice *lat)
280 if (lat->bottom
281 || lat->contains_variable
282 || lat->values_count != 1)
283 return false;
284 else
285 return true;
290 /* Print V, which is extracted from a value in a lattice, to F. */
290 static void
291 print_ipcp_constant_value (FILE * f, tree v)
293 if (TREE_CODE (v) == TREE_BINFO)
295 fprintf (f, "BINFO ");
296 print_generic_expr (f, BINFO_TYPE (v), 0);
298 else if (TREE_CODE (v) == ADDR_EXPR
299 && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
301 fprintf (f, "& ");
302 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)), 0);
304 else
305 print_generic_expr (f, v, 0);
308 /* Print a lattice LAT to F. */
310 static void
311 print_lattice (FILE * f, struct ipcp_lattice *lat,
312 bool dump_sources, bool dump_benefits)
314 struct ipcp_value *val;
315 bool prev = false;
317 if (lat->bottom)
319 fprintf (f, "BOTTOM\n");
320 return;
323 if (!lat->values_count && !lat->contains_variable)
325 fprintf (f, "TOP\n");
326 return;
329 if (lat->contains_variable)
331 fprintf (f, "VARIABLE");
332 prev = true;
333 if (dump_benefits)
334 fprintf (f, "\n");
337 for (val = lat->values; val; val = val->next)
339 if (dump_benefits && prev)
340 fprintf (f, " ");
341 else if (!dump_benefits && prev)
342 fprintf (f, ", ");
343 else
344 prev = true;
346 print_ipcp_constant_value (f, val->value);
348 if (dump_sources)
350 struct ipcp_value_source *s;
352 fprintf (f, " [from:");
353 for (s = val->sources; s; s = s->next)
354 fprintf (f, " %i(%i)", s->cs->caller->symbol.order,
355 s->cs->frequency);
356 fprintf (f, "]");
359 if (dump_benefits)
360 fprintf (f, " [loc_time: %i, loc_size: %i, "
361 "prop_time: %i, prop_size: %i]\n",
362 val->local_time_benefit, val->local_size_cost,
363 val->prop_time_benefit, val->prop_size_cost);
365 if (!dump_benefits)
366 fprintf (f, "\n");
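/* For illustration (made-up numbers): with dump_sources set and dump_benefits
   clear, a lattice containing a variable component and the single known
   constant 7 coming from the caller with order 3 over an edge with frequency
   1000 is printed by the function above as

     VARIABLE, 7 [from: 3(1000)]  */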
369 /* Print all ipcp_lattices of all functions to F. */
371 static void
372 print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
374 struct cgraph_node *node;
375 int i, count;
377 fprintf (f, "\nLattices:\n");
378 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
380 struct ipa_node_params *info;
382 info = IPA_NODE_REF (node);
383 fprintf (f, " Node: %s/%i:\n", cgraph_node_name (node),
384 node->symbol.order);
385 count = ipa_get_param_count (info);
386 for (i = 0; i < count; i++)
388 struct ipcp_agg_lattice *aglat;
389 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
390 fprintf (f, " param [%d]: ", i);
391 print_lattice (f, &plats->itself, dump_sources, dump_benefits);
393 if (plats->virt_call)
394 fprintf (f, " virt_call flag set\n");
396 if (plats->aggs_bottom)
398 fprintf (f, " AGGS BOTTOM\n");
399 continue;
401 if (plats->aggs_contain_variable)
402 fprintf (f, " AGGS VARIABLE\n");
403 for (aglat = plats->aggs; aglat; aglat = aglat->next)
405 fprintf (f, " %soffset " HOST_WIDE_INT_PRINT_DEC ": ",
406 plats->aggs_by_ref ? "ref " : "", aglat->offset);
407 print_lattice (f, aglat, dump_sources, dump_benefits);
413 /* Determine whether it is at all technically possible to create clones of NODE
414 and store this information in the ipa_node_params structure associated
415 with NODE. */
417 static void
418 determine_versionability (struct cgraph_node *node)
420 const char *reason = NULL;
422 /* There are a number of generic reasons functions cannot be versioned. We
423 also cannot remove parameters if there are type attributes such as fnspec
424 present. */
425 if (node->symbol.alias || node->thunk.thunk_p)
426 reason = "alias or thunk";
427 else if (!node->local.versionable)
428 reason = "not a tree_versionable_function";
429 else if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
430 reason = "insufficient body availability";
432 if (reason && dump_file && !node->symbol.alias && !node->thunk.thunk_p)
433 fprintf (dump_file, "Function %s/%i is not versionable, reason: %s.\n",
434 cgraph_node_name (node), node->symbol.order, reason);
436 node->local.versionable = (reason == NULL);
439 /* Return true if it is at all technically possible to create clones of
440 NODE. */
442 static bool
443 ipcp_versionable_function_p (struct cgraph_node *node)
445 return node->local.versionable;
448 /* Structure holding accumulated information about callers of a node. */
450 struct caller_statistics
452 gcov_type count_sum;
453 int n_calls, n_hot_calls, freq_sum;
456 /* Initialize fields of STAT to zeroes. */
458 static inline void
459 init_caller_stats (struct caller_statistics *stats)
461 stats->count_sum = 0;
462 stats->n_calls = 0;
463 stats->n_hot_calls = 0;
464 stats->freq_sum = 0;
467 /* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
468 non-thunk incoming edges to NODE. */
470 static bool
471 gather_caller_stats (struct cgraph_node *node, void *data)
473 struct caller_statistics *stats = (struct caller_statistics *) data;
474 struct cgraph_edge *cs;
476 for (cs = node->callers; cs; cs = cs->next_caller)
477 if (cs->caller->thunk.thunk_p)
478 cgraph_for_node_and_aliases (cs->caller, gather_caller_stats,
479 stats, false);
480 else
482 stats->count_sum += cs->count;
483 stats->freq_sum += cs->frequency;
484 stats->n_calls++;
485 if (cgraph_maybe_hot_edge_p (cs))
486 stats->n_hot_calls ++;
488 return false;
494 /* Return true if this NODE is a viable candidate for cloning. */
494 static bool
495 ipcp_cloning_candidate_p (struct cgraph_node *node)
497 struct caller_statistics stats;
499 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
501 if (!flag_ipa_cp_clone)
503 if (dump_file)
504 fprintf (dump_file, "Not considering %s for cloning; "
505 "-fipa-cp-clone disabled.\n",
506 cgraph_node_name (node));
507 return false;
510 if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->symbol.decl)))
512 if (dump_file)
513 fprintf (dump_file, "Not considering %s for cloning; "
514 "optimizing it for size.\n",
515 cgraph_node_name (node));
516 return false;
519 init_caller_stats (&stats);
520 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
522 if (inline_summary (node)->self_size < stats.n_calls)
524 if (dump_file)
525 fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
526 cgraph_node_name (node));
527 return true;
530 /* When profile is available and function is hot, propagate into it even if
531 calls seem cold; constant propagation can improve the function's speed
532 significantly. */
533 if (max_count)
535 if (stats.count_sum > node->count * 90 / 100)
537 if (dump_file)
538 fprintf (dump_file, "Considering %s for cloning; "
539 "usually called directly.\n",
540 cgraph_node_name (node));
541 return true;
544 if (!stats.n_hot_calls)
546 if (dump_file)
547 fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
548 cgraph_node_name (node));
549 return false;
551 if (dump_file)
552 fprintf (dump_file, "Considering %s for cloning.\n",
553 cgraph_node_name (node));
554 return true;
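/* A worked example with made-up numbers: with profile feedback, a node whose
   overall count is 1000 and whose incoming direct calls sum up to a count of
   950 satisfies 950 > 1000 * 90 / 100 and is therefore kept as a cloning
   candidate because it is "usually called directly", even if none of its
   calls is individually considered hot.  */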
557 /* Arrays representing a topological ordering of call graph nodes and a stack
558 of nodes used during constant propagation. */
560 struct topo_info
562 struct cgraph_node **order;
563 struct cgraph_node **stack;
564 int nnodes, stack_top;
567 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
569 static void
570 build_toporder_info (struct topo_info *topo)
572 topo->order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
573 topo->stack = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
574 topo->stack_top = 0;
575 topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
578 /* Free information about strongly connected components and the arrays in
579 TOPO. */
581 static void
582 free_toporder_info (struct topo_info *topo)
584 ipa_free_postorder_info ();
585 free (topo->order);
586 free (topo->stack);
589 /* Add NODE to the stack in TOPO, unless it is already there. */
591 static inline void
592 push_node_to_stack (struct topo_info *topo, struct cgraph_node *node)
594 struct ipa_node_params *info = IPA_NODE_REF (node);
595 if (info->node_enqueued)
596 return;
597 info->node_enqueued = 1;
598 topo->stack[topo->stack_top++] = node;
601 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
602 is empty. */
604 static struct cgraph_node *
605 pop_node_from_stack (struct topo_info *topo)
607 if (topo->stack_top)
609 struct cgraph_node *node;
610 topo->stack_top--;
611 node = topo->stack[topo->stack_top];
612 IPA_NODE_REF (node)->node_enqueued = 0;
613 return node;
615 else
616 return NULL;
619 /* Set lattice LAT to bottom and return true if it previously was not set as
620 such. */
622 static inline bool
623 set_lattice_to_bottom (struct ipcp_lattice *lat)
625 bool ret = !lat->bottom;
626 lat->bottom = true;
627 return ret;
630 /* Mark lattice as containing an unknown value and return true if it previously
631 was not marked as such. */
633 static inline bool
634 set_lattice_contains_variable (struct ipcp_lattice *lat)
636 bool ret = !lat->contains_variable;
637 lat->contains_variable = true;
638 return ret;
641 /* Set all aggregate lattices in PLATS to bottom and return true if they were
642 not previously set as such. */
644 static inline bool
645 set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
647 bool ret = !plats->aggs_bottom;
648 plats->aggs_bottom = true;
649 return ret;
652 /* Mark all aggregate lattices in PLATS as containing an unknown value and
653 return true if they were not previously marked as such. */
655 static inline bool
656 set_agg_lats_contain_variable (struct ipcp_param_lattices *plats)
658 bool ret = !plats->aggs_contain_variable;
659 plats->aggs_contain_variable = true;
660 return ret;
663 /* Mark both aggregate and scalar lattices as containing an unknown value and
664 return true if any of them has not been marked as such so far. */
666 static inline bool
667 set_all_contains_variable (struct ipcp_param_lattices *plats)
669 bool ret = !plats->itself.contains_variable || !plats->aggs_contain_variable;
670 plats->itself.contains_variable = true;
671 plats->aggs_contain_variable = true;
672 return ret;
675 /* Initialize ipcp_lattices. */
677 static void
678 initialize_node_lattices (struct cgraph_node *node)
680 struct ipa_node_params *info = IPA_NODE_REF (node);
681 struct cgraph_edge *ie;
682 bool disable = false, variable = false;
683 int i;
685 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
686 if (!node->local.local)
688 /* When cloning is allowed, we can assume that externally visible
689 functions are not called. We will compensate for this by cloning
690 later. */
691 if (ipcp_versionable_function_p (node)
692 && ipcp_cloning_candidate_p (node))
693 variable = true;
694 else
695 disable = true;
698 if (disable || variable)
700 for (i = 0; i < ipa_get_param_count (info) ; i++)
702 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
703 if (disable)
705 set_lattice_to_bottom (&plats->itself);
706 set_agg_lats_to_bottom (plats);
708 else
709 set_all_contains_variable (plats);
711 if (dump_file && (dump_flags & TDF_DETAILS)
712 && !node->symbol.alias && !node->thunk.thunk_p)
713 fprintf (dump_file, "Marking all lattices of %s/%i as %s\n",
714 cgraph_node_name (node), node->symbol.order,
715 disable ? "BOTTOM" : "VARIABLE");
718 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
719 if (ie->indirect_info->polymorphic
720 && ie->indirect_info->param_index >= 0)
722 gcc_checking_assert (ie->indirect_info->param_index >= 0);
723 ipa_get_parm_lattices (info,
724 ie->indirect_info->param_index)->virt_call = 1;
728 /* Return the result of a (possibly arithmetic) pass-through jump function
729 JFUNC on the constant value INPUT. Return NULL_TREE if that cannot be
730 determined or cannot be considered an interprocedural invariant. */
732 static tree
733 ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
735 tree restype, res;
737 if (TREE_CODE (input) == TREE_BINFO)
739 if (ipa_get_jf_pass_through_type_preserved (jfunc))
741 gcc_checking_assert (ipa_get_jf_pass_through_operation (jfunc)
742 == NOP_EXPR);
743 return input;
745 return NULL_TREE;
748 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
749 return input;
751 gcc_checking_assert (is_gimple_ip_invariant (input));
752 if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc))
753 == tcc_comparison)
754 restype = boolean_type_node;
755 else
756 restype = TREE_TYPE (input);
757 res = fold_binary (ipa_get_jf_pass_through_operation (jfunc), restype,
758 input, ipa_get_jf_pass_through_operand (jfunc));
760 if (res && !is_gimple_ip_invariant (res))
761 return NULL_TREE;
763 return res;
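/* A concrete (made-up) instance of the above: if the jump function describes
   passing "i + 4" where I is a parameter of the caller, and I has been
   determined to have the constant value 7, the fold above evaluates
   PLUS_EXPR with operands 7 and 4 and the function returns the
   interprocedural invariant 11.  */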
766 /* Return the result of an ancestor jump function JFUNC on the constant value
767 INPUT. Return NULL_TREE if that cannot be determined. */
769 static tree
770 ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
772 if (TREE_CODE (input) == TREE_BINFO)
774 if (!ipa_get_jf_ancestor_type_preserved (jfunc))
775 return NULL;
776 return get_binfo_at_offset (input,
777 ipa_get_jf_ancestor_offset (jfunc),
778 ipa_get_jf_ancestor_type (jfunc));
780 else if (TREE_CODE (input) == ADDR_EXPR)
782 tree t = TREE_OPERAND (input, 0);
783 t = build_ref_for_offset (EXPR_LOCATION (t), t,
784 ipa_get_jf_ancestor_offset (jfunc),
785 ipa_get_jf_ancestor_type (jfunc), NULL, false);
786 return build_fold_addr_expr (t);
788 else
789 return NULL_TREE;
792 /* Determine whether JFUNC evaluates to a known value (that is either a
793 constant or a binfo) and if so, return it. Otherwise return NULL. INFO
794 describes the caller node so that pass-through jump functions can be
795 evaluated. */
797 tree
798 ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
800 if (jfunc->type == IPA_JF_CONST)
801 return ipa_get_jf_constant (jfunc);
802 else if (jfunc->type == IPA_JF_KNOWN_TYPE)
803 return ipa_binfo_from_known_type_jfunc (jfunc);
804 else if (jfunc->type == IPA_JF_PASS_THROUGH
805 || jfunc->type == IPA_JF_ANCESTOR)
807 tree input;
808 int idx;
810 if (jfunc->type == IPA_JF_PASS_THROUGH)
811 idx = ipa_get_jf_pass_through_formal_id (jfunc);
812 else
813 idx = ipa_get_jf_ancestor_formal_id (jfunc);
815 if (info->ipcp_orig_node)
816 input = info->known_vals[idx];
817 else
819 struct ipcp_lattice *lat;
821 if (!info->lattices)
823 gcc_checking_assert (!flag_ipa_cp);
824 return NULL_TREE;
826 lat = ipa_get_scalar_lat (info, idx);
827 if (!ipa_lat_is_single_const (lat))
828 return NULL_TREE;
829 input = lat->values->value;
832 if (!input)
833 return NULL_TREE;
835 if (jfunc->type == IPA_JF_PASS_THROUGH)
836 return ipa_get_jf_pass_through_result (jfunc, input);
837 else
838 return ipa_get_jf_ancestor_result (jfunc, input);
840 else
841 return NULL_TREE;
845 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
846 bottom, not containing a variable component and without any known value at
847 the same time. */
849 DEBUG_FUNCTION void
850 ipcp_verify_propagated_values (void)
852 struct cgraph_node *node;
854 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
856 struct ipa_node_params *info = IPA_NODE_REF (node);
857 int i, count = ipa_get_param_count (info);
859 for (i = 0; i < count; i++)
861 struct ipcp_lattice *lat = ipa_get_scalar_lat (info, i);
863 if (!lat->bottom
864 && !lat->contains_variable
865 && lat->values_count == 0)
867 if (dump_file)
869 fprintf (dump_file, "\nIPA lattices after constant "
870 "propagation:\n");
871 print_all_lattices (dump_file, true, false);
874 gcc_unreachable ();
880 /* Return true iff X and Y should be considered equal values by IPA-CP. */
882 static bool
883 values_equal_for_ipcp_p (tree x, tree y)
885 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
887 if (x == y)
888 return true;
890 if (TREE_CODE (x) == TREE_BINFO || TREE_CODE (y) == TREE_BINFO)
891 return false;
893 if (TREE_CODE (x) == ADDR_EXPR
894 && TREE_CODE (y) == ADDR_EXPR
895 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
896 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
897 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
898 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
899 else
900 return operand_equal_p (x, y, 0);
903 /* Add a new value source to VAL, marking that a value comes from edge CS and
904 (if the underlying jump function is a pass-through or an ancestor one) from
905 a caller value SRC_VAL of a caller parameter described by SRC_INDEX. OFFSET
906 is negative if the source was the scalar value of the parameter itself,
907 otherwise it is the offset within an aggregate. */
909 static void
910 add_value_source (struct ipcp_value *val, struct cgraph_edge *cs,
911 struct ipcp_value *src_val, int src_idx, HOST_WIDE_INT offset)
913 struct ipcp_value_source *src;
915 src = (struct ipcp_value_source *) pool_alloc (ipcp_sources_pool);
916 src->offset = offset;
917 src->cs = cs;
918 src->val = src_val;
919 src->index = src_idx;
921 src->next = val->sources;
922 val->sources = src;
925 /* Try to add NEWVAL to LAT, potentially creating a new struct ipcp_value for
926 it. CS, SRC_VAL, SRC_INDEX and OFFSET are meant for add_value_source and
927 have the same meaning. */
929 static bool
930 add_value_to_lattice (struct ipcp_lattice *lat, tree newval,
931 struct cgraph_edge *cs, struct ipcp_value *src_val,
932 int src_idx, HOST_WIDE_INT offset)
934 struct ipcp_value *val;
936 if (lat->bottom)
937 return false;
939 for (val = lat->values; val; val = val->next)
940 if (values_equal_for_ipcp_p (val->value, newval))
942 if (ipa_edge_within_scc (cs))
944 struct ipcp_value_source *s;
945 for (s = val->sources; s ; s = s->next)
946 if (s->cs == cs)
947 break;
948 if (s)
949 return false;
952 add_value_source (val, cs, src_val, src_idx, offset);
953 return false;
956 if (lat->values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
958 /* We can only free sources, not the values themselves, because sources
959 of other values in this SCC might point to them. */
960 for (val = lat->values; val; val = val->next)
962 while (val->sources)
964 struct ipcp_value_source *src = val->sources;
965 val->sources = src->next;
966 pool_free (ipcp_sources_pool, src);
970 lat->values = NULL;
971 return set_lattice_to_bottom (lat);
974 lat->values_count++;
975 val = (struct ipcp_value *) pool_alloc (ipcp_values_pool);
976 memset (val, 0, sizeof (*val));
978 add_value_source (val, cs, src_val, src_idx, offset);
979 val->value = newval;
980 val->next = lat->values;
981 lat->values = val;
982 return true;
985 /* Like above but passes a special value of offset to distinguish that the
986 origin is the scalar value of the parameter rather than a part of an
987 aggregate. */
989 static inline bool
990 add_scalar_value_to_lattice (struct ipcp_lattice *lat, tree newval,
991 struct cgraph_edge *cs,
992 struct ipcp_value *src_val, int src_idx)
994 return add_value_to_lattice (lat, newval, cs, src_val, src_idx, -1);
997 /* Propagate values through a pass-through jump function JFUNC associated with
998 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
999 is the index of the source parameter. */
1001 static bool
1002 propagate_vals_accross_pass_through (struct cgraph_edge *cs,
1003 struct ipa_jump_func *jfunc,
1004 struct ipcp_lattice *src_lat,
1005 struct ipcp_lattice *dest_lat,
1006 int src_idx)
1008 struct ipcp_value *src_val;
1009 bool ret = false;
1011 /* Do not create new values when propagating within an SCC because if there
1012 are arithmetic functions with circular dependencies, there is an infinite
1013 number of them and we would just make lattices bottom. */
1014 if ((ipa_get_jf_pass_through_operation (jfunc) != NOP_EXPR)
1015 && ipa_edge_within_scc (cs))
1016 ret = set_lattice_contains_variable (dest_lat);
1017 else
1018 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1020 tree cstval = ipa_get_jf_pass_through_result (jfunc, src_val->value);
1022 if (cstval)
1023 ret |= add_scalar_value_to_lattice (dest_lat, cstval, cs, src_val,
1024 src_idx);
1025 else
1026 ret |= set_lattice_contains_variable (dest_lat);
1029 return ret;
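/* To see why the SCC check above is needed, consider a made-up self-recursive
   function

     static void
     f (int i)
     {
       if (i < 100)
         f (i + 1);
     }

   called from elsewhere as f (0).  The recursive edge stays within an SCC and
   its jump function is an arithmetic pass-through, so without the check
   propagation would generate an unbounded stream of new values 0, 1, 2, ...
   and only end up dropping the lattice to bottom once the value list
   overflows; marking the lattice as containing a variable avoids that.  */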
1032 /* Propagate values through an ancestor jump function JFUNC associated with
1033 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1034 is the index of the source parameter. */
1036 static bool
1037 propagate_vals_accross_ancestor (struct cgraph_edge *cs,
1038 struct ipa_jump_func *jfunc,
1039 struct ipcp_lattice *src_lat,
1040 struct ipcp_lattice *dest_lat,
1041 int src_idx)
1043 struct ipcp_value *src_val;
1044 bool ret = false;
1046 if (ipa_edge_within_scc (cs))
1047 return set_lattice_contains_variable (dest_lat);
1049 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1051 tree t = ipa_get_jf_ancestor_result (jfunc, src_val->value);
1053 if (t)
1054 ret |= add_scalar_value_to_lattice (dest_lat, t, cs, src_val, src_idx);
1055 else
1056 ret |= set_lattice_contains_variable (dest_lat);
1059 return ret;
1062 /* Propagate scalar values across jump function JFUNC that is associated with
1063 edge CS and put the values into DEST_LAT. */
1065 static bool
1066 propagate_scalar_accross_jump_function (struct cgraph_edge *cs,
1067 struct ipa_jump_func *jfunc,
1068 struct ipcp_lattice *dest_lat)
1070 if (dest_lat->bottom)
1071 return false;
1073 if (jfunc->type == IPA_JF_CONST
1074 || jfunc->type == IPA_JF_KNOWN_TYPE)
1076 tree val;
1078 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1080 val = ipa_binfo_from_known_type_jfunc (jfunc);
1081 if (!val)
1082 return set_lattice_contains_variable (dest_lat);
1084 else
1085 val = ipa_get_jf_constant (jfunc);
1086 return add_scalar_value_to_lattice (dest_lat, val, cs, NULL, 0);
1088 else if (jfunc->type == IPA_JF_PASS_THROUGH
1089 || jfunc->type == IPA_JF_ANCESTOR)
1091 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1092 struct ipcp_lattice *src_lat;
1093 int src_idx;
1094 bool ret;
1096 if (jfunc->type == IPA_JF_PASS_THROUGH)
1097 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1098 else
1099 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1101 src_lat = ipa_get_scalar_lat (caller_info, src_idx);
1102 if (src_lat->bottom)
1103 return set_lattice_contains_variable (dest_lat);
1105 /* If we would need to clone the caller and cannot, do not propagate. */
1106 if (!ipcp_versionable_function_p (cs->caller)
1107 && (src_lat->contains_variable
1108 || (src_lat->values_count > 1)))
1109 return set_lattice_contains_variable (dest_lat);
1111 if (jfunc->type == IPA_JF_PASS_THROUGH)
1112 ret = propagate_vals_accross_pass_through (cs, jfunc, src_lat,
1113 dest_lat, src_idx);
1114 else
1115 ret = propagate_vals_accross_ancestor (cs, jfunc, src_lat, dest_lat,
1116 src_idx);
1118 if (src_lat->contains_variable)
1119 ret |= set_lattice_contains_variable (dest_lat);
1121 return ret;
1124 /* TODO: We currently do not handle member method pointers in IPA-CP (we only
1125 use it for indirect inlining), we should propagate them too. */
1126 return set_lattice_contains_variable (dest_lat);
1129 /* If DEST_PLATS already has aggregate items, check that aggs_by_ref matches
1130 NEW_AGGS_BY_REF and if not, mark all aggs as bottoms and return true (in all
1131 other cases, return false). If there are no aggregate items, set
1132 aggs_by_ref to NEW_AGGS_BY_REF. */
1134 static bool
1135 set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
1136 bool new_aggs_by_ref)
1138 if (dest_plats->aggs)
1140 if (dest_plats->aggs_by_ref != new_aggs_by_ref)
1142 set_agg_lats_to_bottom (dest_plats);
1143 return true;
1146 else
1147 dest_plats->aggs_by_ref = new_aggs_by_ref;
1148 return false;
1151 /* Walk aggregate lattices in DEST_PLATS from **AGLAT on, until **AGLAT points
1152 to an already existing lattice for the given OFFSET and VAL_SIZE, marking all
1153 skipped lattices as containing a variable and checking for overlaps. If there
1154 is no already existing lattice for the OFFSET and VAL_SIZE, create one,
1155 initialize its offset and size and set its contains_variable flag to
1156 PRE_EXISTING, and return true, unless there are already too many lattices, in
1157 which case return false. If there are overlaps, turn the whole DEST_PLATS to
1158 bottom and return false. If any skipped lattices were newly marked as
1159 containing a variable, set *CHANGE to true. */
1161 static bool
1162 merge_agg_lats_step (struct ipcp_param_lattices *dest_plats,
1163 HOST_WIDE_INT offset, HOST_WIDE_INT val_size,
1164 struct ipcp_agg_lattice ***aglat,
1165 bool pre_existing, bool *change)
1167 gcc_checking_assert (offset >= 0);
1169 while (**aglat && (**aglat)->offset < offset)
1171 if ((**aglat)->offset + (**aglat)->size > offset)
1173 set_agg_lats_to_bottom (dest_plats);
1174 return false;
1176 *change |= set_lattice_contains_variable (**aglat);
1177 *aglat = &(**aglat)->next;
1180 if (**aglat && (**aglat)->offset == offset)
1182 if ((**aglat)->size != val_size
1183 || ((**aglat)->next
1184 && (**aglat)->next->offset < offset + val_size))
1186 set_agg_lats_to_bottom (dest_plats);
1187 return false;
1189 gcc_checking_assert (!(**aglat)->next
1190 || (**aglat)->next->offset >= offset + val_size);
1191 return true;
1193 else
1195 struct ipcp_agg_lattice *new_al;
1197 if (**aglat && (**aglat)->offset < offset + val_size)
1199 set_agg_lats_to_bottom (dest_plats);
1200 return false;
1202 if (dest_plats->aggs_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1203 return false;
1204 dest_plats->aggs_count++;
1205 new_al = (struct ipcp_agg_lattice *) pool_alloc (ipcp_agg_lattice_pool);
1206 memset (new_al, 0, sizeof (*new_al));
1208 new_al->offset = offset;
1209 new_al->size = val_size;
1210 new_al->contains_variable = pre_existing;
1212 new_al->next = **aglat;
1213 **aglat = new_al;
1214 return true;
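/* An illustrative example (offsets and sizes are in bits): for a by-value
   aggregate parameter of a made-up type

     struct S { int a; int b; };

   whose callers pass known constants in both fields, the parameter typically
   ends up with two aggregate lattices, one at offset 0 and one at offset 32
   on a target with 32-bit int, each of size 32.  If some caller instead
   passed a single known 64-bit value covering both fields, the walk above
   would detect the size mismatch at offset 0 and drop all aggregate lattices
   of the parameter to bottom.  */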
1218 /* Set all AGLAT and all other aggregate lattices reachable by next pointers as
1219 containing an unknown value. */
1221 static bool
1222 set_chain_of_aglats_contains_variable (struct ipcp_agg_lattice *aglat)
1224 bool ret = false;
1225 while (aglat)
1227 ret |= set_lattice_contains_variable (aglat);
1228 aglat = aglat->next;
1230 return ret;
1233 /* Merge existing aggregate lattices in SRC_PLATS to DEST_PLATS, subtracting
1234 OFFSET_DELTA. CS is the call graph edge and SRC_IDX the index of the source
1235 parameter used for lattice value sources. Return true if DEST_PLATS changed
1236 in any way. */
1238 static bool
1239 merge_aggregate_lattices (struct cgraph_edge *cs,
1240 struct ipcp_param_lattices *dest_plats,
1241 struct ipcp_param_lattices *src_plats,
1242 int src_idx, HOST_WIDE_INT offset_delta)
1244 bool pre_existing = dest_plats->aggs != NULL;
1245 struct ipcp_agg_lattice **dst_aglat;
1246 bool ret = false;
1248 if (set_check_aggs_by_ref (dest_plats, src_plats->aggs_by_ref))
1249 return true;
1250 if (src_plats->aggs_bottom)
1251 return set_agg_lats_contain_variable (dest_plats);
1252 if (src_plats->aggs_contain_variable)
1253 ret |= set_agg_lats_contain_variable (dest_plats);
1254 dst_aglat = &dest_plats->aggs;
1256 for (struct ipcp_agg_lattice *src_aglat = src_plats->aggs;
1257 src_aglat;
1258 src_aglat = src_aglat->next)
1260 HOST_WIDE_INT new_offset = src_aglat->offset - offset_delta;
1262 if (new_offset < 0)
1263 continue;
1264 if (merge_agg_lats_step (dest_plats, new_offset, src_aglat->size,
1265 &dst_aglat, pre_existing, &ret))
1267 struct ipcp_agg_lattice *new_al = *dst_aglat;
1269 dst_aglat = &(*dst_aglat)->next;
1270 if (src_aglat->bottom)
1272 ret |= set_lattice_contains_variable (new_al);
1273 continue;
1275 if (src_aglat->contains_variable)
1276 ret |= set_lattice_contains_variable (new_al);
1277 for (struct ipcp_value *val = src_aglat->values;
1278 val;
1279 val = val->next)
1280 ret |= add_value_to_lattice (new_al, val->value, cs, val, src_idx,
1281 src_aglat->offset);
1283 else if (dest_plats->aggs_bottom)
1284 return true;
1286 ret |= set_chain_of_aglats_contains_variable (*dst_aglat);
1287 return ret;
1290 /* Determine whether there is anything to propagate from SRC_PLATS through a
1291 pass-through JFUNC and if so, whether it conforms to the rules about
1292 propagating values passed by reference. */
1294 static bool
1295 agg_pass_through_permissible_p (struct ipcp_param_lattices *src_plats,
1296 struct ipa_jump_func *jfunc)
1298 return src_plats->aggs
1299 && (!src_plats->aggs_by_ref
1300 || ipa_get_jf_pass_through_agg_preserved (jfunc));
1303 /* Propagate aggregate values across jump function JFUNC that is associated
1304 with edge CS and put the values into DEST_PLATS. */
1306 static bool
1307 propagate_aggs_accross_jump_function (struct cgraph_edge *cs,
1308 struct ipa_jump_func *jfunc,
1309 struct ipcp_param_lattices *dest_plats)
1311 bool ret = false;
1313 if (dest_plats->aggs_bottom)
1314 return false;
1316 if (jfunc->type == IPA_JF_PASS_THROUGH
1317 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
1319 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1320 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1321 struct ipcp_param_lattices *src_plats;
1323 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1324 if (agg_pass_through_permissible_p (src_plats, jfunc))
1326 /* Currently we do not produce clobber aggregate jump
1327 functions, replace with merging when we do. */
1328 gcc_assert (!jfunc->agg.items);
1329 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats,
1330 src_idx, 0);
1332 else
1333 ret |= set_agg_lats_contain_variable (dest_plats);
1335 else if (jfunc->type == IPA_JF_ANCESTOR
1336 && ipa_get_jf_ancestor_agg_preserved (jfunc))
1338 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1339 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1340 struct ipcp_param_lattices *src_plats;
1342 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1343 if (src_plats->aggs && src_plats->aggs_by_ref)
1345 /* Currently we do not produce clobber aggregate jump
1346 functions, replace with merging when we do. */
1347 gcc_assert (!jfunc->agg.items);
1348 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats, src_idx,
1349 ipa_get_jf_ancestor_offset (jfunc));
1351 else if (!src_plats->aggs_by_ref)
1352 ret |= set_agg_lats_to_bottom (dest_plats);
1353 else
1354 ret |= set_agg_lats_contain_variable (dest_plats);
1356 else if (jfunc->agg.items)
1358 bool pre_existing = dest_plats->aggs != NULL;
1359 struct ipcp_agg_lattice **aglat = &dest_plats->aggs;
1360 struct ipa_agg_jf_item *item;
1361 int i;
1363 if (set_check_aggs_by_ref (dest_plats, jfunc->agg.by_ref))
1364 return true;
1366 FOR_EACH_VEC_ELT (*jfunc->agg.items, i, item)
1368 HOST_WIDE_INT val_size;
1370 if (item->offset < 0)
1371 continue;
1372 gcc_checking_assert (is_gimple_ip_invariant (item->value));
1373 val_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (item->value)), 1);
1375 if (merge_agg_lats_step (dest_plats, item->offset, val_size,
1376 &aglat, pre_existing, &ret))
1378 ret |= add_value_to_lattice (*aglat, item->value, cs, NULL, 0, 0);
1379 aglat = &(*aglat)->next;
1381 else if (dest_plats->aggs_bottom)
1382 return true;
1385 ret |= set_chain_of_aglats_contains_variable (*aglat);
1387 else
1388 ret |= set_agg_lats_contain_variable (dest_plats);
1390 return ret;
1393 /* Propagate constants from the caller to the callee of CS. Return true if
1394 any lattice of the callee has changed. */
1396 static bool
1397 propagate_constants_accross_call (struct cgraph_edge *cs)
1399 struct ipa_node_params *callee_info;
1400 enum availability availability;
1401 struct cgraph_node *callee, *alias_or_thunk;
1402 struct ipa_edge_args *args;
1403 bool ret = false;
1404 int i, args_count, parms_count;
1406 callee = cgraph_function_node (cs->callee, &availability);
1407 if (!callee->symbol.definition)
1408 return false;
1409 gcc_checking_assert (cgraph_function_with_gimple_body_p (callee));
1410 callee_info = IPA_NODE_REF (callee);
1412 args = IPA_EDGE_REF (cs);
1413 args_count = ipa_get_cs_argument_count (args);
1414 parms_count = ipa_get_param_count (callee_info);
1416 /* If this call goes through a thunk we must not propagate to the first (0th)
1417 parameter. However, we might need to uncover a thunk from below a series
1418 of aliases first. */
1419 alias_or_thunk = cs->callee;
1420 while (alias_or_thunk->symbol.alias)
1421 alias_or_thunk = cgraph_alias_target (alias_or_thunk);
1422 if (alias_or_thunk->thunk.thunk_p)
1424 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
1425 0));
1426 i = 1;
1428 else
1429 i = 0;
1431 for (; (i < args_count) && (i < parms_count); i++)
1433 struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
1434 struct ipcp_param_lattices *dest_plats;
1436 dest_plats = ipa_get_parm_lattices (callee_info, i);
1437 if (availability == AVAIL_OVERWRITABLE)
1438 ret |= set_all_contains_variable (dest_plats);
1439 else
1441 ret |= propagate_scalar_accross_jump_function (cs, jump_func,
1442 &dest_plats->itself);
1443 ret |= propagate_aggs_accross_jump_function (cs, jump_func,
1444 dest_plats);
1447 for (; i < parms_count; i++)
1448 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info, i));
1450 return ret;
1453 /* If an indirect edge IE can be turned into a direct one based on KNOWN_VALS
1454 (which can contain both constants and binfos), KNOWN_BINFOS, KNOWN_AGGS or
1455 AGG_REPS, return the destination. The latter three can be NULL. If AGG_REPS
1456 is not NULL, KNOWN_AGGS is ignored. */
1458 static tree
1459 ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
1460 vec<tree> known_vals,
1461 vec<tree> known_binfos,
1462 vec<ipa_agg_jump_function_p> known_aggs,
1463 struct ipa_agg_replacement_value *agg_reps)
1465 int param_index = ie->indirect_info->param_index;
1466 HOST_WIDE_INT token, anc_offset;
1467 tree otr_type;
1468 tree t;
1469 tree target;
1471 if (param_index == -1
1472 || known_vals.length () <= (unsigned int) param_index)
1473 return NULL_TREE;
1475 if (!ie->indirect_info->polymorphic)
1477 tree t;
1479 if (ie->indirect_info->agg_contents)
1481 if (agg_reps)
1483 t = NULL;
1484 while (agg_reps)
1486 if (agg_reps->index == param_index
1487 && agg_reps->offset == ie->indirect_info->offset
1488 && agg_reps->by_ref == ie->indirect_info->by_ref)
1490 t = agg_reps->value;
1491 break;
1493 agg_reps = agg_reps->next;
1496 else if (known_aggs.length () > (unsigned int) param_index)
1498 struct ipa_agg_jump_function *agg;
1499 agg = known_aggs[param_index];
1500 t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
1501 ie->indirect_info->by_ref);
1503 else
1504 t = NULL;
1506 else
1507 t = known_vals[param_index];
1509 if (t &&
1510 TREE_CODE (t) == ADDR_EXPR
1511 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
1512 return TREE_OPERAND (t, 0);
1513 else
1514 return NULL_TREE;
1517 gcc_assert (!ie->indirect_info->agg_contents);
1518 token = ie->indirect_info->otr_token;
1519 anc_offset = ie->indirect_info->offset;
1520 otr_type = ie->indirect_info->otr_type;
1522 t = known_vals[param_index];
1523 if (!t && known_binfos.length () > (unsigned int) param_index)
1524 t = known_binfos[param_index];
1525 if (!t)
1526 return NULL_TREE;
1528 if (TREE_CODE (t) != TREE_BINFO)
1530 tree binfo;
1531 binfo = gimple_extract_devirt_binfo_from_cst
1532 (t, ie->indirect_info->otr_type);
1533 if (!binfo)
1534 return NULL_TREE;
1535 binfo = get_binfo_at_offset (binfo, anc_offset, otr_type);
1536 if (!binfo)
1537 return NULL_TREE;
1538 target = gimple_get_virt_method_for_binfo (token, binfo);
1540 else
1542 tree binfo;
1544 binfo = get_binfo_at_offset (t, anc_offset, otr_type);
1545 if (!binfo)
1546 return NULL_TREE;
1547 target = gimple_get_virt_method_for_binfo (token, binfo);
1549 #ifdef ENABLE_CHECKING
1550 if (target)
1551 gcc_assert (possible_polymorphic_call_target_p
1552 (ie, cgraph_get_node (target)));
1553 #endif
1555 return target;
1559 /* If an indirect edge IE can be turned into a direct one based on KNOWN_VALS
1560 (which can contain both constants and binfos), KNOWN_BINFOS (which can be
1561 NULL) or KNOWN_AGGS (which also can be NULL) return the destination. */
1563 tree
1564 ipa_get_indirect_edge_target (struct cgraph_edge *ie,
1565 vec<tree> known_vals,
1566 vec<tree> known_binfos,
1567 vec<ipa_agg_jump_function_p> known_aggs)
1569 return ipa_get_indirect_edge_target_1 (ie, known_vals, known_binfos,
1570 known_aggs, NULL);
1573 /* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
1574 and KNOWN_BINFOS, as well as the aggregate values in KNOWN_AGGS. */
1576 static int
1577 devirtualization_time_bonus (struct cgraph_node *node,
1578 vec<tree> known_csts,
1579 vec<tree> known_binfos,
1580 vec<ipa_agg_jump_function_p> known_aggs)
1582 struct cgraph_edge *ie;
1583 int res = 0;
1585 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1587 struct cgraph_node *callee;
1588 struct inline_summary *isummary;
1589 tree target;
1591 target = ipa_get_indirect_edge_target (ie, known_csts, known_binfos,
1592 known_aggs);
1593 if (!target)
1594 continue;
1596 /* Only bare minimum benefit for clearly un-inlineable targets. */
1597 res += 1;
1598 callee = cgraph_get_node (target);
1599 if (!callee || !callee->symbol.definition)
1600 continue;
1601 isummary = inline_summary (callee);
1602 if (!isummary->inlinable)
1603 continue;
1605 /* FIXME: The values below need re-considering and perhaps also
1606 integrating into the cost metrics, at least in some very basic way. */
1607 if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
1608 res += 31;
1609 else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
1610 res += 15;
1611 else if (isummary->size <= MAX_INLINE_INSNS_AUTO
1612 || DECL_DECLARED_INLINE_P (callee->symbol.decl))
1613 res += 7;
1616 return res;
1619 /* Return time bonus incurred because of HINTS. */
1621 static int
1622 hint_time_bonus (inline_hints hints)
1624 int result = 0;
1625 if (hints & (INLINE_HINT_loop_iterations | INLINE_HINT_loop_stride))
1626 result += PARAM_VALUE (PARAM_IPA_CP_LOOP_HINT_BONUS);
1627 if (hints & INLINE_HINT_array_index)
1628 result += PARAM_VALUE (PARAM_IPA_CP_ARRAY_INDEX_HINT_BONUS);
1629 return result;
1632 /* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
1633 and SIZE_COST, and with the sums of frequencies and profile counts of the
1634 incoming edges to the potential new clone in FREQ_SUM and COUNT_SUM. */
1636 static bool
1637 good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
1638 int freq_sum, gcov_type count_sum, int size_cost)
1640 if (time_benefit == 0
1641 || !flag_ipa_cp_clone
1642 || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->symbol.decl)))
1643 return false;
1645 gcc_assert (size_cost > 0);
1647 if (max_count)
1649 int factor = (count_sum * 1000) / max_count;
1650 HOST_WIDEST_INT evaluation = (((HOST_WIDEST_INT) time_benefit * factor)
1651 / size_cost);
1653 if (dump_file && (dump_flags & TDF_DETAILS))
1654 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1655 "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
1656 ") -> evaluation: " HOST_WIDEST_INT_PRINT_DEC
1657 ", threshold: %i\n",
1658 time_benefit, size_cost, (HOST_WIDE_INT) count_sum,
1659 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
1661 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
1663 else
1665 HOST_WIDEST_INT evaluation = (((HOST_WIDEST_INT) time_benefit * freq_sum)
1666 / size_cost);
1668 if (dump_file && (dump_flags & TDF_DETAILS))
1669 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1670 "size: %i, freq_sum: %i) -> evaluation: "
1671 HOST_WIDEST_INT_PRINT_DEC ", threshold: %i\n",
1672 time_benefit, size_cost, freq_sum, evaluation,
1673 PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
1675 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
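/* A worked example with made-up numbers for the non-profile path above: a
   time_benefit of 100, a freq_sum of 300 and a size_cost of 60 give an
   evaluation of 100 * 300 / 60 = 500, and cloning is considered worthwhile
   only if that value reaches PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD).  */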
1679 /* Return all context independent values from aggregate lattices in PLATS in a
1680 vector. Return NULL if there are none. */
1682 static vec<ipa_agg_jf_item_t, va_gc> *
1683 context_independent_aggregate_values (struct ipcp_param_lattices *plats)
1685 vec<ipa_agg_jf_item_t, va_gc> *res = NULL;
1687 if (plats->aggs_bottom
1688 || plats->aggs_contain_variable
1689 || plats->aggs_count == 0)
1690 return NULL;
1692 for (struct ipcp_agg_lattice *aglat = plats->aggs;
1693 aglat;
1694 aglat = aglat->next)
1695 if (ipa_lat_is_single_const (aglat))
1697 struct ipa_agg_jf_item item;
1698 item.offset = aglat->offset;
1699 item.value = aglat->values->value;
1700 vec_safe_push (res, item);
1702 return res;
1705 /* Allocate KNOWN_CSTS, KNOWN_BINFOS and, if non-NULL, KNOWN_AGGS and populate
1706 them with values of parameters that are known independent of the context.
1707 INFO describes the function. If REMOVABLE_PARAMS_COST is non-NULL, the
1708 movement cost of all removable parameters will be stored in it. */
1710 static bool
1711 gather_context_independent_values (struct ipa_node_params *info,
1712 vec<tree> *known_csts,
1713 vec<tree> *known_binfos,
1714 vec<ipa_agg_jump_function_t> *known_aggs,
1715 int *removable_params_cost)
1717 int i, count = ipa_get_param_count (info);
1718 bool ret = false;
1720 known_csts->create (0);
1721 known_binfos->create (0);
1722 known_csts->safe_grow_cleared (count);
1723 known_binfos->safe_grow_cleared (count);
1724 if (known_aggs)
1726 known_aggs->create (0);
1727 known_aggs->safe_grow_cleared (count);
1730 if (removable_params_cost)
1731 *removable_params_cost = 0;
1733 for (i = 0; i < count ; i++)
1735 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1736 struct ipcp_lattice *lat = &plats->itself;
1738 if (ipa_lat_is_single_const (lat))
1740 struct ipcp_value *val = lat->values;
1741 if (TREE_CODE (val->value) != TREE_BINFO)
1743 (*known_csts)[i] = val->value;
1744 if (removable_params_cost)
1745 *removable_params_cost
1746 += estimate_move_cost (TREE_TYPE (val->value));
1747 ret = true;
1749 else if (plats->virt_call)
1751 (*known_binfos)[i] = val->value;
1752 ret = true;
1754 else if (removable_params_cost
1755 && !ipa_is_param_used (info, i))
1756 *removable_params_cost += ipa_get_param_move_cost (info, i);
1758 else if (removable_params_cost
1759 && !ipa_is_param_used (info, i))
1760 *removable_params_cost
1761 += ipa_get_param_move_cost (info, i);
1763 if (known_aggs)
1765 vec<ipa_agg_jf_item_t, va_gc> *agg_items;
1766 struct ipa_agg_jump_function *ajf;
1768 agg_items = context_independent_aggregate_values (plats);
1769 ajf = &(*known_aggs)[i];
1770 ajf->items = agg_items;
1771 ajf->by_ref = plats->aggs_by_ref;
1772 ret |= agg_items != NULL;
1776 return ret;
1779 /* The current interface in ipa-inline-analysis requires a pointer vector.
1780 Create it.
1782 FIXME: That interface should be re-worked, this is slightly silly. Still,
1783 I'd like to discuss how to change it first and this demonstrates the
1784 issue. */
1786 static vec<ipa_agg_jump_function_p>
1787 agg_jmp_p_vec_for_t_vec (vec<ipa_agg_jump_function_t> known_aggs)
1789 vec<ipa_agg_jump_function_p> ret;
1790 struct ipa_agg_jump_function *ajf;
1791 int i;
1793 ret.create (known_aggs.length ());
1794 FOR_EACH_VEC_ELT (known_aggs, i, ajf)
1795 ret.quick_push (ajf);
1796 return ret;
1799 /* Iterate over known values of parameters of NODE and estimate the local
1800 effects in terms of time and size they have. */
1802 static void
1803 estimate_local_effects (struct cgraph_node *node)
1805 struct ipa_node_params *info = IPA_NODE_REF (node);
1806 int i, count = ipa_get_param_count (info);
1807 vec<tree> known_csts, known_binfos;
1808 vec<ipa_agg_jump_function_t> known_aggs;
1809 vec<ipa_agg_jump_function_p> known_aggs_ptrs;
1810 bool always_const;
1811 int base_time = inline_summary (node)->time;
1812 int removable_params_cost;
1814 if (!count || !ipcp_versionable_function_p (node))
1815 return;
1817 if (dump_file && (dump_flags & TDF_DETAILS))
1818 fprintf (dump_file, "\nEstimating effects for %s/%i, base_time: %i.\n",
1819 cgraph_node_name (node), node->symbol.order, base_time);
1821 always_const = gather_context_independent_values (info, &known_csts,
1822 &known_binfos, &known_aggs,
1823 &removable_params_cost);
1824 known_aggs_ptrs = agg_jmp_p_vec_for_t_vec (known_aggs);
1825 if (always_const)
1827 struct caller_statistics stats;
1828 inline_hints hints;
1829 int time, size;
1831 init_caller_stats (&stats);
1832 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
1833 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1834 known_aggs_ptrs, &size, &time, &hints);
1835 time -= devirtualization_time_bonus (node, known_csts, known_binfos,
1836 known_aggs_ptrs);
1837 time -= hint_time_bonus (hints);
1838 time -= removable_params_cost;
1839 size -= stats.n_calls * removable_params_cost;
1841 if (dump_file)
1842 fprintf (dump_file, " - context independent values, size: %i, "
1843 "time_benefit: %i\n", size, base_time - time);
1845 if (size <= 0
1846 || cgraph_will_be_removed_from_program_if_no_direct_calls (node))
1848 info->do_clone_for_all_contexts = true;
1849 base_time = time;
1851 if (dump_file)
1852 fprintf (dump_file, " Decided to specialize for all "
1853 "known contexts, code not going to grow.\n");
1855 else if (good_cloning_opportunity_p (node, base_time - time,
1856 stats.freq_sum, stats.count_sum,
1857 size))
1859 if (size + overall_size <= max_new_size)
1861 info->do_clone_for_all_contexts = true;
1862 base_time = time;
1863 overall_size += size;
1865 if (dump_file)
1866 fprintf (dump_file, " Decided to specialize for all "
1867 "known contexts, growth deemed beneficial.\n");
1869 else if (dump_file && (dump_flags & TDF_DETAILS))
1870 fprintf (dump_file, " Not cloning for all contexts because "
1871 "max_new_size would be reached with %li.\n",
1872 size + overall_size);
1876 for (i = 0; i < count ; i++)
1878 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1879 struct ipcp_lattice *lat = &plats->itself;
1880 struct ipcp_value *val;
1881 int emc;
1883 if (lat->bottom
1884 || !lat->values
1885 || known_csts[i]
1886 || known_binfos[i])
1887 continue;
1889 for (val = lat->values; val; val = val->next)
1891 int time, size, time_benefit;
1892 inline_hints hints;
1894 if (TREE_CODE (val->value) != TREE_BINFO)
1896 known_csts[i] = val->value;
1897 known_binfos[i] = NULL_TREE;
1898 emc = estimate_move_cost (TREE_TYPE (val->value));
1900 else if (plats->virt_call)
1902 known_csts[i] = NULL_TREE;
1903 known_binfos[i] = val->value;
1904 emc = 0;
1906 else
1907 continue;
1909 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1910 known_aggs_ptrs, &size, &time,
1911 &hints);
1912 time_benefit = base_time - time
1913 + devirtualization_time_bonus (node, known_csts, known_binfos,
1914 known_aggs_ptrs)
1915 + hint_time_bonus (hints)
1916 + removable_params_cost + emc;
1918 gcc_checking_assert (size >= 0);
1919 /* The inliner-heuristics based estimates may think that in certain
1920 contexts some functions do not have any size at all but we want
1921 all specializations to have at least a tiny cost, if only to avoid
1922 dividing by zero. */
1923 if (size == 0)
1924 size = 1;
1926 if (dump_file && (dump_flags & TDF_DETAILS))
1928 fprintf (dump_file, " - estimates for value ");
1929 print_ipcp_constant_value (dump_file, val->value);
1930 fprintf (dump_file, " for ");
1931 ipa_dump_param (dump_file, info, i);
1932 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
1933 time_benefit, size);
1936 val->local_time_benefit = time_benefit;
1937 val->local_size_cost = size;
1939 known_binfos[i] = NULL_TREE;
1940 known_csts[i] = NULL_TREE;
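/* Do the same for values in aggregate lattices: temporarily push each
   candidate item into the corresponding known aggregate jump function,
   estimate the clone and pop the item again.  */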
1943 for (i = 0; i < count ; i++)
1945 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1946 struct ipa_agg_jump_function *ajf;
1947 struct ipcp_agg_lattice *aglat;
1949 if (plats->aggs_bottom || !plats->aggs)
1950 continue;
1952 ajf = &known_aggs[i];
1953 for (aglat = plats->aggs; aglat; aglat = aglat->next)
1955 struct ipcp_value *val;
1956 if (aglat->bottom || !aglat->values
1957 /* If the following is true, the one value is in known_aggs. */
1958 || (!plats->aggs_contain_variable
1959 && ipa_lat_is_single_const (aglat)))
1960 continue;
1962 for (val = aglat->values; val; val = val->next)
1964 int time, size, time_benefit;
1965 struct ipa_agg_jf_item item;
1966 inline_hints hints;
1968 item.offset = aglat->offset;
1969 item.value = val->value;
1970 vec_safe_push (ajf->items, item);
1972 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1973 known_aggs_ptrs, &size, &time,
1974 &hints);
1975 time_benefit = base_time - time
1976 + devirtualization_time_bonus (node, known_csts, known_binfos,
1977 known_aggs_ptrs)
1978 + hint_time_bonus (hints);
1979 gcc_checking_assert (size >= 0);
1980 if (size == 0)
1981 size = 1;
1983 if (dump_file && (dump_flags & TDF_DETAILS))
1985 fprintf (dump_file, " - estimates for value ");
1986 print_ipcp_constant_value (dump_file, val->value);
1987 fprintf (dump_file, " for ");
1988 ipa_dump_param (dump_file, info, i);
1989 fprintf (dump_file, "[%soffset: " HOST_WIDE_INT_PRINT_DEC
1990 "]: time_benefit: %i, size: %i\n",
1991 plats->aggs_by_ref ? "ref " : "",
1992 aglat->offset, time_benefit, size);
1995 val->local_time_benefit = time_benefit;
1996 val->local_size_cost = size;
1997 ajf->items->pop ();
2002 for (i = 0; i < count ; i++)
2003 vec_free (known_aggs[i].items);
2005 known_csts.release ();
2006 known_binfos.release ();
2007 known_aggs.release ();
2008 known_aggs_ptrs.release ();
2012 /* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
2013 topological sort of values. */
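/* The implementation below is essentially Tarjan's algorithm for strongly
   connected components: members of each SCC end up chained together through
   their scc_next fields while the component's representative is pushed onto
   the values_topo list.  */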
2015 static void
2016 add_val_to_toposort (struct ipcp_value *cur_val)
2018 static int dfs_counter = 0;
2019 static struct ipcp_value *stack;
2020 struct ipcp_value_source *src;
2022 if (cur_val->dfs)
2023 return;
2025 dfs_counter++;
2026 cur_val->dfs = dfs_counter;
2027 cur_val->low_link = dfs_counter;
2029 cur_val->topo_next = stack;
2030 stack = cur_val;
2031 cur_val->on_stack = true;
2033 for (src = cur_val->sources; src; src = src->next)
2034 if (src->val)
2036 if (src->val->dfs == 0)
2038 add_val_to_toposort (src->val);
2039 if (src->val->low_link < cur_val->low_link)
2040 cur_val->low_link = src->val->low_link;
2042 else if (src->val->on_stack
2043 && src->val->dfs < cur_val->low_link)
2044 cur_val->low_link = src->val->dfs;
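/* If CUR_VAL is the root of an SCC, pop the whole component off the DFS
   stack and chain its members together via scc_next.  */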
2047 if (cur_val->dfs == cur_val->low_link)
2049 struct ipcp_value *v, *scc_list = NULL;
2053 v = stack;
2054 stack = v->topo_next;
2055 v->on_stack = false;
2057 v->scc_next = scc_list;
2058 scc_list = v;
2060 while (v != cur_val);
2062 cur_val->topo_next = values_topo;
2063 values_topo = cur_val;
2067 /* Add all values in lattices associated with NODE to the topological sort if
2068 they are not there yet. */
2070 static void
2071 add_all_node_vals_to_toposort (struct cgraph_node *node)
2073 struct ipa_node_params *info = IPA_NODE_REF (node);
2074 int i, count = ipa_get_param_count (info);
2076 for (i = 0; i < count ; i++)
2078 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2079 struct ipcp_lattice *lat = &plats->itself;
2080 struct ipcp_agg_lattice *aglat;
2081 struct ipcp_value *val;
2083 if (!lat->bottom)
2084 for (val = lat->values; val; val = val->next)
2085 add_val_to_toposort (val);
2087 if (!plats->aggs_bottom)
2088 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2089 if (!aglat->bottom)
2090 for (val = aglat->values; val; val = val->next)
2091 add_val_to_toposort (val);
2095 /* Do one pass of constant propagation along the call graph edges, from
2096 callers to callees (requires topological ordering in TOPO), iterating over
2097 strongly connected components. */
2099 static void
2100 propagate_constants_topo (struct topo_info *topo)
2102 int i;
2104 for (i = topo->nnodes - 1; i >= 0; i--)
2106 unsigned j;
2107 struct cgraph_node *v, *node = topo->order[i];
2108 vec<cgraph_node_ptr> cycle_nodes = ipa_get_nodes_in_cycle (node);
2110 /* First, iteratively propagate within the strongly connected component
2111 until all lattices stabilize. */
2112 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
2113 if (cgraph_function_with_gimple_body_p (v))
2114 push_node_to_stack (topo, v);
2116 v = pop_node_from_stack (topo);
2117 while (v)
2119 struct cgraph_edge *cs;
2121 for (cs = v->callees; cs; cs = cs->next_callee)
2122 if (ipa_edge_within_scc (cs)
2123 && propagate_constants_accross_call (cs))
2124 push_node_to_stack (topo, cs->callee);
2125 v = pop_node_from_stack (topo);
2128 /* Afterwards, propagate along edges leading out of the SCC, calculate
2129 the local effects of the discovered constants and add all valid values
2130 to their topological sort. */
2131 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
2132 if (cgraph_function_with_gimple_body_p (v))
2134 struct cgraph_edge *cs;
2136 estimate_local_effects (v);
2137 add_all_node_vals_to_toposort (v);
2138 for (cs = v->callees; cs; cs = cs->next_callee)
2139 if (!ipa_edge_within_scc (cs))
2140 propagate_constants_accross_call (cs);
2142 cycle_nodes.release ();
2147 /* Return the sum of A and B if neither of them is bigger than INT_MAX/2,
2148 otherwise return the bigger one. */
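/* This saturating addition is used when summing up the time benefits and size
   costs of whole SCCs of values (e.g. in propagate_effects below) so that the
   accumulators cannot overflow.  */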
2150 static int
2151 safe_add (int a, int b)
2153 if (a > INT_MAX/2 || b > INT_MAX/2)
2154 return a > b ? a : b;
2155 else
2156 return a + b;
2160 /* Propagate the estimated effects of individual values along the topological
2161 order, from the dependent values to those they depend on. */
2163 static void
2164 propagate_effects (void)
2166 struct ipcp_value *base;
2168 for (base = values_topo; base; base = base->topo_next)
2170 struct ipcp_value_source *src;
2171 struct ipcp_value *val;
2172 int time = 0, size = 0;
2174 for (val = base; val; val = val->scc_next)
2176 time = safe_add (time,
2177 val->local_time_benefit + val->prop_time_benefit);
2178 size = safe_add (size, val->local_size_cost + val->prop_size_cost);
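/* Add the benefit and cost accumulated over the whole SCC to every value that
   feeds into it over an edge that may still be hot.  */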
2181 for (val = base; val; val = val->scc_next)
2182 for (src = val->sources; src; src = src->next)
2183 if (src->val
2184 && cgraph_maybe_hot_edge_p (src->cs))
2186 src->val->prop_time_benefit = safe_add (time,
2187 src->val->prop_time_benefit);
2188 src->val->prop_size_cost = safe_add (size,
2189 src->val->prop_size_cost);
2195 /* Propagate constants, binfos and their effects from the summaries
2196 interprocedurally. */
2198 static void
2199 ipcp_propagate_stage (struct topo_info *topo)
2201 struct cgraph_node *node;
2203 if (dump_file)
2204 fprintf (dump_file, "\n Propagating constants:\n\n");
2206 if (in_lto_p)
2207 ipa_update_after_lto_read ();
2210 FOR_EACH_DEFINED_FUNCTION (node)
2212 struct ipa_node_params *info = IPA_NODE_REF (node);
2214 determine_versionability (node);
2215 if (cgraph_function_with_gimple_body_p (node))
2217 info->lattices = XCNEWVEC (struct ipcp_param_lattices,
2218 ipa_get_param_count (info));
2219 initialize_node_lattices (node);
2221 if (node->symbol.definition && !node->symbol.alias)
2222 overall_size += inline_summary (node)->self_size;
2223 if (node->count > max_count)
2224 max_count = node->count;
2227 max_new_size = overall_size;
2228 if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
2229 max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
2230 max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;
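/* For example, with --param ipcp-unit-growth=10 and an overall_size of 20000
   (assuming that is at least PARAM_LARGE_UNIT_INSNS), max_new_size becomes
   20000 + 20000 * 10 / 100 + 1 = 22001.  */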
2232 if (dump_file)
2233 fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
2234 overall_size, max_new_size);
2236 propagate_constants_topo (topo);
2237 #ifdef ENABLE_CHECKING
2238 ipcp_verify_propagated_values ();
2239 #endif
2240 propagate_effects ();
2242 if (dump_file)
2244 fprintf (dump_file, "\nIPA lattices after all propagation:\n");
2245 print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
2249 /* NODE is a new clone with known KNOWN_VALS and aggregate values AGGVALS.
2250 Discover which of its indirect calls now have a known target and make them direct. */
2252 static void
2253 ipcp_discover_new_direct_edges (struct cgraph_node *node,
2254 vec<tree> known_vals,
2255 struct ipa_agg_replacement_value *aggvals)
2257 struct cgraph_edge *ie, *next_ie;
2258 bool found = false;
2260 for (ie = node->indirect_calls; ie; ie = next_ie)
2262 tree target;
2264 next_ie = ie->next_callee;
2265 target = ipa_get_indirect_edge_target_1 (ie, known_vals, vNULL, vNULL,
2266 aggvals);
2267 if (target)
2269 bool agg_contents = ie->indirect_info->agg_contents;
2270 bool polymorphic = ie->indirect_info->polymorphic;
2271 int param_index = ie->indirect_info->param_index;
2272 struct cgraph_edge *cs = ipa_make_edge_direct_to_target (ie, target);
2273 found = true;
2275 if (cs && !agg_contents && !polymorphic)
2277 struct ipa_node_params *info = IPA_NODE_REF (node);
2278 int c = ipa_get_controlled_uses (info, param_index);
2279 if (c != IPA_UNDESCRIBED_USE)
2281 struct ipa_ref *to_del;
2283 c--;
2284 ipa_set_controlled_uses (info, param_index, c);
2285 if (dump_file && (dump_flags & TDF_DETAILS))
2286 fprintf (dump_file, " controlled uses count of param "
2287 "%i bumped down to %i\n", param_index, c);
2288 if (c == 0
2289 && (to_del = ipa_find_reference ((symtab_node) node,
2290 (symtab_node) cs->callee,
2291 NULL, 0)))
2293 if (dump_file && (dump_flags & TDF_DETAILS))
2294 fprintf (dump_file, " and even removing its "
2295 "cloning-created reference\n");
2296 ipa_remove_reference (to_del);
2302 /* Turning calls to direct calls will improve overall summary. */
2303 if (found)
2304 inline_update_overall_summary (node);
2307 /* Vector of pointers which form linked lists of clones of an original cgraph
2308 edge. */
2310 static vec<cgraph_edge_p> next_edge_clone;
2312 static inline void
2313 grow_next_edge_clone_vector (void)
2315 if (next_edge_clone.length ()
2316 <= (unsigned) cgraph_edge_max_uid)
2317 next_edge_clone.safe_grow_cleared (cgraph_edge_max_uid + 1);
2320 /* Edge duplication hook to grow the appropriate linked list in
2321 next_edge_clone. */
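/* The new clone DST is spliced into the list right behind SRC, so starting at
   an original edge and repeatedly calling get_next_cgraph_edge_clone visits
   all of its clones.  */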
2323 static void
2324 ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2325 __attribute__((unused)) void *data)
2327 grow_next_edge_clone_vector ();
2328 next_edge_clone[dst->uid] = next_edge_clone[src->uid];
2329 next_edge_clone[src->uid] = dst;
2332 /* Return the aggregate value that NODE, which is an IPA-CP clone, is known to
2333 have at a given OFFSET of a parameter with the given INDEX, or NULL_TREE. */
2335 static tree
2336 get_clone_agg_value (struct cgraph_node *node, HOST_WIDEST_INT offset,
2337 int index)
2339 struct ipa_agg_replacement_value *aggval;
2341 aggval = ipa_get_agg_replacements_for_node (node);
2342 while (aggval)
2344 if (aggval->offset == offset
2345 && aggval->index == index)
2346 return aggval->value;
2347 aggval = aggval->next;
2349 return NULL_TREE;
2352 /* Return true if edge CS does bring about the value described by SRC. */
2354 static bool
2355 cgraph_edge_brings_value_p (struct cgraph_edge *cs,
2356 struct ipcp_value_source *src)
2358 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2359 struct ipa_node_params *dst_info = IPA_NODE_REF (cs->callee);
2361 if ((dst_info->ipcp_orig_node && !dst_info->is_all_contexts_clone)
2362 || caller_info->node_dead)
2363 return false;
2364 if (!src->val)
2365 return true;
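/* If the caller is itself an IPA-CP clone, look the value up among the
   constants it has been specialized for; otherwise consult its lattices.  */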
2367 if (caller_info->ipcp_orig_node)
2369 tree t;
2370 if (src->offset == -1)
2371 t = caller_info->known_vals[src->index];
2372 else
2373 t = get_clone_agg_value (cs->caller, src->offset, src->index);
2374 return (t != NULL_TREE
2375 && values_equal_for_ipcp_p (src->val->value, t));
2377 else
2379 struct ipcp_agg_lattice *aglat;
2380 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
2381 src->index);
2382 if (src->offset == -1)
2383 return (ipa_lat_is_single_const (&plats->itself)
2384 && values_equal_for_ipcp_p (src->val->value,
2385 plats->itself.values->value));
2386 else
2388 if (plats->aggs_bottom || plats->aggs_contain_variable)
2389 return false;
2390 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2391 if (aglat->offset == src->offset)
2392 return (ipa_lat_is_single_const (aglat)
2393 && values_equal_for_ipcp_p (src->val->value,
2394 aglat->values->value));
2396 return false;
2400 /* Get the next clone in the linked list of clones of an edge. */
2402 static inline struct cgraph_edge *
2403 get_next_cgraph_edge_clone (struct cgraph_edge *cs)
2405 return next_edge_clone[cs->uid];
2408 /* Given VAL, iterate over all its sources and if they still hold, add their
2409 edge frequencies, counts and number into *FREQ_SUM, *COUNT_SUM and
2410 *CALLER_COUNT respectively.  Return true if any of these edges may be hot. */
2412 static bool
2413 get_info_about_necessary_edges (struct ipcp_value *val, int *freq_sum,
2414 gcov_type *count_sum, int *caller_count)
2416 struct ipcp_value_source *src;
2417 int freq = 0, count = 0;
2418 gcov_type cnt = 0;
2419 bool hot = false;
2421 for (src = val->sources; src; src = src->next)
2423 struct cgraph_edge *cs = src->cs;
2424 while (cs)
2426 if (cgraph_edge_brings_value_p (cs, src))
2428 count++;
2429 freq += cs->frequency;
2430 cnt += cs->count;
2431 hot |= cgraph_maybe_hot_edge_p (cs);
2433 cs = get_next_cgraph_edge_clone (cs);
2437 *freq_sum = freq;
2438 *count_sum = cnt;
2439 *caller_count = count;
2440 return hot;
2443 /* Return a vector of incoming edges that do bring value VAL. It is assumed
2444 their number is known and equal to CALLER_COUNT. */
2446 static vec<cgraph_edge_p>
2447 gather_edges_for_value (struct ipcp_value *val, int caller_count)
2449 struct ipcp_value_source *src;
2450 vec<cgraph_edge_p> ret;
2452 ret.create (caller_count);
2453 for (src = val->sources; src; src = src->next)
2455 struct cgraph_edge *cs = src->cs;
2456 while (cs)
2458 if (cgraph_edge_brings_value_p (cs, src))
2459 ret.quick_push (cs);
2460 cs = get_next_cgraph_edge_clone (cs);
2464 return ret;
2467 /* Construct a replacement map for a known VALUE of a formal parameter
2468 identified by PARM_NUM.  Return it or NULL if for some reason it cannot be created. */
2470 static struct ipa_replace_map *
2471 get_replacement_map (struct ipa_node_params *info, tree value, int parm_num)
2473 struct ipa_replace_map *replace_map;
2476 replace_map = ggc_alloc_ipa_replace_map ();
2477 if (dump_file)
2479 fprintf (dump_file, " replacing ");
2480 ipa_dump_param (dump_file, info, parm_num);
2482 fprintf (dump_file, " with const ");
2483 print_generic_expr (dump_file, value, 0);
2484 fprintf (dump_file, "\n");
2486 replace_map->old_tree = NULL;
2487 replace_map->parm_num = parm_num;
2488 replace_map->new_tree = value;
2489 replace_map->replace_p = true;
2490 replace_map->ref_p = false;
2492 return replace_map;
2495 /* Dump new profiling counts. */
2497 static void
2498 dump_profile_updates (struct cgraph_node *orig_node,
2499 struct cgraph_node *new_node)
2501 struct cgraph_edge *cs;
2503 fprintf (dump_file, " setting count of the specialized node to "
2504 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) new_node->count);
2505 for (cs = new_node->callees; cs ; cs = cs->next_callee)
2506 fprintf (dump_file, " edge to %s has count "
2507 HOST_WIDE_INT_PRINT_DEC "\n",
2508 cgraph_node_name (cs->callee), (HOST_WIDE_INT) cs->count);
2510 fprintf (dump_file, " setting count of the original node to "
2511 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) orig_node->count);
2512 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
2513 fprintf (dump_file, " edge to %s is left with "
2514 HOST_WIDE_INT_PRINT_DEC "\n",
2515 cgraph_node_name (cs->callee), (HOST_WIDE_INT) cs->count);
2518 /* After a specialized NEW_NODE version of ORIG_NODE has been created, update
2519 their profile information to reflect this. */
2521 static void
2522 update_profiling_info (struct cgraph_node *orig_node,
2523 struct cgraph_node *new_node)
2525 struct cgraph_edge *cs;
2526 struct caller_statistics stats;
2527 gcov_type new_sum, orig_sum;
2528 gcov_type remainder, orig_node_count = orig_node->count;
2530 if (orig_node_count == 0)
2531 return;
2533 init_caller_stats (&stats);
2534 cgraph_for_node_and_aliases (orig_node, gather_caller_stats, &stats, false);
2535 orig_sum = stats.count_sum;
2536 init_caller_stats (&stats);
2537 cgraph_for_node_and_aliases (new_node, gather_caller_stats, &stats, false);
2538 new_sum = stats.count_sum;
2540 if (orig_node_count < orig_sum + new_sum)
2542 if (dump_file)
2543 fprintf (dump_file, " Problem: node %s/%i has too low count "
2544 HOST_WIDE_INT_PRINT_DEC " while the sum of incoming "
2545 "counts is " HOST_WIDE_INT_PRINT_DEC "\n",
2546 cgraph_node_name (orig_node), orig_node->symbol.order,
2547 (HOST_WIDE_INT) orig_node_count,
2548 (HOST_WIDE_INT) (orig_sum + new_sum));
2550 orig_node_count = (orig_sum + new_sum) * 12 / 10;
2551 if (dump_file)
2552 fprintf (dump_file, " proceeding by pretending it was "
2553 HOST_WIDE_INT_PRINT_DEC "\n",
2554 (HOST_WIDE_INT) orig_node_count);
2557 new_node->count = new_sum;
2558 remainder = orig_node_count - new_sum;
2559 orig_node->count = remainder;
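/* Scale the counts on the outgoing edges of both nodes in proportion to the
   split of the node count between them.  */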
2561 for (cs = new_node->callees; cs ; cs = cs->next_callee)
2562 if (cs->frequency)
2563 cs->count = apply_probability (cs->count,
2564 GCOV_COMPUTE_SCALE (new_sum,
2565 orig_node_count));
2566 else
2567 cs->count = 0;
2569 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
2570 cs->count = apply_probability (cs->count,
2571 GCOV_COMPUTE_SCALE (remainder,
2572 orig_node_count));
2574 if (dump_file)
2575 dump_profile_updates (orig_node, new_node);
2578 /* Update the respective profile of specialized NEW_NODE and the original
2579 ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
2580 have been redirected to the specialized version. */
2582 static void
2583 update_specialized_profile (struct cgraph_node *new_node,
2584 struct cgraph_node *orig_node,
2585 gcov_type redirected_sum)
2587 struct cgraph_edge *cs;
2588 gcov_type new_node_count, orig_node_count = orig_node->count;
2590 if (dump_file)
2591 fprintf (dump_file, " the sum of counts of redirected edges is "
2592 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) redirected_sum);
2593 if (orig_node_count == 0)
2594 return;
2596 gcc_assert (orig_node_count >= redirected_sum);
2598 new_node_count = new_node->count;
2599 new_node->count += redirected_sum;
2600 orig_node->count -= redirected_sum;
2602 for (cs = new_node->callees; cs ; cs = cs->next_callee)
2603 if (cs->frequency)
2604 cs->count += apply_probability (cs->count,
2605 GCOV_COMPUTE_SCALE (redirected_sum,
2606 new_node_count));
2607 else
2608 cs->count = 0;
2610 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
2612 gcov_type dec = apply_probability (cs->count,
2613 GCOV_COMPUTE_SCALE (redirected_sum,
2614 orig_node_count));
2615 if (dec < cs->count)
2616 cs->count -= dec;
2617 else
2618 cs->count = 0;
2621 if (dump_file)
2622 dump_profile_updates (orig_node, new_node);
2625 /* Create a specialized version of NODE with known constants and types of
2626 parameters in KNOWN_VALS and redirect all edges in CALLERS to it. */
2628 static struct cgraph_node *
2629 create_specialized_node (struct cgraph_node *node,
2630 vec<tree> known_vals,
2631 struct ipa_agg_replacement_value *aggvals,
2632 vec<cgraph_edge_p> callers)
2634 struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
2635 vec<ipa_replace_map_p, va_gc> *replace_trees = NULL;
2636 struct ipa_agg_replacement_value *av;
2637 struct cgraph_node *new_node;
2638 int i, count = ipa_get_param_count (info);
2639 bitmap args_to_skip;
2641 gcc_assert (!info->ipcp_orig_node);
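/* When the signature can change, parameters that are replaced by known scalar
   constants or that are not used at all are dropped from the clone.  */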
2643 if (node->local.can_change_signature)
2645 args_to_skip = BITMAP_GGC_ALLOC ();
2646 for (i = 0; i < count; i++)
2648 tree t = known_vals[i];
2650 if ((t && TREE_CODE (t) != TREE_BINFO)
2651 || !ipa_is_param_used (info, i))
2652 bitmap_set_bit (args_to_skip, i);
2655 else
2657 args_to_skip = NULL;
2658 if (dump_file && (dump_flags & TDF_DETAILS))
2659 fprintf (dump_file, " cannot change function signature\n");
2662 for (i = 0; i < count ; i++)
2664 tree t = known_vals[i];
2665 if (t && TREE_CODE (t) != TREE_BINFO)
2667 struct ipa_replace_map *replace_map;
2669 replace_map = get_replacement_map (info, t, i);
2670 if (replace_map)
2671 vec_safe_push (replace_trees, replace_map);
2675 new_node = cgraph_create_virtual_clone (node, callers, replace_trees,
2676 args_to_skip, "constprop");
2677 ipa_set_node_agg_value_chain (new_node, aggvals);
2678 for (av = aggvals; av; av = av->next)
2679 ipa_maybe_record_reference ((symtab_node) new_node, av->value,
2680 IPA_REF_ADDR, NULL);
2682 if (dump_file && (dump_flags & TDF_DETAILS))
2684 fprintf (dump_file, " the new node is %s/%i.\n",
2685 cgraph_node_name (new_node), new_node->symbol.order);
2686 if (aggvals)
2687 ipa_dump_agg_replacement_values (dump_file, aggvals);
2689 gcc_checking_assert (ipa_node_params_vector.exists ()
2690 && (ipa_node_params_vector.length ()
2691 > (unsigned) cgraph_max_uid));
2692 update_profiling_info (node, new_node);
2693 new_info = IPA_NODE_REF (new_node);
2694 new_info->ipcp_orig_node = node;
2695 new_info->known_vals = known_vals;
2697 ipcp_discover_new_direct_edges (new_node, known_vals, aggvals);
2699 callers.release ();
2700 return new_node;
2703 /* Given a NODE, and a subset of its CALLERS, try to populate blank slots in
2704 KNOWN_VALS with constants and types that are also known for all of the
2705 CALLERS. */
2707 static void
2708 find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
2709 vec<tree> known_vals,
2710 vec<cgraph_edge_p> callers)
2712 struct ipa_node_params *info = IPA_NODE_REF (node);
2713 int i, count = ipa_get_param_count (info);
2715 for (i = 0; i < count ; i++)
2717 struct cgraph_edge *cs;
2718 tree newval = NULL_TREE;
2719 int j;
2721 if (ipa_get_scalar_lat (info, i)->bottom || known_vals[i])
2722 continue;
2724 FOR_EACH_VEC_ELT (callers, j, cs)
2726 struct ipa_jump_func *jump_func;
2727 tree t;
2729 if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
2731 newval = NULL_TREE;
2732 break;
2734 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
2735 t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func);
2736 if (!t
2737 || (newval
2738 && !values_equal_for_ipcp_p (t, newval)))
2740 newval = NULL_TREE;
2741 break;
2743 else
2744 newval = t;
2747 if (newval)
2749 if (dump_file && (dump_flags & TDF_DETAILS))
2751 fprintf (dump_file, " adding an extra known scalar value ");
2752 print_ipcp_constant_value (dump_file, newval);
2753 fprintf (dump_file, " for ");
2754 ipa_dump_param (dump_file, info, i);
2755 fprintf (dump_file, "\n");
2758 known_vals[i] = newval;
2763 /* Go through PLATS and create a vector of items consisting of the values and
2764 offsets (minus OFFSET) of those lattices that contain only a single value. */
2766 static vec<ipa_agg_jf_item_t>
2767 copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
2769 vec<ipa_agg_jf_item_t> res = vNULL;
2771 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
2772 return vNULL;
2774 for (struct ipcp_agg_lattice *aglat = plats->aggs; aglat; aglat = aglat->next)
2775 if (ipa_lat_is_single_const (aglat))
2777 struct ipa_agg_jf_item ti;
2778 ti.offset = aglat->offset - offset;
2779 ti.value = aglat->values->value;
2780 res.safe_push (ti);
2782 return res;
2785 /* Intersect all values in INTER with single value lattices in PLATS (while
2786 subtracting OFFSET). */
2788 static void
2789 intersect_with_plats (struct ipcp_param_lattices *plats,
2790 vec<ipa_agg_jf_item_t> *inter,
2791 HOST_WIDE_INT offset)
2793 struct ipcp_agg_lattice *aglat;
2794 struct ipa_agg_jf_item *item;
2795 int k;
2797 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
2799 inter->release ();
2800 return;
2803 aglat = plats->aggs;
2804 FOR_EACH_VEC_ELT (*inter, k, item)
2806 bool found = false;
2807 if (!item->value)
2808 continue;
2809 while (aglat)
2811 if (aglat->offset - offset > item->offset)
2812 break;
2813 if (aglat->offset - offset == item->offset)
2815 gcc_checking_assert (item->value);
2816 if (values_equal_for_ipcp_p (item->value, aglat->values->value))
2817 found = true;
2818 break;
2820 aglat = aglat->next;
2822 if (!found)
2823 item->value = NULL_TREE;
2827 /* Copy aggregate replacement values of NODE (which is an IPA-CP clone) to the
2828 vector result while subtracting OFFSET from the individual value offsets. */
2830 static vec<ipa_agg_jf_item_t>
2831 agg_replacements_to_vector (struct cgraph_node *node, int index,
2832 HOST_WIDE_INT offset)
2834 struct ipa_agg_replacement_value *av;
2835 vec<ipa_agg_jf_item_t> res = vNULL;
2837 for (av = ipa_get_agg_replacements_for_node (node); av; av = av->next)
2838 if (av->index == index
2839 && (av->offset - offset) >= 0)
2841 struct ipa_agg_jf_item item;
2842 gcc_checking_assert (av->value);
2843 item.offset = av->offset - offset;
2844 item.value = av->value;
2845 res.safe_push (item);
2848 return res;
2851 /* Intersect all values in INTER with those that we have already scheduled to
2852 be replaced in parameter number INDEX of NODE, which is an IPA-CP clone
2853 (while subtracting OFFSET). */
2855 static void
2856 intersect_with_agg_replacements (struct cgraph_node *node, int index,
2857 vec<ipa_agg_jf_item_t> *inter,
2858 HOST_WIDE_INT offset)
2860 struct ipa_agg_replacement_value *srcvals;
2861 struct ipa_agg_jf_item *item;
2862 int i;
2864 srcvals = ipa_get_agg_replacements_for_node (node);
2865 if (!srcvals)
2867 inter->release ();
2868 return;
2871 FOR_EACH_VEC_ELT (*inter, i, item)
2873 struct ipa_agg_replacement_value *av;
2874 bool found = false;
2875 if (!item->value)
2876 continue;
2877 for (av = srcvals; av; av = av->next)
2879 gcc_checking_assert (av->value);
2880 if (av->index == index
2881 && av->offset - offset == item->offset)
2883 if (values_equal_for_ipcp_p (item->value, av->value))
2884 found = true;
2885 break;
2888 if (!found)
2889 item->value = NULL_TREE;
2893 /* Intersect values in INTER with aggregate values that come along edge CS to
2894 parameter number INDEX and return it. If INTER does not actually exist yet,
2895 copy all incoming values to it. If we determine we ended up with no values
2896 whatsoever, return a released vector. */
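/* Three kinds of incoming information are handled below: simple pass-through
   jump functions (values are taken from the caller's lattices or, if the
   caller is itself a clone, from its already scheduled replacements),
   ancestor jump functions (the same, but with offsets adjusted by the
   ancestor offset) and jump functions carrying explicit aggregate items.  */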
2898 static vec<ipa_agg_jf_item_t>
2899 intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
2900 vec<ipa_agg_jf_item_t> inter)
2902 struct ipa_jump_func *jfunc;
2903 jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), index);
2904 if (jfunc->type == IPA_JF_PASS_THROUGH
2905 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2907 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2908 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
2910 if (caller_info->ipcp_orig_node)
2912 struct cgraph_node *orig_node = caller_info->ipcp_orig_node;
2913 struct ipcp_param_lattices *orig_plats;
2914 orig_plats = ipa_get_parm_lattices (IPA_NODE_REF (orig_node),
2915 src_idx);
2916 if (agg_pass_through_permissible_p (orig_plats, jfunc))
2918 if (!inter.exists ())
2919 inter = agg_replacements_to_vector (cs->caller, src_idx, 0);
2920 else
2921 intersect_with_agg_replacements (cs->caller, src_idx,
2922 &inter, 0);
2925 else
2927 struct ipcp_param_lattices *src_plats;
2928 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
2929 if (agg_pass_through_permissible_p (src_plats, jfunc))
2931 /* Currently we do not produce clobber aggregate jump
2932 functions, adjust when we do. */
2933 gcc_checking_assert (!jfunc->agg.items);
2934 if (!inter.exists ())
2935 inter = copy_plats_to_inter (src_plats, 0);
2936 else
2937 intersect_with_plats (src_plats, &inter, 0);
2941 else if (jfunc->type == IPA_JF_ANCESTOR
2942 && ipa_get_jf_ancestor_agg_preserved (jfunc))
2944 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2945 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
2946 struct ipcp_param_lattices *src_plats;
2947 HOST_WIDE_INT delta = ipa_get_jf_ancestor_offset (jfunc);
2949 if (caller_info->ipcp_orig_node)
2951 if (!inter.exists ())
2952 inter = agg_replacements_to_vector (cs->caller, src_idx, delta);
2953 else
2954 intersect_with_agg_replacements (cs->caller, src_idx, &inter,
2955 delta);
2957 else
2959 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
2960 /* Currently we do not produce clobber aggregate jump
2961 functions, adjust when we do. */
2962 gcc_checking_assert (!src_plats->aggs || !jfunc->agg.items);
2963 if (!inter.exists ())
2964 inter = copy_plats_to_inter (src_plats, delta);
2965 else
2966 intersect_with_plats (src_plats, &inter, delta);
2969 else if (jfunc->agg.items)
2971 struct ipa_agg_jf_item *item;
2972 int k;
2974 if (!inter.exists ())
2975 for (unsigned i = 0; i < jfunc->agg.items->length (); i++)
2976 inter.safe_push ((*jfunc->agg.items)[i]);
2977 else
2978 FOR_EACH_VEC_ELT (inter, k, item)
2980 int l = 0;
2981 bool found = false;
2983 if (!item->value)
2984 continue;
2986 while ((unsigned) l < jfunc->agg.items->length ())
2988 struct ipa_agg_jf_item *ti;
2989 ti = &(*jfunc->agg.items)[l];
2990 if (ti->offset > item->offset)
2991 break;
2992 if (ti->offset == item->offset)
2994 gcc_checking_assert (ti->value);
2995 if (values_equal_for_ipcp_p (item->value,
2996 ti->value))
2997 found = true;
2998 break;
3000 l++;
3002 if (!found)
3003 item->value = NULL;
3006 else
3008 inter.release ();
3009 return vec<ipa_agg_jf_item_t>();
3011 return inter;
3014 /* Look at edges in CALLERS and collect all known aggregate values that arrive
3015 from all of them. */
3017 static struct ipa_agg_replacement_value *
3018 find_aggregate_values_for_callers_subset (struct cgraph_node *node,
3019 vec<cgraph_edge_p> callers)
3021 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
3022 struct ipa_agg_replacement_value *res = NULL;
3023 struct cgraph_edge *cs;
3024 int i, j, count = ipa_get_param_count (dest_info);
3026 FOR_EACH_VEC_ELT (callers, j, cs)
3028 int c = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
3029 if (c < count)
3030 count = c;
3033 for (i = 0; i < count ; i++)
3035 struct cgraph_edge *cs;
3036 vec<ipa_agg_jf_item_t> inter = vNULL;
3037 struct ipa_agg_jf_item *item;
3038 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (dest_info, i);
3039 int j;
3041 /* Among other things, the following check should deal with all by_ref
3042 mismatches. */
3043 if (plats->aggs_bottom)
3044 continue;
3046 FOR_EACH_VEC_ELT (callers, j, cs)
3048 inter = intersect_aggregates_with_edge (cs, i, inter);
3050 if (!inter.exists ())
3051 goto next_param;
3054 FOR_EACH_VEC_ELT (inter, j, item)
3056 struct ipa_agg_replacement_value *v;
3058 if (!item->value)
3059 continue;
3061 v = ggc_alloc_ipa_agg_replacement_value ();
3062 v->index = i;
3063 v->offset = item->offset;
3064 v->value = item->value;
3065 v->by_ref = plats->aggs_by_ref;
3066 v->next = res;
3067 res = v;
3070 next_param:
3071 if (inter.exists ())
3072 inter.release ();
3074 return res;
3077 /* Turn KNOWN_AGGS into a list of aggregate replacement values. */
3079 static struct ipa_agg_replacement_value *
3080 known_aggs_to_agg_replacement_list (vec<ipa_agg_jump_function_t> known_aggs)
3082 struct ipa_agg_replacement_value *res = NULL;
3083 struct ipa_agg_jump_function *aggjf;
3084 struct ipa_agg_jf_item *item;
3085 int i, j;
3087 FOR_EACH_VEC_ELT (known_aggs, i, aggjf)
3088 FOR_EACH_VEC_SAFE_ELT (aggjf->items, j, item)
3090 struct ipa_agg_replacement_value *v;
3091 v = ggc_alloc_ipa_agg_replacement_value ();
3092 v->index = i;
3093 v->offset = item->offset;
3094 v->value = item->value;
3095 v->by_ref = aggjf->by_ref;
3096 v->next = res;
3097 res = v;
3099 return res;
3102 /* Determine whether CS also brings all scalar values that the NODE is
3103 specialized for. */
3105 static bool
3106 cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
3107 struct cgraph_node *node)
3109 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
3110 int count = ipa_get_param_count (dest_info);
3111 struct ipa_node_params *caller_info;
3112 struct ipa_edge_args *args;
3113 int i;
3115 caller_info = IPA_NODE_REF (cs->caller);
3116 args = IPA_EDGE_REF (cs);
3117 for (i = 0; i < count; i++)
3119 struct ipa_jump_func *jump_func;
3120 tree val, t;
3122 val = dest_info->known_vals[i];
3123 if (!val)
3124 continue;
3126 if (i >= ipa_get_cs_argument_count (args))
3127 return false;
3128 jump_func = ipa_get_ith_jump_func (args, i);
3129 t = ipa_value_from_jfunc (caller_info, jump_func);
3130 if (!t || !values_equal_for_ipcp_p (val, t))
3131 return false;
3133 return true;
3136 /* Determine whether CS also brings all aggregate values that NODE is
3137 specialized for. */
3138 static bool
3139 cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
3140 struct cgraph_node *node)
3142 struct ipa_node_params *orig_caller_info = IPA_NODE_REF (cs->caller);
3143 struct ipa_agg_replacement_value *aggval;
3144 int i, ec, count;
3146 aggval = ipa_get_agg_replacements_for_node (node);
3147 if (!aggval)
3148 return true;
3150 count = ipa_get_param_count (IPA_NODE_REF (node));
3151 ec = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
3152 if (ec < count)
3153 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3154 if (av->index >= ec)
3155 return false;
3157 if (orig_caller_info->ipcp_orig_node)
3158 orig_caller_info = IPA_NODE_REF (orig_caller_info->ipcp_orig_node);
3160 for (i = 0; i < count; i++)
3162 static vec<ipa_agg_jf_item_t> values = vec<ipa_agg_jf_item_t>();
3163 struct ipcp_param_lattices *plats;
3164 bool interesting = false;
3165 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3166 if (av->index == i)
3168 interesting = true;
3169 break;
3171 if (!interesting)
3172 continue;
3174 plats = ipa_get_parm_lattices (orig_caller_info, i);
3175 if (plats->aggs_bottom)
3176 return false;
3178 values = intersect_aggregates_with_edge (cs, i, values);
3179 if (!values.exists ())
3180 return false;
3182 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3183 if (av->index == i)
3185 struct ipa_agg_jf_item *item;
3186 int j;
3187 bool found = false;
3188 FOR_EACH_VEC_ELT (values, j, item)
3189 if (item->value
3190 && item->offset == av->offset
3191 && values_equal_for_ipcp_p (item->value, av->value))
3193 found = true;
3194 break;
3196 if (!found)
3198 values.release ();
3199 return false;
3203 return true;
3206 /* Given an original NODE and a VAL for which we have already created a
3207 specialized clone, check whether there are incoming edges that still lead
3208 into the old node but now also bring the requested value and also conform to
3209 all other criteria such that they can be redirected to the specialized node.
3210 This function can therefore redirect the final edge in an SCC. */
3212 static void
3213 perhaps_add_new_callers (struct cgraph_node *node, struct ipcp_value *val)
3215 struct ipcp_value_source *src;
3216 gcov_type redirected_sum = 0;
3218 for (src = val->sources; src; src = src->next)
3220 struct cgraph_edge *cs = src->cs;
3221 while (cs)
3223 enum availability availability;
3224 struct cgraph_node *dst = cgraph_function_node (cs->callee,
3225 &availability);
3226 if ((dst == node || IPA_NODE_REF (dst)->is_all_contexts_clone)
3227 && availability > AVAIL_OVERWRITABLE
3228 && cgraph_edge_brings_value_p (cs, src))
3230 if (cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
3231 && cgraph_edge_brings_all_agg_vals_for_node (cs,
3232 val->spec_node))
3234 if (dump_file)
3235 fprintf (dump_file, " - adding an extra caller %s/%i"
3236 " of %s/%i\n",
3237 xstrdup (cgraph_node_name (cs->caller)),
3238 cs->caller->symbol.order,
3239 xstrdup (cgraph_node_name (val->spec_node)),
3240 val->spec_node->symbol.order);
3242 cgraph_redirect_edge_callee (cs, val->spec_node);
3243 redirected_sum += cs->count;
3246 cs = get_next_cgraph_edge_clone (cs);
3250 if (redirected_sum)
3251 update_specialized_profile (val->spec_node, node, redirected_sum);
3255 /* Copy KNOWN_BINFOS to KNOWN_VALS. */
3257 static void
3258 move_binfos_to_values (vec<tree> known_vals,
3259 vec<tree> known_binfos)
3261 tree t;
3262 int i;
3264 for (i = 0; known_binfos.iterate (i, &t); i++)
3265 if (t)
3266 known_vals[i] = t;
3269 /* Return true if there is a replacement equivalent to VALUE, INDEX and OFFSET
3270 among those in the AGGVALS list. */
3272 DEBUG_FUNCTION bool
3273 ipcp_val_in_agg_replacements_p (struct ipa_agg_replacement_value *aggvals,
3274 int index, HOST_WIDE_INT offset, tree value)
3276 while (aggvals)
3278 if (aggvals->index == index
3279 && aggvals->offset == offset
3280 && values_equal_for_ipcp_p (aggvals->value, value))
3281 return true;
3282 aggvals = aggvals->next;
3284 return false;
3287 /* Decide whether to create a special version of NODE for value VAL of parameter
3288 at the given INDEX. If OFFSET is -1, the value is for the parameter itself,
3289 otherwise it is stored at the given OFFSET of the parameter. KNOWN_CSTS,
3290 KNOWN_BINFOS and KNOWN_AGGS describe the other already known values. */
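/* A value that already has a specialized clone only gets additional callers
   redirected to it.  Otherwise a clone is created only if the size limit is
   not exceeded and either the local or the accumulated (propagated) benefit
   makes it a good cloning opportunity.  */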
3292 static bool
3293 decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
3294 struct ipcp_value *val, vec<tree> known_csts,
3295 vec<tree> known_binfos)
3297 struct ipa_agg_replacement_value *aggvals;
3298 int freq_sum, caller_count;
3299 gcov_type count_sum;
3300 vec<cgraph_edge_p> callers;
3301 vec<tree> kv;
3303 if (val->spec_node)
3305 perhaps_add_new_callers (node, val);
3306 return false;
3308 else if (val->local_size_cost + overall_size > max_new_size)
3310 if (dump_file && (dump_flags & TDF_DETAILS))
3311 fprintf (dump_file, " Ignoring candidate value because "
3312 "max_new_size would be reached with %li.\n",
3313 val->local_size_cost + overall_size);
3314 return false;
3316 else if (!get_info_about_necessary_edges (val, &freq_sum, &count_sum,
3317 &caller_count))
3318 return false;
3320 if (dump_file && (dump_flags & TDF_DETAILS))
3322 fprintf (dump_file, " - considering value ");
3323 print_ipcp_constant_value (dump_file, val->value);
3324 fprintf (dump_file, " for ");
3325 ipa_dump_param (dump_file, IPA_NODE_REF (node), index);
3326 if (offset != -1)
3327 fprintf (dump_file, ", offset: " HOST_WIDE_INT_PRINT_DEC, offset);
3328 fprintf (dump_file, " (caller_count: %i)\n", caller_count);
3331 if (!good_cloning_opportunity_p (node, val->local_time_benefit,
3332 freq_sum, count_sum,
3333 val->local_size_cost)
3334 && !good_cloning_opportunity_p (node,
3335 val->local_time_benefit
3336 + val->prop_time_benefit,
3337 freq_sum, count_sum,
3338 val->local_size_cost
3339 + val->prop_size_cost))
3340 return false;
3342 if (dump_file)
3343 fprintf (dump_file, " Creating a specialized node of %s/%i.\n",
3344 cgraph_node_name (node), node->symbol.order);
3346 callers = gather_edges_for_value (val, caller_count);
3347 kv = known_csts.copy ();
3348 move_binfos_to_values (kv, known_binfos);
3349 if (offset == -1)
3350 kv[index] = val->value;
3351 find_more_scalar_values_for_callers_subset (node, kv, callers);
3352 aggvals = find_aggregate_values_for_callers_subset (node, callers);
3353 gcc_checking_assert (offset == -1
3354 || ipcp_val_in_agg_replacements_p (aggvals, index,
3355 offset, val->value));
3356 val->spec_node = create_specialized_node (node, kv, aggvals, callers);
3357 overall_size += val->local_size_cost;
3359 /* TODO: If for some lattice there is only one other known value
3360 left, make a special node for it too. */
3362 return true;
3365 /* Decide whether and what specialized clones of NODE should be created. */
3367 static bool
3368 decide_whether_version_node (struct cgraph_node *node)
3370 struct ipa_node_params *info = IPA_NODE_REF (node);
3371 int i, count = ipa_get_param_count (info);
3372 vec<tree> known_csts, known_binfos;
3373 vec<ipa_agg_jump_function_t> known_aggs = vNULL;
3374 bool ret = false;
3376 if (count == 0)
3377 return false;
3379 if (dump_file && (dump_flags & TDF_DETAILS))
3380 fprintf (dump_file, "\nEvaluating opportunities for %s/%i.\n",
3381 cgraph_node_name (node), node->symbol.order);
3383 gather_context_independent_values (info, &known_csts, &known_binfos,
3384 info->do_clone_for_all_contexts ? &known_aggs
3385 : NULL, NULL);
3387 for (i = 0; i < count ;i++)
3389 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
3390 struct ipcp_lattice *lat = &plats->itself;
3391 struct ipcp_value *val;
3393 if (!lat->bottom
3394 && !known_csts[i]
3395 && !known_binfos[i])
3396 for (val = lat->values; val; val = val->next)
3397 ret |= decide_about_value (node, i, -1, val, known_csts,
3398 known_binfos);
3400 if (!plats->aggs_bottom)
3402 struct ipcp_agg_lattice *aglat;
3403 struct ipcp_value *val;
3404 for (aglat = plats->aggs; aglat; aglat = aglat->next)
3405 if (!aglat->bottom && aglat->values
3406 /* If the following is false, the one value is in
3407 known_aggs. */
3408 && (plats->aggs_contain_variable
3409 || !ipa_lat_is_single_const (aglat)))
3410 for (val = aglat->values; val; val = val->next)
3411 ret |= decide_about_value (node, i, aglat->offset, val,
3412 known_csts, known_binfos);
3414 info = IPA_NODE_REF (node);
3417 if (info->do_clone_for_all_contexts)
3419 struct cgraph_node *clone;
3420 vec<cgraph_edge_p> callers;
3422 if (dump_file)
3423 fprintf (dump_file, " - Creating a specialized node of %s/%i "
3424 "for all known contexts.\n", cgraph_node_name (node),
3425 node->symbol.order);
3427 callers = collect_callers_of_node (node);
3428 move_binfos_to_values (known_csts, known_binfos);
3429 clone = create_specialized_node (node, known_csts,
3430 known_aggs_to_agg_replacement_list (known_aggs),
3431 callers);
3432 info = IPA_NODE_REF (node);
3433 info->do_clone_for_all_contexts = false;
3434 IPA_NODE_REF (clone)->is_all_contexts_clone = true;
3435 for (i = 0; i < count ; i++)
3436 vec_free (known_aggs[i].items);
3437 known_aggs.release ();
3438 ret = true;
3440 else
3441 known_csts.release ();
3443 known_binfos.release ();
3444 return ret;
3447 /* Transitively mark all callees of NODE within the same SCC as not dead. */
3449 static void
3450 spread_undeadness (struct cgraph_node *node)
3452 struct cgraph_edge *cs;
3454 for (cs = node->callees; cs; cs = cs->next_callee)
3455 if (ipa_edge_within_scc (cs))
3457 struct cgraph_node *callee;
3458 struct ipa_node_params *info;
3460 callee = cgraph_function_node (cs->callee, NULL);
3461 info = IPA_NODE_REF (callee);
3463 if (info->node_dead)
3465 info->node_dead = 0;
3466 spread_undeadness (callee);
3471 /* Return true if NODE has a caller from outside of its SCC that is not
3472 dead. Worker callback for cgraph_for_node_and_aliases. */
3474 static bool
3475 has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
3476 void *data ATTRIBUTE_UNUSED)
3478 struct cgraph_edge *cs;
3480 for (cs = node->callers; cs; cs = cs->next_caller)
3481 if (cs->caller->thunk.thunk_p
3482 && cgraph_for_node_and_aliases (cs->caller,
3483 has_undead_caller_from_outside_scc_p,
3484 NULL, true))
3485 return true;
3486 else if (!ipa_edge_within_scc (cs)
3487 && !IPA_NODE_REF (cs->caller)->node_dead)
3488 return true;
3489 return false;
3493 /* Identify nodes within the same SCC as NODE which are no longer needed
3494 because of new clones and will be removed as unreachable. */
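/* First, every node in the SCC that would be removed if it had no direct
   calls and that has no live caller outside the SCC is tentatively marked
   dead; liveness is then spread from the remaining nodes to their callees
   within the SCC.  */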
3496 static void
3497 identify_dead_nodes (struct cgraph_node *node)
3499 struct cgraph_node *v;
3500 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3501 if (cgraph_will_be_removed_from_program_if_no_direct_calls (v)
3502 && !cgraph_for_node_and_aliases (v,
3503 has_undead_caller_from_outside_scc_p,
3504 NULL, true))
3505 IPA_NODE_REF (v)->node_dead = 1;
3507 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3508 if (!IPA_NODE_REF (v)->node_dead)
3509 spread_undeadness (v);
3511 if (dump_file && (dump_flags & TDF_DETAILS))
3513 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3514 if (IPA_NODE_REF (v)->node_dead)
3515 fprintf (dump_file, " Marking node as dead: %s/%i.\n",
3516 cgraph_node_name (v), v->symbol.order);
3520 /* The decision stage. Iterate over the topological order of call graph nodes
3521 TOPO and make specialized clones if deemed beneficial. */
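/* Decisions within an SCC are iterated until they stabilize because creating
   a clone of one node can make specializing other nodes in the same component
   worthwhile.  */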
3523 static void
3524 ipcp_decision_stage (struct topo_info *topo)
3526 int i;
3528 if (dump_file)
3529 fprintf (dump_file, "\nIPA decision stage:\n\n");
3531 for (i = topo->nnodes - 1; i >= 0; i--)
3533 struct cgraph_node *node = topo->order[i];
3534 bool change = false, iterate = true;
3536 while (iterate)
3538 struct cgraph_node *v;
3539 iterate = false;
3540 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3541 if (cgraph_function_with_gimple_body_p (v)
3542 && ipcp_versionable_function_p (v))
3543 iterate |= decide_whether_version_node (v);
3545 change |= iterate;
3547 if (change)
3548 identify_dead_nodes (node);
3552 /* The IPCP driver. */
3554 static unsigned int
3555 ipcp_driver (void)
3557 struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
3558 struct topo_info topo;
3560 ipa_check_create_node_params ();
3561 ipa_check_create_edge_args ();
3562 grow_next_edge_clone_vector ();
3563 edge_duplication_hook_holder =
3564 cgraph_add_edge_duplication_hook (&ipcp_edge_duplication_hook, NULL);
3565 ipcp_values_pool = create_alloc_pool ("IPA-CP values",
3566 sizeof (struct ipcp_value), 32);
3567 ipcp_sources_pool = create_alloc_pool ("IPA-CP value sources",
3568 sizeof (struct ipcp_value_source), 64);
3569 ipcp_agg_lattice_pool = create_alloc_pool ("IPA_CP aggregate lattices",
3570 sizeof (struct ipcp_agg_lattice),
3571 32);
3572 if (dump_file)
3574 fprintf (dump_file, "\nIPA structures before propagation:\n");
3575 if (dump_flags & TDF_DETAILS)
3576 ipa_print_all_params (dump_file);
3577 ipa_print_all_jump_functions (dump_file);
3580 /* Topological sort. */
3581 build_toporder_info (&topo);
3582 /* Do the interprocedural propagation. */
3583 ipcp_propagate_stage (&topo);
3584 /* Decide what constant propagation and cloning should be performed. */
3585 ipcp_decision_stage (&topo);
3587 /* Free all IPCP structures. */
3588 free_toporder_info (&topo);
3589 next_edge_clone.release ();
3590 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3591 ipa_free_all_structures_after_ipa_cp ();
3592 if (dump_file)
3593 fprintf (dump_file, "\nIPA constant propagation end\n");
3594 return 0;
3597 /* Initialization and computation of IPCP data structures. This is the initial
3598 intraprocedural analysis of functions, which gathers information to be
3599 propagated later on. */
3601 static void
3602 ipcp_generate_summary (void)
3604 struct cgraph_node *node;
3606 if (dump_file)
3607 fprintf (dump_file, "\nIPA constant propagation start:\n");
3608 ipa_register_cgraph_hooks ();
3610 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
3612 node->local.versionable
3613 = tree_versionable_function_p (node->symbol.decl);
3614 ipa_analyze_node (node);
3618 /* Write ipcp summary for the nodes being streamed out. */
3620 static void
3621 ipcp_write_summary (void)
3623 ipa_prop_write_jump_functions ();
3626 /* Read ipcp summary. */
3628 static void
3629 ipcp_read_summary (void)
3631 ipa_prop_read_jump_functions ();
3634 /* Gate for IPCP optimization. */
3636 static bool
3637 cgraph_gate_cp (void)
3639 /* FIXME: We should remove the optimize check after we ensure we never run
3640 IPA passes when not optimizing. */
3641 return flag_ipa_cp && optimize;
3644 namespace {
3646 const pass_data pass_data_ipa_cp =
3648 IPA_PASS, /* type */
3649 "cp", /* name */
3650 OPTGROUP_NONE, /* optinfo_flags */
3651 true, /* has_gate */
3652 true, /* has_execute */
3653 TV_IPA_CONSTANT_PROP, /* tv_id */
3654 0, /* properties_required */
3655 0, /* properties_provided */
3656 0, /* properties_destroyed */
3657 0, /* todo_flags_start */
3658 ( TODO_dump_symtab | TODO_remove_functions ), /* todo_flags_finish */
3661 class pass_ipa_cp : public ipa_opt_pass_d
3663 public:
3664 pass_ipa_cp (gcc::context *ctxt)
3665 : ipa_opt_pass_d (pass_data_ipa_cp, ctxt,
3666 ipcp_generate_summary, /* generate_summary */
3667 ipcp_write_summary, /* write_summary */
3668 ipcp_read_summary, /* read_summary */
3669 ipa_prop_write_all_agg_replacement, /*
3670 write_optimization_summary */
3671 ipa_prop_read_all_agg_replacement, /*
3672 read_optimization_summary */
3673 NULL, /* stmt_fixup */
3674 0, /* function_transform_todo_flags_start */
3675 ipcp_transform_function, /* function_transform */
3676 NULL) /* variable_transform */
3679 /* opt_pass methods: */
3680 bool gate () { return cgraph_gate_cp (); }
3681 unsigned int execute () { return ipcp_driver (); }
3683 }; // class pass_ipa_cp
3685 } // anon namespace
3687 ipa_opt_pass_d *
3688 make_pass_ipa_cp (gcc::context *ctxt)
3690 return new pass_ipa_cp (ctxt);