Merge branches/gcc-4_8-branch rev 216856
[official-gcc.git] / gcc-4_8-branch / gcc / ipa-cp.c
blob b7db0cc129dd57fff5980c854d16f345ad889609
1 /* Interprocedural constant propagation
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
4 Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
5 <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Interprocedural constant propagation (IPA-CP).
25 The goal of this transformation is to
27 1) discover functions which are always invoked with some arguments with the
28 same known constant values and modify the functions so that the
29 subsequent optimizations can take advantage of the knowledge, and
31 2) partial specialization - create specialized versions of functions
32 transformed in this way if some parameters are known constants only in
33   certain contexts but the estimated tradeoff between speedup and code-size cost
34 is deemed good.
36 The algorithm also propagates types and attempts to perform type based
37 devirtualization. Types are propagated much like constants.
39 The algorithm basically consists of three stages. In the first, functions
40 are analyzed one at a time and jump functions are constructed for all known
41 call-sites. In the second phase, the pass propagates information from the
42 jump functions across the call to reveal what values are available at what
43 call sites, performs estimations of effects of known values on functions and
44 their callees, and finally decides what specialized extra versions should be
45 created. In the third, the special versions materialize and appropriate
46 calls are redirected.
48 The algorithm used is to a certain extent based on "Interprocedural Constant
49 Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
50 Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
51 Cooper, Mary W. Hall, and Ken Kennedy.
54 First stage - intraprocedural analysis
55 =======================================
57 This phase computes jump_function and modification flags.
59   A jump function for a call-site represents the values passed as actual
60   arguments of a given call-site. In principle, there are three types of
61   values (a small example follows the list below):
63   Pass through - the caller's formal parameter is passed as an actual
64   argument, possibly with an operation applied to it.
65 Constant - a constant is passed as an actual argument.
66 Unknown - neither of the above.
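
     As a hypothetical illustration (this example is not taken from the GCC
     sources), consider a caller

       void foo (int a, int b, int c)
       {
         bar (a, 7, b + c);
       }

     The call to bar gets a pass-through jump function for its first argument
     (foo's parameter a is passed unchanged), a constant jump function for the
     second argument (7), and an unknown jump function for the third, because
     b + c is not an operation on a single formal parameter.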
68 All jump function types are described in detail in ipa-prop.h, together with
69 the data structures that represent them and methods of accessing them.
71 ipcp_generate_summary() is the main function of the first stage.
73 Second stage - interprocedural analysis
74 ========================================
76 This stage is itself divided into two phases. In the first, we propagate
77 known values over the call graph, in the second, we make cloning decisions.
78   It uses a different algorithm than Callahan's original paper.
80 First, we traverse the functions topologically from callers to callees and,
81 for each strongly connected component (SCC), we propagate constants
82 according to previously computed jump functions. We also record what known
83 values depend on other known values and estimate local effects. Finally, we
84 propagate cumulative information about these effects from dependent values
85 to those on which they depend.
87 Second, we again traverse the call graph in the same topological order and
88 make clones for functions which we know are called with the same values in
89 all contexts and decide about extra specialized clones of functions just for
90 some contexts - these decisions are based on both local estimates and
91 cumulative estimates propagated from callees.
93 ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
94   second stage.
96 Third phase - materialization of clones, call statement updates.
97   ================================================================
99 This stage is currently performed by call graph code (mainly in cgraphunit.c
100 and tree-inline.c) according to instructions inserted to the call graph by
101 the second stage. */
103 #include "config.h"
104 #include "system.h"
105 #include "coretypes.h"
106 #include "tree.h"
107 #include "target.h"
108 #include "gimple.h"
109 #include "cgraph.h"
110 #include "ipa-prop.h"
111 #include "tree-flow.h"
112 #include "tree-pass.h"
113 #include "flags.h"
114 #include "diagnostic.h"
115 #include "tree-pretty-print.h"
116 #include "tree-inline.h"
117 #include "params.h"
118 #include "ipa-inline.h"
119 #include "ipa-utils.h"
121 struct ipcp_value;
123 /* Describes a particular source for an IPA-CP value. */
125 struct ipcp_value_source
127   /* Aggregate offset of the source, negative if the source is the scalar
128      value of the argument itself. */
129 HOST_WIDE_INT offset;
130 /* The incoming edge that brought the value. */
131 struct cgraph_edge *cs;
132   /* If the jump function that resulted in this value was a pass-through or an
133 ancestor, this is the ipcp_value of the caller from which the described
134 value has been derived. Otherwise it is NULL. */
135 struct ipcp_value *val;
136 /* Next pointer in a linked list of sources of a value. */
137 struct ipcp_value_source *next;
138   /* If the jump function that resulted in this value was a pass-through or an
139 ancestor, this is the index of the parameter of the caller the jump
140 function references. */
141 int index;
144 /* Describes one particular value stored in struct ipcp_lattice. */
146 struct ipcp_value
148 /* The actual value for the given parameter. This is either an IPA invariant
149 or a TREE_BINFO describing a type that can be used for
150 devirtualization. */
151 tree value;
152 /* The list of sources from which this value originates. */
153 struct ipcp_value_source *sources;
154 /* Next pointers in a linked list of all values in a lattice. */
155 struct ipcp_value *next;
156 /* Next pointers in a linked list of values in a strongly connected component
157 of values. */
158 struct ipcp_value *scc_next;
159 /* Next pointers in a linked list of SCCs of values sorted topologically
160      according to their sources. */
161 struct ipcp_value *topo_next;
162 /* A specialized node created for this value, NULL if none has been (so far)
163 created. */
164 struct cgraph_node *spec_node;
165 /* Depth first search number and low link for topological sorting of
166 values. */
167 int dfs, low_link;
168 /* Time benefit and size cost that specializing the function for this value
169 would bring about in this function alone. */
170 int local_time_benefit, local_size_cost;
171 /* Time benefit and size cost that specializing the function for this value
172      can bring about in its callees (transitively). */
173 int prop_time_benefit, prop_size_cost;
174   /* True if this value is currently on the topo-sort stack. */
175 bool on_stack;
178 /* Lattice describing potential values of a formal parameter of a function, or
179    a part of an aggregate. TOP is represented by a lattice with zero values
180 and with contains_variable and bottom flags cleared. BOTTOM is represented
181 by a lattice with the bottom flag set. In that case, values and
182 contains_variable flag should be disregarded. */
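
/* An editorial summary of the states described above (the same information in
   tabular form, not an additional invariant):

     state               values list   contains_variable   bottom
     TOP                 empty         false               false
     known constant(s)   non-empty     false               false
     VARIABLE            any           true                false
     BOTTOM              disregarded   disregarded         true        */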
184 struct ipcp_lattice
186 /* The list of known values and types in this lattice. Note that values are
187 not deallocated if a lattice is set to bottom because there may be value
188 sources referencing them. */
189 struct ipcp_value *values;
190 /* Number of known values and types in this lattice. */
191 int values_count;
192 /* The lattice contains a variable component (in addition to values). */
193 bool contains_variable;
194 /* The value of the lattice is bottom (i.e. variable and unusable for any
195 propagation). */
196 bool bottom;
199 /* Lattice with an offset to describe a part of an aggregate. */
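
/* For illustration (an editorial example assuming a typical 64-bit target):
   if every caller passes a struct { int a; long b; } whose b member is 42,
   the corresponding parameter gets an aggregate lattice with offset 64 and
   size 64 (both in bits) holding the constant 42, while a separate lattice
   with offset 0 and size 32 describes whatever is known about member a.  */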
201 struct ipcp_agg_lattice : public ipcp_lattice
203 /* Offset that is being described by this lattice. */
204 HOST_WIDE_INT offset;
205 /* Size so that we don't have to re-compute it every time we traverse the
206 list. Must correspond to TYPE_SIZE of all lat values. */
207 HOST_WIDE_INT size;
208 /* Next element of the linked list. */
209 struct ipcp_agg_lattice *next;
212 /* Structure containing lattices for a parameter itself and for pieces of
213 aggregates that are passed in the parameter or by a reference in a parameter
214 plus some other useful flags. */
216 struct ipcp_param_lattices
218 /* Lattice describing the value of the parameter itself. */
219 struct ipcp_lattice itself;
220 /* Lattices describing aggregate parts. */
221 struct ipcp_agg_lattice *aggs;
222 /* Number of aggregate lattices */
223 int aggs_count;
224 /* True if aggregate data were passed by reference (as opposed to by
225 value). */
226 bool aggs_by_ref;
227 /* All aggregate lattices contain a variable component (in addition to
228 values). */
229 bool aggs_contain_variable;
230 /* The value of all aggregate lattices is bottom (i.e. variable and unusable
231 for any propagation). */
232 bool aggs_bottom;
234 /* There is a virtual call based on this parameter. */
235 bool virt_call;
238 /* Allocation pools for values and their sources in ipa-cp. */
240 alloc_pool ipcp_values_pool;
241 alloc_pool ipcp_sources_pool;
242 alloc_pool ipcp_agg_lattice_pool;
244 /* Maximal count found in program. */
246 static gcov_type max_count;
248 /* Original overall size of the program. */
250 static long overall_size, max_new_size;
252 /* Head of the linked list of topologically sorted values. */
254 static struct ipcp_value *values_topo;
256 /* Return the param lattices structure corresponding to the Ith formal
257 parameter of the function described by INFO. */
258 static inline struct ipcp_param_lattices *
259 ipa_get_parm_lattices (struct ipa_node_params *info, int i)
261 gcc_assert (i >= 0 && i < ipa_get_param_count (info));
262 gcc_checking_assert (!info->ipcp_orig_node);
263 gcc_checking_assert (info->lattices);
264 return &(info->lattices[i]);
267 /* Return the lattice corresponding to the scalar value of the Ith formal
268 parameter of the function described by INFO. */
269 static inline struct ipcp_lattice *
270 ipa_get_scalar_lat (struct ipa_node_params *info, int i)
272 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
273 return &plats->itself;
276 /* Return whether LAT is a lattice with a single constant and without an
277    unknown variable component. */
279 static inline bool
280 ipa_lat_is_single_const (struct ipcp_lattice *lat)
282 if (lat->bottom
283 || lat->contains_variable
284 || lat->values_count != 1)
285 return false;
286 else
287 return true;
290 /* Return true iff the CS is an edge within a strongly connected component as
291 computed by ipa_reduced_postorder. */
293 static inline bool
294 edge_within_scc (struct cgraph_edge *cs)
296 struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->symbol.aux;
297 struct ipa_dfs_info *callee_dfs;
298 struct cgraph_node *callee = cgraph_function_node (cs->callee, NULL);
300 callee_dfs = (struct ipa_dfs_info *) callee->symbol.aux;
301 return (caller_dfs
302 && callee_dfs
303 && caller_dfs->scc_no == callee_dfs->scc_no);
306 /* Print V which is extracted from a value in a lattice to F. */
308 static void
309 print_ipcp_constant_value (FILE * f, tree v)
311 if (TREE_CODE (v) == TREE_BINFO)
313 fprintf (f, "BINFO ");
314 print_generic_expr (f, BINFO_TYPE (v), 0);
316 else if (TREE_CODE (v) == ADDR_EXPR
317 && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
319 fprintf (f, "& ");
320 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)), 0);
322 else
323 print_generic_expr (f, v, 0);
326 /* Print a lattice LAT to F. */
328 static void
329 print_lattice (FILE * f, struct ipcp_lattice *lat,
330 bool dump_sources, bool dump_benefits)
332 struct ipcp_value *val;
333 bool prev = false;
335 if (lat->bottom)
337 fprintf (f, "BOTTOM\n");
338 return;
341 if (!lat->values_count && !lat->contains_variable)
343 fprintf (f, "TOP\n");
344 return;
347 if (lat->contains_variable)
349 fprintf (f, "VARIABLE");
350 prev = true;
351 if (dump_benefits)
352 fprintf (f, "\n");
355 for (val = lat->values; val; val = val->next)
357 if (dump_benefits && prev)
358 fprintf (f, " ");
359 else if (!dump_benefits && prev)
360 fprintf (f, ", ");
361 else
362 prev = true;
364 print_ipcp_constant_value (f, val->value);
366 if (dump_sources)
368 struct ipcp_value_source *s;
370 fprintf (f, " [from:");
371 for (s = val->sources; s; s = s->next)
372 fprintf (f, " %i(%i)", s->cs->caller->uid,s->cs->frequency);
373 fprintf (f, "]");
376 if (dump_benefits)
377 fprintf (f, " [loc_time: %i, loc_size: %i, "
378 "prop_time: %i, prop_size: %i]\n",
379 val->local_time_benefit, val->local_size_cost,
380 val->prop_time_benefit, val->prop_size_cost);
382 if (!dump_benefits)
383 fprintf (f, "\n");
386 /* Print all ipcp_lattices of all functions to F. */
388 static void
389 print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
391 struct cgraph_node *node;
392 int i, count;
394 fprintf (f, "\nLattices:\n");
395 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
397 struct ipa_node_params *info;
399 info = IPA_NODE_REF (node);
400 fprintf (f, " Node: %s/%i:\n", cgraph_node_name (node), node->uid);
401 count = ipa_get_param_count (info);
402 for (i = 0; i < count; i++)
404 struct ipcp_agg_lattice *aglat;
405 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
406 fprintf (f, " param [%d]: ", i);
407 print_lattice (f, &plats->itself, dump_sources, dump_benefits);
409 if (plats->virt_call)
410 fprintf (f, " virt_call flag set\n");
412 if (plats->aggs_bottom)
414 fprintf (f, " AGGS BOTTOM\n");
415 continue;
417 if (plats->aggs_contain_variable)
418 fprintf (f, " AGGS VARIABLE\n");
419 for (aglat = plats->aggs; aglat; aglat = aglat->next)
421 fprintf (f, " %soffset " HOST_WIDE_INT_PRINT_DEC ": ",
422 plats->aggs_by_ref ? "ref " : "", aglat->offset);
423 print_lattice (f, aglat, dump_sources, dump_benefits);
429 /* Determine whether it is at all technically possible to create clones of NODE
430 and store this information in the ipa_node_params structure associated
431 with NODE. */
433 static void
434 determine_versionability (struct cgraph_node *node)
436 const char *reason = NULL;
438 /* There are a number of generic reasons functions cannot be versioned. We
439 also cannot remove parameters if there are type attributes such as fnspec
440 present. */
441 if (node->alias || node->thunk.thunk_p)
442 reason = "alias or thunk";
443 else if (!node->local.versionable)
444 reason = "not a tree_versionable_function";
445 else if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
446 reason = "insufficient body availability";
447 else if (!opt_for_fn (node->symbol.decl, optimize)
448 || !opt_for_fn (node->symbol.decl, flag_ipa_cp))
449 reason = "non-optimized function";
450 else if (node->tm_clone)
451 reason = "transactional memory clone";
453 if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
454 fprintf (dump_file, "Function %s/%i is not versionable, reason: %s.\n",
455 cgraph_node_name (node), node->uid, reason);
457 node->local.versionable = (reason == NULL);
460 /* Return true if it is at all technically possible to create clones of a
461 NODE. */
463 static bool
464 ipcp_versionable_function_p (struct cgraph_node *node)
466 return node->local.versionable;
469 /* Structure holding accumulated information about callers of a node. */
471 struct caller_statistics
473 gcov_type count_sum;
474 int n_calls, n_hot_calls, freq_sum;
477 /* Initialize fields of STAT to zeroes. */
479 static inline void
480 init_caller_stats (struct caller_statistics *stats)
482 stats->count_sum = 0;
483 stats->n_calls = 0;
484 stats->n_hot_calls = 0;
485 stats->freq_sum = 0;
488 /* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
489 non-thunk incoming edges to NODE. */
491 static bool
492 gather_caller_stats (struct cgraph_node *node, void *data)
494 struct caller_statistics *stats = (struct caller_statistics *) data;
495 struct cgraph_edge *cs;
497 for (cs = node->callers; cs; cs = cs->next_caller)
498 if (cs->caller->thunk.thunk_p)
499 cgraph_for_node_and_aliases (cs->caller, gather_caller_stats,
500 stats, false);
501 else
503 stats->count_sum += cs->count;
504 stats->freq_sum += cs->frequency;
505 stats->n_calls++;
506 if (cgraph_maybe_hot_edge_p (cs))
507 stats->n_hot_calls ++;
509 return false;
513 /* Return true if this NODE is a viable candidate for cloning. */
515 static bool
516 ipcp_cloning_candidate_p (struct cgraph_node *node)
518 struct caller_statistics stats;
520 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
522 if (!flag_ipa_cp_clone)
524 if (dump_file)
525 fprintf (dump_file, "Not considering %s for cloning; "
526 "-fipa-cp-clone disabled.\n",
527 cgraph_node_name (node));
528 return false;
531 if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->symbol.decl)))
533 if (dump_file)
534 fprintf (dump_file, "Not considering %s for cloning; "
535 "optimizing it for size.\n",
536 cgraph_node_name (node));
537 return false;
540 init_caller_stats (&stats);
541 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
543 if (inline_summary (node)->self_size < stats.n_calls)
545 if (dump_file)
546 fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
547 cgraph_node_name (node));
548 return true;
551 /* When profile is available and function is hot, propagate into it even if
552      calls seem cold; constant propagation can improve the function's speed
553 significantly. */
554 if (max_count)
556 if (stats.count_sum > node->count * 90 / 100)
558 if (dump_file)
559 fprintf (dump_file, "Considering %s for cloning; "
560 "usually called directly.\n",
561 cgraph_node_name (node));
562 return true;
565 if (!stats.n_hot_calls)
567 if (dump_file)
568 fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
569 cgraph_node_name (node));
570 return false;
572 if (dump_file)
573 fprintf (dump_file, "Considering %s for cloning.\n",
574 cgraph_node_name (node));
575 return true;
578 /* Arrays representing a topological ordering of call graph nodes and a stack
579    of nodes used during constant propagation. */
581 struct topo_info
583 struct cgraph_node **order;
584 struct cgraph_node **stack;
585 int nnodes, stack_top;
588 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
590 static void
591 build_toporder_info (struct topo_info *topo)
593 topo->order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
594 topo->stack = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
595 topo->stack_top = 0;
596 topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
599 /* Free information about strongly connected components and the arrays in
600 TOPO. */
602 static void
603 free_toporder_info (struct topo_info *topo)
605 ipa_free_postorder_info ();
606 free (topo->order);
607 free (topo->stack);
610 /* Add NODE to the stack in TOPO, unless it is already there. */
612 static inline void
613 push_node_to_stack (struct topo_info *topo, struct cgraph_node *node)
615 struct ipa_node_params *info = IPA_NODE_REF (node);
616 if (info->node_enqueued)
617 return;
618 info->node_enqueued = 1;
619 topo->stack[topo->stack_top++] = node;
622 /* Pop a node from the stack in TOPO and return it or return NULL if the stack
623 is empty. */
625 static struct cgraph_node *
626 pop_node_from_stack (struct topo_info *topo)
628 if (topo->stack_top)
630 struct cgraph_node *node;
631 topo->stack_top--;
632 node = topo->stack[topo->stack_top];
633 IPA_NODE_REF (node)->node_enqueued = 0;
634 return node;
636 else
637 return NULL;
640 /* Set lattice LAT to bottom and return true if it previously was not set as
641 such. */
643 static inline bool
644 set_lattice_to_bottom (struct ipcp_lattice *lat)
646 bool ret = !lat->bottom;
647 lat->bottom = true;
648 return ret;
651 /* Mark lattice as containing an unknown value and return true if it previously
652 was not marked as such. */
654 static inline bool
655 set_lattice_contains_variable (struct ipcp_lattice *lat)
657 bool ret = !lat->contains_variable;
658 lat->contains_variable = true;
659 return ret;
662 /* Set all aggregate lattices in PLATS to bottom and return true if they were
663 not previously set as such. */
665 static inline bool
666 set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
668 bool ret = !plats->aggs_bottom;
669 plats->aggs_bottom = true;
670 return ret;
673 /* Mark all aggregate lattices in PLATS as containing an unknown value and
674 return true if they were not previously marked as such. */
676 static inline bool
677 set_agg_lats_contain_variable (struct ipcp_param_lattices *plats)
679 bool ret = !plats->aggs_contain_variable;
680 plats->aggs_contain_variable = true;
681 return ret;
684 /* Mark both aggregate and scalar lattices as containing an unknown value,
685    and return true if any of them has not been marked as such so far. */
687 static inline bool
688 set_all_contains_variable (struct ipcp_param_lattices *plats)
690 bool ret = !plats->itself.contains_variable || !plats->aggs_contain_variable;
691 plats->itself.contains_variable = true;
692 plats->aggs_contain_variable = true;
693 return ret;
696 /* Initialize ipcp_lattices. */
698 static void
699 initialize_node_lattices (struct cgraph_node *node)
701 struct ipa_node_params *info = IPA_NODE_REF (node);
702 struct cgraph_edge *ie;
703 bool disable = false, variable = false;
704 int i;
706 gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
707 if (!node->local.local)
709 /* When cloning is allowed, we can assume that externally visible
710	 functions are not called. We will compensate for this by cloning
711 later. */
712 if (ipcp_versionable_function_p (node)
713 && ipcp_cloning_candidate_p (node))
714 variable = true;
715 else
716 disable = true;
719 if (disable || variable)
721 for (i = 0; i < ipa_get_param_count (info) ; i++)
723 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
724 if (disable)
726 set_lattice_to_bottom (&plats->itself);
727 set_agg_lats_to_bottom (plats);
729 else
730 set_all_contains_variable (plats);
732 if (dump_file && (dump_flags & TDF_DETAILS)
733 && !node->alias && !node->thunk.thunk_p)
734 fprintf (dump_file, "Marking all lattices of %s/%i as %s\n",
735 cgraph_node_name (node), node->uid,
736 disable ? "BOTTOM" : "VARIABLE");
738 if (!disable)
739 for (i = 0; i < ipa_get_param_count (info) ; i++)
741 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
742 tree t = TREE_TYPE (ipa_get_param(info, i));
744 if (POINTER_TYPE_P (t) && TYPE_RESTRICT (t)
745 && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE)
747 set_lattice_to_bottom (&plats->itself);
748 if (dump_file && (dump_flags & TDF_DETAILS)
749 && !node->alias && !node->thunk.thunk_p)
750 fprintf (dump_file, "Going to ignore param %i of of %s/%i.\n",
751 i, cgraph_node_name (node), node->uid);
755 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
756 if (ie->indirect_info->polymorphic)
758 gcc_checking_assert (ie->indirect_info->param_index >= 0);
759 ipa_get_parm_lattices (info,
760 ie->indirect_info->param_index)->virt_call = 1;
764 /* Return the result of a (possibly arithmetic) pass through jump function
765    JFUNC on the constant value INPUT. Return NULL_TREE if the result cannot be
766    determined or cannot be considered an interprocedural invariant. */
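
/* A worked example (editorial, not from the original sources): for a
   pass-through jump function with operation PLUS_EXPR and operand 1 applied
   to the constant 7, the code below computes fold_binary (PLUS_EXPR, integer
   type, 7, 1), i.e. 8.  For comparison operations such as LT_EXPR the result
   type is boolean_type_node rather than the type of the input.  */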
768 static tree
769 ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
771 tree restype, res;
773 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
774 return input;
775 else if (TREE_CODE (input) == TREE_BINFO)
776 return NULL_TREE;
778 gcc_checking_assert (is_gimple_ip_invariant (input));
779 if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc))
780 == tcc_comparison)
781 restype = boolean_type_node;
782 else
783 restype = TREE_TYPE (input);
784 res = fold_binary (ipa_get_jf_pass_through_operation (jfunc), restype,
785 input, ipa_get_jf_pass_through_operand (jfunc));
787 if (res && !is_gimple_ip_invariant (res))
788 return NULL_TREE;
790 return res;
793 /* Return the result of an ancestor jump function JFUNC on the constant value
794 INPUT. Return NULL_TREE if that cannot be determined. */
796 static tree
797 ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
799 if (TREE_CODE (input) == TREE_BINFO)
800 return get_binfo_at_offset (input,
801 ipa_get_jf_ancestor_offset (jfunc),
802 ipa_get_jf_ancestor_type (jfunc));
803 else if (TREE_CODE (input) == ADDR_EXPR)
805 tree t = TREE_OPERAND (input, 0);
806 t = build_ref_for_offset (EXPR_LOCATION (t), t,
807 ipa_get_jf_ancestor_offset (jfunc),
808 ipa_get_jf_ancestor_type (jfunc), NULL, false);
809 return build_fold_addr_expr (t);
811 else
812 return NULL_TREE;
815 /* Extract the actual BINFO being described by JFUNC which must be a known type
816 jump function. */
818 static tree
819 ipa_value_from_known_type_jfunc (struct ipa_jump_func *jfunc)
821 tree base_binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
822 if (!base_binfo)
823 return NULL_TREE;
824 return get_binfo_at_offset (base_binfo,
825 ipa_get_jf_known_type_offset (jfunc),
826 ipa_get_jf_known_type_component_type (jfunc));
829 /* Determine whether JFUNC evaluates to a known value (that is either a
830 constant or a binfo) and if so, return it. Otherwise return NULL. INFO
831 describes the caller node so that pass-through jump functions can be
832 evaluated. */
834 tree
835 ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
837 if (jfunc->type == IPA_JF_CONST)
838 return ipa_get_jf_constant (jfunc);
839 else if (jfunc->type == IPA_JF_KNOWN_TYPE)
840 return ipa_value_from_known_type_jfunc (jfunc);
841 else if (jfunc->type == IPA_JF_PASS_THROUGH
842 || jfunc->type == IPA_JF_ANCESTOR)
844 tree input;
845 int idx;
847 if (jfunc->type == IPA_JF_PASS_THROUGH)
848 idx = ipa_get_jf_pass_through_formal_id (jfunc);
849 else
850 idx = ipa_get_jf_ancestor_formal_id (jfunc);
852 if (info->ipcp_orig_node)
853 input = info->known_vals[idx];
854 else
856 struct ipcp_lattice *lat;
858 if (!info->lattices)
860 gcc_checking_assert (!flag_ipa_cp);
861 return NULL_TREE;
863 lat = ipa_get_scalar_lat (info, idx);
864 if (!ipa_lat_is_single_const (lat))
865 return NULL_TREE;
866 input = lat->values->value;
869 if (!input)
870 return NULL_TREE;
872 if (jfunc->type == IPA_JF_PASS_THROUGH)
873 return ipa_get_jf_pass_through_result (jfunc, input);
874 else
875 return ipa_get_jf_ancestor_result (jfunc, input);
877 else
878 return NULL_TREE;
882 /* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
883 bottom, not containing a variable component and without any known value at
884 the same time. */
886 DEBUG_FUNCTION void
887 ipcp_verify_propagated_values (void)
889 struct cgraph_node *node;
891 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
893 struct ipa_node_params *info = IPA_NODE_REF (node);
894 int i, count = ipa_get_param_count (info);
896 for (i = 0; i < count; i++)
898 struct ipcp_lattice *lat = ipa_get_scalar_lat (info, i);
900 if (!lat->bottom
901 && !lat->contains_variable
902 && lat->values_count == 0)
904 if (dump_file)
906 fprintf (dump_file, "\nIPA lattices after constant "
907 "propagation:\n");
908 print_all_lattices (dump_file, true, false);
911 gcc_unreachable ();
917 /* Return true iff X and Y should be considered equal values by IPA-CP. */
919 static bool
920 values_equal_for_ipcp_p (tree x, tree y)
922 gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);
924 if (x == y)
925 return true;
927 if (TREE_CODE (x) == TREE_BINFO || TREE_CODE (y) == TREE_BINFO)
928 return false;
930 if (TREE_CODE (x) == ADDR_EXPR
931 && TREE_CODE (y) == ADDR_EXPR
932 && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
933 && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
934 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
935 DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
936 else
937 return operand_equal_p (x, y, 0);
940 /* Add a new value source to VAL, marking that a value comes from edge CS and
941 (if the underlying jump function is a pass-through or an ancestor one) from
942 a caller value SRC_VAL of a caller parameter described by SRC_INDEX. OFFSET
943    is negative if the source was the scalar value of the parameter itself;
944    otherwise it is the offset within an aggregate. */
946 static void
947 add_value_source (struct ipcp_value *val, struct cgraph_edge *cs,
948 struct ipcp_value *src_val, int src_idx, HOST_WIDE_INT offset)
950 struct ipcp_value_source *src;
952 src = (struct ipcp_value_source *) pool_alloc (ipcp_sources_pool);
953 src->offset = offset;
954 src->cs = cs;
955 src->val = src_val;
956 src->index = src_idx;
958 src->next = val->sources;
959 val->sources = src;
962 /* Try to add NEWVAL to LAT, potentially creating a new struct ipcp_value for
963    it. CS, SRC_VAL, SRC_IDX and OFFSET are meant for add_value_source and
964 have the same meaning. */
966 static bool
967 add_value_to_lattice (struct ipcp_lattice *lat, tree newval,
968 struct cgraph_edge *cs, struct ipcp_value *src_val,
969 int src_idx, HOST_WIDE_INT offset)
971 struct ipcp_value *val;
973 if (lat->bottom)
974 return false;
976 for (val = lat->values; val; val = val->next)
977 if (values_equal_for_ipcp_p (val->value, newval))
979 if (edge_within_scc (cs))
981 struct ipcp_value_source *s;
982 for (s = val->sources; s ; s = s->next)
983 if (s->cs == cs)
984 break;
985 if (s)
986 return false;
989 add_value_source (val, cs, src_val, src_idx, offset);
990 return false;
993 if (lat->values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
995 /* We can only free sources, not the values themselves, because sources
996 	 of other values in this SCC might point to them. */
997 for (val = lat->values; val; val = val->next)
999 while (val->sources)
1001 struct ipcp_value_source *src = val->sources;
1002 val->sources = src->next;
1003 pool_free (ipcp_sources_pool, src);
1007 lat->values = NULL;
1008 return set_lattice_to_bottom (lat);
1011 lat->values_count++;
1012 val = (struct ipcp_value *) pool_alloc (ipcp_values_pool);
1013 memset (val, 0, sizeof (*val));
1015 add_value_source (val, cs, src_val, src_idx, offset);
1016 val->value = newval;
1017 val->next = lat->values;
1018 lat->values = val;
1019 return true;
1022 /* Like above but passes a special value of offset to distinguish that the
1023 origin is the scalar value of the parameter rather than a part of an
1024 aggregate. */
1026 static inline bool
1027 add_scalar_value_to_lattice (struct ipcp_lattice *lat, tree newval,
1028 struct cgraph_edge *cs,
1029 struct ipcp_value *src_val, int src_idx)
1031 return add_value_to_lattice (lat, newval, cs, src_val, src_idx, -1);
1034 /* Propagate values through a pass-through jump function JFUNC associated with
1035 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1036 is the index of the source parameter. */
1038 static bool
1039 propagate_vals_accross_pass_through (struct cgraph_edge *cs,
1040 struct ipa_jump_func *jfunc,
1041 struct ipcp_lattice *src_lat,
1042 struct ipcp_lattice *dest_lat,
1043 int src_idx)
1045 struct ipcp_value *src_val;
1046 bool ret = false;
1048 if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
1049 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1050 ret |= add_scalar_value_to_lattice (dest_lat, src_val->value, cs,
1051 src_val, src_idx);
1052 /* Do not create new values when propagating within an SCC because if there
1053      are arithmetic functions with circular dependencies, there is an infinite
1054 number of them and we would just make lattices bottom. */
1055 else if (edge_within_scc (cs))
1056 ret = set_lattice_contains_variable (dest_lat);
1057 else
1058 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1060 tree cstval = src_val->value;
1062 if (TREE_CODE (cstval) == TREE_BINFO)
1064 ret |= set_lattice_contains_variable (dest_lat);
1065 continue;
1067 cstval = ipa_get_jf_pass_through_result (jfunc, cstval);
1069 if (cstval)
1070 ret |= add_scalar_value_to_lattice (dest_lat, cstval, cs, src_val,
1071 src_idx);
1072 else
1073 ret |= set_lattice_contains_variable (dest_lat);
1076 return ret;
1079 /* Propagate values through an ancestor jump function JFUNC associated with
1080 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
1081 is the index of the source parameter. */
1083 static bool
1084 propagate_vals_accross_ancestor (struct cgraph_edge *cs,
1085 struct ipa_jump_func *jfunc,
1086 struct ipcp_lattice *src_lat,
1087 struct ipcp_lattice *dest_lat,
1088 int src_idx)
1090 struct ipcp_value *src_val;
1091 bool ret = false;
1093 if (edge_within_scc (cs))
1094 return set_lattice_contains_variable (dest_lat);
1096 for (src_val = src_lat->values; src_val; src_val = src_val->next)
1098 tree t = ipa_get_jf_ancestor_result (jfunc, src_val->value);
1100 if (t)
1101 ret |= add_scalar_value_to_lattice (dest_lat, t, cs, src_val, src_idx);
1102 else
1103 ret |= set_lattice_contains_variable (dest_lat);
1106 return ret;
1109 /* Propagate scalar values across jump function JFUNC that is associated with
1110 edge CS and put the values into DEST_LAT. */
1112 static bool
1113 propagate_scalar_accross_jump_function (struct cgraph_edge *cs,
1114 struct ipa_jump_func *jfunc,
1115 struct ipcp_lattice *dest_lat)
1117 if (dest_lat->bottom)
1118 return false;
1120 if (jfunc->type == IPA_JF_CONST
1121 || jfunc->type == IPA_JF_KNOWN_TYPE)
1123 tree val;
1125 if (jfunc->type == IPA_JF_KNOWN_TYPE)
1127 val = ipa_value_from_known_type_jfunc (jfunc);
1128 if (!val)
1129 return set_lattice_contains_variable (dest_lat);
1131 else
1132 val = ipa_get_jf_constant (jfunc);
1133 return add_scalar_value_to_lattice (dest_lat, val, cs, NULL, 0);
1135 else if (jfunc->type == IPA_JF_PASS_THROUGH
1136 || jfunc->type == IPA_JF_ANCESTOR)
1138 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1139 struct ipcp_lattice *src_lat;
1140 int src_idx;
1141 bool ret;
1143 if (jfunc->type == IPA_JF_PASS_THROUGH)
1144 src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1145 else
1146 src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1148 src_lat = ipa_get_scalar_lat (caller_info, src_idx);
1149 if (src_lat->bottom)
1150 return set_lattice_contains_variable (dest_lat);
1152 /* If we would need to clone the caller and cannot, do not propagate. */
1153 if (!ipcp_versionable_function_p (cs->caller)
1154 && (src_lat->contains_variable
1155 || (src_lat->values_count > 1)))
1156 return set_lattice_contains_variable (dest_lat);
1158 if (jfunc->type == IPA_JF_PASS_THROUGH)
1159 ret = propagate_vals_accross_pass_through (cs, jfunc, src_lat,
1160 dest_lat, src_idx);
1161 else
1162 ret = propagate_vals_accross_ancestor (cs, jfunc, src_lat, dest_lat,
1163 src_idx);
1165 if (src_lat->contains_variable)
1166 ret |= set_lattice_contains_variable (dest_lat);
1168 return ret;
1171 /* TODO: We currently do not handle member method pointers in IPA-CP (we only
1172      use it for indirect inlining); we should propagate them too. */
1173 return set_lattice_contains_variable (dest_lat);
1176 /* If DEST_PLATS already has aggregate items, check that aggs_by_ref matches
1177 NEW_AGGS_BY_REF and if not, mark all aggs as bottoms and return true (in all
1178 other cases, return false). If there are no aggregate items, set
1179 aggs_by_ref to NEW_AGGS_BY_REF. */
1181 static bool
1182 set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
1183 bool new_aggs_by_ref)
1185 if (dest_plats->aggs)
1187 if (dest_plats->aggs_by_ref != new_aggs_by_ref)
1189 set_agg_lats_to_bottom (dest_plats);
1190 return true;
1193 else
1194 dest_plats->aggs_by_ref = new_aggs_by_ref;
1195 return false;
1198 /* Walk aggregate lattices in DEST_PLATS from ***AGLAT on, until ***aglat is an
1199 already existing lattice for the given OFFSET and SIZE, marking all skipped
1200 lattices as containing variable and checking for overlaps. If there is no
1201 already existing lattice for the OFFSET and VAL_SIZE, create one, initialize
1202 it with offset, size and contains_variable to PRE_EXISTING, and return true,
1203    unless there are too many already. If there are too many, return false. If
1204    there are overlaps, turn the whole DEST_PLATS to bottom and return false. If any
1205 skipped lattices were newly marked as containing variable, set *CHANGE to
1206 true. */
1208 static bool
1209 merge_agg_lats_step (struct ipcp_param_lattices *dest_plats,
1210 HOST_WIDE_INT offset, HOST_WIDE_INT val_size,
1211 struct ipcp_agg_lattice ***aglat,
1212 bool pre_existing, bool *change)
1214 gcc_checking_assert (offset >= 0);
1216 while (**aglat && (**aglat)->offset < offset)
1218 if ((**aglat)->offset + (**aglat)->size > offset)
1220 set_agg_lats_to_bottom (dest_plats);
1221 return false;
1223 *change |= set_lattice_contains_variable (**aglat);
1224 *aglat = &(**aglat)->next;
1227 if (**aglat && (**aglat)->offset == offset)
1229 if ((**aglat)->size != val_size
1230 || ((**aglat)->next
1231 && (**aglat)->next->offset < offset + val_size))
1233 set_agg_lats_to_bottom (dest_plats);
1234 return false;
1236 gcc_checking_assert (!(**aglat)->next
1237 || (**aglat)->next->offset >= offset + val_size);
1238 return true;
1240 else
1242 struct ipcp_agg_lattice *new_al;
1244 if (**aglat && (**aglat)->offset < offset + val_size)
1246 set_agg_lats_to_bottom (dest_plats);
1247 return false;
1249 if (dest_plats->aggs_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1250 return false;
1251 dest_plats->aggs_count++;
1252 new_al = (struct ipcp_agg_lattice *) pool_alloc (ipcp_agg_lattice_pool);
1253 memset (new_al, 0, sizeof (*new_al));
1255 new_al->offset = offset;
1256 new_al->size = val_size;
1257 new_al->contains_variable = pre_existing;
1259 new_al->next = **aglat;
1260 **aglat = new_al;
1261 return true;
1265 /* Mark AGLAT and all other aggregate lattices reachable through next pointers
1266    as containing an unknown value. */
1268 static bool
1269 set_chain_of_aglats_contains_variable (struct ipcp_agg_lattice *aglat)
1271 bool ret = false;
1272 while (aglat)
1274 ret |= set_lattice_contains_variable (aglat);
1275 aglat = aglat->next;
1277 return ret;
1280 /* Merge existing aggregate lattices in SRC_PLATS to DEST_PLATS, subtracting
1281    OFFSET_DELTA. CS is the call graph edge and SRC_IDX the index of the source
1282 parameter used for lattice value sources. Return true if DEST_PLATS changed
1283 in any way. */
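
/* For instance (editorial example): with OFFSET_DELTA of 32, a source lattice
   at offset 96 is merged into a destination lattice at offset 64, whereas a
   source lattice at offset 0 would map to a negative offset and is therefore
   skipped by the loop below.  */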
1285 static bool
1286 merge_aggregate_lattices (struct cgraph_edge *cs,
1287 struct ipcp_param_lattices *dest_plats,
1288 struct ipcp_param_lattices *src_plats,
1289 int src_idx, HOST_WIDE_INT offset_delta)
1291 bool pre_existing = dest_plats->aggs != NULL;
1292 struct ipcp_agg_lattice **dst_aglat;
1293 bool ret = false;
1295 if (set_check_aggs_by_ref (dest_plats, src_plats->aggs_by_ref))
1296 return true;
1297 if (src_plats->aggs_bottom)
1298 return set_agg_lats_contain_variable (dest_plats);
1299 if (src_plats->aggs_contain_variable)
1300 ret |= set_agg_lats_contain_variable (dest_plats);
1301 dst_aglat = &dest_plats->aggs;
1303 for (struct ipcp_agg_lattice *src_aglat = src_plats->aggs;
1304 src_aglat;
1305 src_aglat = src_aglat->next)
1307 HOST_WIDE_INT new_offset = src_aglat->offset - offset_delta;
1309 if (new_offset < 0)
1310 continue;
1311 if (merge_agg_lats_step (dest_plats, new_offset, src_aglat->size,
1312 &dst_aglat, pre_existing, &ret))
1314 struct ipcp_agg_lattice *new_al = *dst_aglat;
1316 dst_aglat = &(*dst_aglat)->next;
1317 if (src_aglat->bottom)
1319 ret |= set_lattice_contains_variable (new_al);
1320 continue;
1322 if (src_aglat->contains_variable)
1323 ret |= set_lattice_contains_variable (new_al);
1324 for (struct ipcp_value *val = src_aglat->values;
1325 val;
1326 val = val->next)
1327 ret |= add_value_to_lattice (new_al, val->value, cs, val, src_idx,
1328 src_aglat->offset);
1330 else if (dest_plats->aggs_bottom)
1331 return true;
1333 ret |= set_chain_of_aglats_contains_variable (*dst_aglat);
1334 return ret;
1337 /* Determine whether there is anything to propagate from SRC_PLATS through a
1338    pass-through JFUNC and if so, whether it conforms to the rules about
1339    propagating values passed by reference. */
1341 static bool
1342 agg_pass_through_permissible_p (struct ipcp_param_lattices *src_plats,
1343 struct ipa_jump_func *jfunc)
1345 return src_plats->aggs
1346 && (!src_plats->aggs_by_ref
1347 || ipa_get_jf_pass_through_agg_preserved (jfunc));
1350 /* Propagate aggregate values across jump function JFUNC that is associated
1351    with edge CS and put the values into DEST_PLATS. */
1353 static bool
1354 propagate_aggs_accross_jump_function (struct cgraph_edge *cs,
1355 struct ipa_jump_func *jfunc,
1356 struct ipcp_param_lattices *dest_plats)
1358 bool ret = false;
1360 if (dest_plats->aggs_bottom)
1361 return false;
1363 if (jfunc->type == IPA_JF_PASS_THROUGH
1364 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
1366 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1367 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
1368 struct ipcp_param_lattices *src_plats;
1370 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1371 if (agg_pass_through_permissible_p (src_plats, jfunc))
1373 /* Currently we do not produce clobber aggregate jump
1374 functions, replace with merging when we do. */
1375 gcc_assert (!jfunc->agg.items);
1376 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats,
1377 src_idx, 0);
1379 else
1380 ret |= set_agg_lats_contain_variable (dest_plats);
1382 else if (jfunc->type == IPA_JF_ANCESTOR
1383 && ipa_get_jf_ancestor_agg_preserved (jfunc))
1385 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
1386 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
1387 struct ipcp_param_lattices *src_plats;
1389 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
1390 if (src_plats->aggs && src_plats->aggs_by_ref)
1392 /* Currently we do not produce clobber aggregate jump
1393 functions, replace with merging when we do. */
1394 gcc_assert (!jfunc->agg.items);
1395 ret |= merge_aggregate_lattices (cs, dest_plats, src_plats, src_idx,
1396 ipa_get_jf_ancestor_offset (jfunc));
1398 else if (!src_plats->aggs_by_ref)
1399 ret |= set_agg_lats_to_bottom (dest_plats);
1400 else
1401 ret |= set_agg_lats_contain_variable (dest_plats);
1403 else if (jfunc->agg.items)
1405 bool pre_existing = dest_plats->aggs != NULL;
1406 struct ipcp_agg_lattice **aglat = &dest_plats->aggs;
1407 struct ipa_agg_jf_item *item;
1408 int i;
1410 if (set_check_aggs_by_ref (dest_plats, jfunc->agg.by_ref))
1411 return true;
1413 FOR_EACH_VEC_ELT (*jfunc->agg.items, i, item)
1415 HOST_WIDE_INT val_size;
1417 if (item->offset < 0)
1418 continue;
1419 gcc_checking_assert (is_gimple_ip_invariant (item->value));
1420 val_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (item->value)), 1);
1422 if (merge_agg_lats_step (dest_plats, item->offset, val_size,
1423 &aglat, pre_existing, &ret))
1425 ret |= add_value_to_lattice (*aglat, item->value, cs, NULL, 0, 0);
1426 aglat = &(*aglat)->next;
1428 else if (dest_plats->aggs_bottom)
1429 return true;
1432 ret |= set_chain_of_aglats_contains_variable (*aglat);
1434 else
1435 ret |= set_agg_lats_contain_variable (dest_plats);
1437 return ret;
1440 /* Propagate constants from the caller to the callee of CS. Return true if
1441    this changed any lattice of the callee. */
1443 static bool
1444 propagate_constants_accross_call (struct cgraph_edge *cs)
1446 struct ipa_node_params *callee_info;
1447 enum availability availability;
1448 struct cgraph_node *callee, *alias_or_thunk;
1449 struct ipa_edge_args *args;
1450 bool ret = false;
1451 int i, args_count, parms_count;
1453 callee = cgraph_function_node (cs->callee, &availability);
1454 if (!callee->analyzed)
1455 return false;
1456 gcc_checking_assert (cgraph_function_with_gimple_body_p (callee));
1457 callee_info = IPA_NODE_REF (callee);
1459 args = IPA_EDGE_REF (cs);
1460 args_count = ipa_get_cs_argument_count (args);
1461 parms_count = ipa_get_param_count (callee_info);
1463 /* If this call goes through a thunk we should not propagate because we
1464 cannot redirect edges to thunks. However, we might need to uncover a
1465 thunk from below a series of aliases first. */
1466 alias_or_thunk = cs->callee;
1467 while (alias_or_thunk->alias)
1468 alias_or_thunk = cgraph_alias_aliased_node (alias_or_thunk);
1469 if (alias_or_thunk->thunk.thunk_p)
1471 for (i = 0; i < parms_count; i++)
1472 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
1473 i));
1474 return ret;
1477 for (i = 0; (i < args_count) && (i < parms_count); i++)
1479 struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
1480 struct ipcp_param_lattices *dest_plats;
1482 dest_plats = ipa_get_parm_lattices (callee_info, i);
1483 if (availability == AVAIL_OVERWRITABLE)
1484 ret |= set_all_contains_variable (dest_plats);
1485 else
1487 ret |= propagate_scalar_accross_jump_function (cs, jump_func,
1488 &dest_plats->itself);
1489 ret |= propagate_aggs_accross_jump_function (cs, jump_func,
1490 dest_plats);
1493 for (; i < parms_count; i++)
1494 ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info, i));
1496 return ret;
1499 /* If an indirect edge IE can be turned into a direct one based on KNOWN_VALS
1500 (which can contain both constants and binfos) or KNOWN_BINFOS (which can be
1501 NULL) return the destination. */
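
/* Editorial note on the two cases handled below: for a non-polymorphic call,
   a known value of the form &some_function (an ADDR_EXPR of a FUNCTION_DECL)
   is enough to return that FUNCTION_DECL as the direct destination.  For a
   polymorphic call, a BINFO (or a constant from which one can be extracted)
   is combined with the OTR token and ancestor offset to look up the
   particular virtual method with gimple_get_virt_method_for_binfo.  */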
1503 tree
1504 ipa_get_indirect_edge_target (struct cgraph_edge *ie,
1505 vec<tree> known_vals,
1506 vec<tree> known_binfos,
1507 vec<ipa_agg_jump_function_p> known_aggs)
1509 int param_index = ie->indirect_info->param_index;
1510 HOST_WIDE_INT token, anc_offset;
1511 tree otr_type;
1512 tree t;
1514 if (param_index == -1
1515 || known_vals.length () <= (unsigned int) param_index)
1516 return NULL_TREE;
1518 if (!ie->indirect_info->polymorphic)
1520 tree t;
1522 if (ie->indirect_info->agg_contents)
1524 if (known_aggs.length ()
1525 > (unsigned int) param_index)
1527 struct ipa_agg_jump_function *agg;
1528 agg = known_aggs[param_index];
1529 t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
1530 ie->indirect_info->by_ref);
1532 else
1533 t = NULL;
1535 else
1536 t = known_vals[param_index];
1538 if (t &&
1539 TREE_CODE (t) == ADDR_EXPR
1540 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
1541 return TREE_OPERAND (t, 0);
1542 else
1543 return NULL_TREE;
1546 gcc_assert (!ie->indirect_info->agg_contents);
1547 token = ie->indirect_info->otr_token;
1548 anc_offset = ie->indirect_info->offset;
1549 otr_type = ie->indirect_info->otr_type;
1551 t = known_vals[param_index];
1552 if (!t && known_binfos.length () > (unsigned int) param_index)
1553 t = known_binfos[param_index];
1554 if (!t)
1555 return NULL_TREE;
1557 if (TREE_CODE (t) != TREE_BINFO)
1559 tree binfo;
1560 binfo = gimple_extract_devirt_binfo_from_cst (t);
1561 if (!binfo)
1562 return NULL_TREE;
1563 binfo = get_binfo_at_offset (binfo, anc_offset, otr_type);
1564 if (!binfo)
1565 return NULL_TREE;
1566 return gimple_get_virt_method_for_binfo (token, binfo);
1568 else
1570 tree binfo;
1572 binfo = get_binfo_at_offset (t, anc_offset, otr_type);
1573 if (!binfo)
1574 return NULL_TREE;
1575 return gimple_get_virt_method_for_binfo (token, binfo);
1579 /* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
1580 and KNOWN_BINFOS. */
1582 static int
1583 devirtualization_time_bonus (struct cgraph_node *node,
1584 vec<tree> known_csts,
1585 vec<tree> known_binfos)
1587 struct cgraph_edge *ie;
1588 int res = 0;
1590 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1592 struct cgraph_node *callee;
1593 struct inline_summary *isummary;
1594 tree target;
1596 target = ipa_get_indirect_edge_target (ie, known_csts, known_binfos,
1597 vNULL);
1598 if (!target)
1599 continue;
1601 /* Only bare minimum benefit for clearly un-inlineable targets. */
1602 res += 1;
1603 callee = cgraph_get_node (target);
1604 if (!callee || !callee->analyzed)
1605 continue;
1606 isummary = inline_summary (callee);
1607 if (!isummary->inlinable)
1608 continue;
1610 /* FIXME: The values below need re-considering and perhaps also
1611 	 integrating into the cost metrics, at least in some very basic way. */
1612 if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
1613 res += 31;
1614 else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
1615 res += 15;
1616 else if (isummary->size <= MAX_INLINE_INSNS_AUTO
1617 || DECL_DECLARED_INLINE_P (callee->symbol.decl))
1618 res += 7;
1621 return res;
1624 /* Return time bonus incurred because of HINTS. */
1626 static int
1627 hint_time_bonus (inline_hints hints)
1629 if (hints & (INLINE_HINT_loop_iterations | INLINE_HINT_loop_stride))
1630 return PARAM_VALUE (PARAM_IPA_CP_LOOP_HINT_BONUS);
1631 return 0;
1634 /* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
1635    and SIZE_COST, the sum of frequencies of incoming edges to the potential
1636    new clone in FREQ_SUM and the sum of their profile counts in COUNT_SUM. */
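
/* A rough worked example (editorial; the default value of
   PARAM_IPA_CP_EVAL_THRESHOLD comes from params.def): without profile data,
   TIME_BENEFIT 50, FREQ_SUM 1000 and SIZE_COST 10 give an evaluation of
   50 * 1000 / 10 = 5000, which is then compared against the threshold; a
   benefit of 1 with the same frequency and cost yields only 100 and is much
   less likely to justify a clone.  */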
1638 static bool
1639 good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
1640 int freq_sum, gcov_type count_sum, int size_cost)
1642 if (time_benefit == 0
1643 || !flag_ipa_cp_clone
1644 || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->symbol.decl)))
1645 return false;
1647 gcc_assert (size_cost > 0);
1649 if (max_count)
1651 int factor = (count_sum * 1000) / max_count;
1652 HOST_WIDEST_INT evaluation = (((HOST_WIDEST_INT) time_benefit * factor)
1653 / size_cost);
1655 if (dump_file && (dump_flags & TDF_DETAILS))
1656 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1657 "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
1658 ") -> evaluation: " HOST_WIDEST_INT_PRINT_DEC
1659 ", threshold: %i\n",
1660 time_benefit, size_cost, (HOST_WIDE_INT) count_sum,
1661 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
1663 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
1665 else
1667 HOST_WIDEST_INT evaluation = (((HOST_WIDEST_INT) time_benefit * freq_sum)
1668 / size_cost);
1670 if (dump_file && (dump_flags & TDF_DETAILS))
1671 fprintf (dump_file, " good_cloning_opportunity_p (time: %i, "
1672 "size: %i, freq_sum: %i) -> evaluation: "
1673 HOST_WIDEST_INT_PRINT_DEC ", threshold: %i\n",
1674 time_benefit, size_cost, freq_sum, evaluation,
1675 PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
1677 return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
1681 /* Return all context independent values from aggregate lattices in PLATS in a
1682 vector. Return NULL if there are none. */
1684 static vec<ipa_agg_jf_item_t, va_gc> *
1685 context_independent_aggregate_values (struct ipcp_param_lattices *plats)
1687 vec<ipa_agg_jf_item_t, va_gc> *res = NULL;
1689 if (plats->aggs_bottom
1690 || plats->aggs_contain_variable
1691 || plats->aggs_count == 0)
1692 return NULL;
1694 for (struct ipcp_agg_lattice *aglat = plats->aggs;
1695 aglat;
1696 aglat = aglat->next)
1697 if (ipa_lat_is_single_const (aglat))
1699 struct ipa_agg_jf_item item;
1700 item.offset = aglat->offset;
1701 item.value = aglat->values->value;
1702 vec_safe_push (res, item);
1704 return res;
1707 /* Allocate KNOWN_CSTS, KNOWN_BINFOS and, if non-NULL, KNOWN_AGGS and populate
1708 them with values of parameters that are known independent of the context.
1709 INFO describes the function. If REMOVABLE_PARAMS_COST is non-NULL, the
1710 movement cost of all removable parameters will be stored in it. */
1712 static bool
1713 gather_context_independent_values (struct ipa_node_params *info,
1714 vec<tree> *known_csts,
1715 vec<tree> *known_binfos,
1716 vec<ipa_agg_jump_function_t> *known_aggs,
1717 int *removable_params_cost)
1719 int i, count = ipa_get_param_count (info);
1720 bool ret = false;
1722 known_csts->create (0);
1723 known_binfos->create (0);
1724 known_csts->safe_grow_cleared (count);
1725 known_binfos->safe_grow_cleared (count);
1726 if (known_aggs)
1728 known_aggs->create (0);
1729 known_aggs->safe_grow_cleared (count);
1732 if (removable_params_cost)
1733 *removable_params_cost = 0;
1735 for (i = 0; i < count ; i++)
1737 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1738 struct ipcp_lattice *lat = &plats->itself;
1740 if (ipa_lat_is_single_const (lat))
1742 struct ipcp_value *val = lat->values;
1743 if (TREE_CODE (val->value) != TREE_BINFO)
1745 (*known_csts)[i] = val->value;
1746 if (removable_params_cost)
1747 *removable_params_cost
1748 += estimate_move_cost (TREE_TYPE (val->value));
1749 ret = true;
1751 else if (plats->virt_call)
1753 (*known_binfos)[i] = val->value;
1754 ret = true;
1756 else if (removable_params_cost
1757 && !ipa_is_param_used (info, i))
1758 *removable_params_cost
1759 += estimate_move_cost (TREE_TYPE (ipa_get_param (info, i)));
1761 else if (removable_params_cost
1762 && !ipa_is_param_used (info, i))
1763 *removable_params_cost
1764 += estimate_move_cost (TREE_TYPE (ipa_get_param (info, i)));
1766 if (known_aggs)
1768 vec<ipa_agg_jf_item_t, va_gc> *agg_items;
1769 struct ipa_agg_jump_function *ajf;
1771 agg_items = context_independent_aggregate_values (plats);
1772 ajf = &(*known_aggs)[i];
1773 ajf->items = agg_items;
1774 ajf->by_ref = plats->aggs_by_ref;
1775 ret |= agg_items != NULL;
1779 return ret;
1782 /* The current interface in ipa-inline-analysis requires a pointer vector.
1783 Create it.
1785 FIXME: That interface should be re-worked, this is slightly silly. Still,
1786 I'd like to discuss how to change it first and this demonstrates the
1787 issue. */
1789 static vec<ipa_agg_jump_function_p>
1790 agg_jmp_p_vec_for_t_vec (vec<ipa_agg_jump_function_t> known_aggs)
1792 vec<ipa_agg_jump_function_p> ret;
1793 struct ipa_agg_jump_function *ajf;
1794 int i;
1796 ret.create (known_aggs.length ());
1797 FOR_EACH_VEC_ELT (known_aggs, i, ajf)
1798 ret.quick_push (ajf);
1799 return ret;
1802 /* Iterate over known values of parameters of NODE and estimate the local
1803 effects in terms of time and size they have. */
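
/* Editorial sketch of the bookkeeping performed below: for each candidate
   value the recorded benefit is roughly

     time_benefit = base_time - estimated_time_with_the_value
		    + devirtualization_time_bonus + hint_time_bonus
		    + removable_params_cost (+ the move cost of the value)

   and the recorded size is the estimated clone size, clamped to at least 1 so
   that later evaluations never divide by zero.  */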
1805 static void
1806 estimate_local_effects (struct cgraph_node *node)
1808 struct ipa_node_params *info = IPA_NODE_REF (node);
1809 int i, count = ipa_get_param_count (info);
1810 vec<tree> known_csts, known_binfos;
1811 vec<ipa_agg_jump_function_t> known_aggs;
1812 vec<ipa_agg_jump_function_p> known_aggs_ptrs;
1813 bool always_const;
1814 int base_time = inline_summary (node)->time;
1815 int removable_params_cost;
1817 if (!count || !ipcp_versionable_function_p (node))
1818 return;
1820 if (dump_file && (dump_flags & TDF_DETAILS))
1821 fprintf (dump_file, "\nEstimating effects for %s/%i, base_time: %i.\n",
1822 cgraph_node_name (node), node->uid, base_time);
1824 always_const = gather_context_independent_values (info, &known_csts,
1825 &known_binfos, &known_aggs,
1826 &removable_params_cost);
1827 known_aggs_ptrs = agg_jmp_p_vec_for_t_vec (known_aggs);
1828 if (always_const)
1830 struct caller_statistics stats;
1831 inline_hints hints;
1832 int time, size;
1834 init_caller_stats (&stats);
1835 cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
1836 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1837 known_aggs_ptrs, &size, &time, &hints);
1838 time -= devirtualization_time_bonus (node, known_csts, known_binfos);
1839 time -= hint_time_bonus (hints);
1840 time -= removable_params_cost;
1841 size -= stats.n_calls * removable_params_cost;
1843 if (dump_file)
1844 fprintf (dump_file, " - context independent values, size: %i, "
1845 "time_benefit: %i\n", size, base_time - time);
1847 if (size <= 0
1848 || cgraph_will_be_removed_from_program_if_no_direct_calls (node))
1850 info->do_clone_for_all_contexts = true;
1851 base_time = time;
1853 if (dump_file)
1854 fprintf (dump_file, " Decided to specialize for all "
1855 "known contexts, code not going to grow.\n");
1857 else if (good_cloning_opportunity_p (node, base_time - time,
1858 stats.freq_sum, stats.count_sum,
1859 size))
1861 if (size + overall_size <= max_new_size)
1863 info->do_clone_for_all_contexts = true;
1864 base_time = time;
1865 overall_size += size;
1867 if (dump_file)
1868 fprintf (dump_file, " Decided to specialize for all "
1869 "known contexts, growth deemed beneficial.\n");
1871 else if (dump_file && (dump_flags & TDF_DETAILS))
1872 fprintf (dump_file, " Not cloning for all contexts because "
1873 "max_new_size would be reached with %li.\n",
1874 size + overall_size);
1878 for (i = 0; i < count ; i++)
1880 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1881 struct ipcp_lattice *lat = &plats->itself;
1882 struct ipcp_value *val;
1883 int emc;
1885 if (lat->bottom
1886 || !lat->values
1887 || known_csts[i]
1888 || known_binfos[i])
1889 continue;
1891 for (val = lat->values; val; val = val->next)
1893 int time, size, time_benefit;
1894 inline_hints hints;
1896 if (TREE_CODE (val->value) != TREE_BINFO)
1898 known_csts[i] = val->value;
1899 known_binfos[i] = NULL_TREE;
1900 emc = estimate_move_cost (TREE_TYPE (val->value));
1902 else if (plats->virt_call)
1904 known_csts[i] = NULL_TREE;
1905 known_binfos[i] = val->value;
1906 emc = 0;
1908 else
1909 continue;
1911 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1912 known_aggs_ptrs, &size, &time,
1913 &hints);
1914 time_benefit = base_time - time
1915 + devirtualization_time_bonus (node, known_csts, known_binfos)
1916 + hint_time_bonus (hints)
1917 + removable_params_cost + emc;
1919 gcc_checking_assert (size >= 0);
1920 /* The estimates based on the inliner heuristics may claim that in
1921 certain contexts a function has no size at all, but we want every
1922 specialization to have at least a tiny cost, if nothing else to
1923 avoid dividing by zero. */
1924 if (size == 0)
1925 size = 1;
1927 if (dump_file && (dump_flags & TDF_DETAILS))
1929 fprintf (dump_file, " - estimates for value ");
1930 print_ipcp_constant_value (dump_file, val->value);
1931 fprintf (dump_file, " for parameter ");
1932 print_generic_expr (dump_file, ipa_get_param (info, i), 0);
1933 fprintf (dump_file, ": time_benefit: %i, size: %i\n",
1934 time_benefit, size);
1937 val->local_time_benefit = time_benefit;
1938 val->local_size_cost = size;
1940 known_binfos[i] = NULL_TREE;
1941 known_csts[i] = NULL_TREE;
1944 for (i = 0; i < count ; i++)
1946 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
1947 struct ipa_agg_jump_function *ajf;
1948 struct ipcp_agg_lattice *aglat;
1950 if (plats->aggs_bottom || !plats->aggs)
1951 continue;
1953 ajf = &known_aggs[i];
1954 for (aglat = plats->aggs; aglat; aglat = aglat->next)
1956 struct ipcp_value *val;
1957 if (aglat->bottom || !aglat->values
1958 /* If the following is true, the one value is in known_aggs. */
1959 || (!plats->aggs_contain_variable
1960 && ipa_lat_is_single_const (aglat)))
1961 continue;
1963 for (val = aglat->values; val; val = val->next)
1965 int time, size, time_benefit;
1966 struct ipa_agg_jf_item item;
1967 inline_hints hints;
1969 item.offset = aglat->offset;
1970 item.value = val->value;
1971 vec_safe_push (ajf->items, item);
1973 estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
1974 known_aggs_ptrs, &size, &time,
1975 &hints);
1976 time_benefit = base_time - time
1977 + devirtualization_time_bonus (node, known_csts, known_binfos)
1978 + hint_time_bonus (hints);
1979 gcc_checking_assert (size >= 0);
1980 if (size == 0)
1981 size = 1;
1983 if (dump_file && (dump_flags & TDF_DETAILS))
1985 fprintf (dump_file, " - estimates for value ");
1986 print_ipcp_constant_value (dump_file, val->value);
1987 fprintf (dump_file, " for parameter ");
1988 print_generic_expr (dump_file, ipa_get_param (info, i), 0);
1989 fprintf (dump_file, "[%soffset: " HOST_WIDE_INT_PRINT_DEC
1990 "]: time_benefit: %i, size: %i\n",
1991 plats->aggs_by_ref ? "ref " : "",
1992 aglat->offset, time_benefit, size);
1995 val->local_time_benefit = time_benefit;
1996 val->local_size_cost = size;
1997 ajf->items->pop ();
2002 for (i = 0; i < count ; i++)
2003 vec_free (known_aggs[i].items);
2005 known_csts.release ();
2006 known_binfos.release ();
2007 known_aggs.release ();
2008 known_aggs_ptrs.release ();
2012 /* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
2013 topological sort of values. */
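/* This is essentially Tarjan's strongly connected components algorithm, run
over the graph in which the values are nodes and their source dependencies
are edges.  Each completed SCC is chained through scc_next and its
representative is prepended to the values_topo list that propagate_effects
walks later.  */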
2015 static void
2016 add_val_to_toposort (struct ipcp_value *cur_val)
2018 static int dfs_counter = 0;
2019 static struct ipcp_value *stack;
2020 struct ipcp_value_source *src;
2022 if (cur_val->dfs)
2023 return;
2025 dfs_counter++;
2026 cur_val->dfs = dfs_counter;
2027 cur_val->low_link = dfs_counter;
2029 cur_val->topo_next = stack;
2030 stack = cur_val;
2031 cur_val->on_stack = true;
2033 for (src = cur_val->sources; src; src = src->next)
2034 if (src->val)
2036 if (src->val->dfs == 0)
2038 add_val_to_toposort (src->val);
2039 if (src->val->low_link < cur_val->low_link)
2040 cur_val->low_link = src->val->low_link;
2042 else if (src->val->on_stack
2043 && src->val->dfs < cur_val->low_link)
2044 cur_val->low_link = src->val->dfs;
2047 if (cur_val->dfs == cur_val->low_link)
2049 struct ipcp_value *v, *scc_list = NULL;
2053 v = stack;
2054 stack = v->topo_next;
2055 v->on_stack = false;
2057 v->scc_next = scc_list;
2058 scc_list = v;
2060 while (v != cur_val);
2062 cur_val->topo_next = values_topo;
2063 values_topo = cur_val;
2067 /* Add all values in lattices associated with NODE to the topological sort if
2068 they are not there yet. */
2070 static void
2071 add_all_node_vals_to_toposort (struct cgraph_node *node)
2073 struct ipa_node_params *info = IPA_NODE_REF (node);
2074 int i, count = ipa_get_param_count (info);
2076 for (i = 0; i < count ; i++)
2078 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
2079 struct ipcp_lattice *lat = &plats->itself;
2080 struct ipcp_agg_lattice *aglat;
2081 struct ipcp_value *val;
2083 if (!lat->bottom)
2084 for (val = lat->values; val; val = val->next)
2085 add_val_to_toposort (val);
2087 if (!plats->aggs_bottom)
2088 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2089 if (!aglat->bottom)
2090 for (val = aglat->values; val; val = val->next)
2091 add_val_to_toposort (val);
2095 /* One pass of constant propagation along the call graph edges, from callers
2096 to callees (requires topological ordering in TOPO); iterates over strongly
2097 connected components. */
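/* Note that every SCC is processed in two steps: first, the lattices of all
nodes in the SCC are iterated to a fixed point using the node stack in TOPO;
only then are constants propagated along the edges leaving the SCC and the
local effects of the newly discovered values estimated.  */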
2099 static void
2100 propagate_constants_topo (struct topo_info *topo)
2102 int i;
2104 for (i = topo->nnodes - 1; i >= 0; i--)
2106 struct cgraph_node *v, *node = topo->order[i];
2107 struct ipa_dfs_info *node_dfs_info;
2109 if (!cgraph_function_with_gimple_body_p (node))
2110 continue;
2112 node_dfs_info = (struct ipa_dfs_info *) node->symbol.aux;
2113 /* First, iteratively propagate within the strongly connected component
2114 until all lattices stabilize. */
2115 v = node_dfs_info->next_cycle;
2116 while (v)
2118 push_node_to_stack (topo, v);
2119 v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle;
2122 v = node;
2123 while (v)
2125 struct cgraph_edge *cs;
2127 for (cs = v->callees; cs; cs = cs->next_callee)
2128 if (edge_within_scc (cs)
2129 && propagate_constants_accross_call (cs))
2130 push_node_to_stack (topo, cs->callee);
2131 v = pop_node_from_stack (topo);
2134 /* Afterwards, propagate along the edges leading out of the SCC, calculate
2135 the local effects of the discovered constants and add all valid values
2136 to their topological sort. */
2137 v = node;
2138 while (v)
2140 struct cgraph_edge *cs;
2142 estimate_local_effects (v);
2143 add_all_node_vals_to_toposort (v);
2144 for (cs = v->callees; cs; cs = cs->next_callee)
2145 if (!edge_within_scc (cs))
2146 propagate_constants_accross_call (cs);
2148 v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle;
2154 /* Return the sum of A and B if neither of them is bigger than INT_MAX/2,
2155 otherwise return the bigger one. */
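/* For example, safe_add (INT_MAX, 5) yields INT_MAX rather than
overflowing.  */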
2157 static int
2158 safe_add (int a, int b)
2160 if (a > INT_MAX/2 || b > INT_MAX/2)
2161 return a > b ? a : b;
2162 else
2163 return a + b;
2167 /* Propagate the estimated effects of individual values along the topological
2168 order from the dependent values to those they depend on. */
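/* Within each SCC of values, the local time benefits and size costs are first
summed up, and the total is then added to the propagated benefit and cost of
every value that a member of the SCC depends on through a potentially hot
call graph edge.  The benefit attributed to a value thus eventually includes
the benefits of everything it transitively enables.  */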
2170 static void
2171 propagate_effects (void)
2173 struct ipcp_value *base;
2175 for (base = values_topo; base; base = base->topo_next)
2177 struct ipcp_value_source *src;
2178 struct ipcp_value *val;
2179 int time = 0, size = 0;
2181 for (val = base; val; val = val->scc_next)
2183 time = safe_add (time,
2184 val->local_time_benefit + val->prop_time_benefit);
2185 size = safe_add (size, val->local_size_cost + val->prop_size_cost);
2188 for (val = base; val; val = val->scc_next)
2189 for (src = val->sources; src; src = src->next)
2190 if (src->val
2191 && cgraph_maybe_hot_edge_p (src->cs))
2193 src->val->prop_time_benefit = safe_add (time,
2194 src->val->prop_time_benefit);
2195 src->val->prop_size_cost = safe_add (size,
2196 src->val->prop_size_cost);
2202 /* Propagate constants, binfos and their effects from the summaries
2203 interprocedurally. */
2205 static void
2206 ipcp_propagate_stage (struct topo_info *topo)
2208 struct cgraph_node *node;
2210 if (dump_file)
2211 fprintf (dump_file, "\n Propagating constants:\n\n");
2213 if (in_lto_p)
2214 ipa_update_after_lto_read ();
2217 FOR_EACH_DEFINED_FUNCTION (node)
2219 struct ipa_node_params *info = IPA_NODE_REF (node);
2221 determine_versionability (node);
2222 if (cgraph_function_with_gimple_body_p (node))
2224 info->lattices = XCNEWVEC (struct ipcp_param_lattices,
2225 ipa_get_param_count (info));
2226 initialize_node_lattices (node);
2228 if (node->count > max_count)
2229 max_count = node->count;
2230 overall_size += inline_summary (node)->self_size;
2233 max_new_size = overall_size;
2234 if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
2235 max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
2236 max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;
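/* In other words, new clones may be created until the overall unit size
reaches roughly the larger of the current size and PARAM_LARGE_UNIT_INSNS,
increased by PARAM_IPCP_UNIT_GROWTH percent.  */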
2238 if (dump_file)
2239 fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
2240 overall_size, max_new_size);
2242 propagate_constants_topo (topo);
2243 #ifdef ENABLE_CHECKING
2244 ipcp_verify_propagated_values ();
2245 #endif
2246 propagate_effects ();
2248 if (dump_file)
2250 fprintf (dump_file, "\nIPA lattices after all propagation:\n");
2251 print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
2255 /* Discover which indirect calls of NODE, a new clone with known KNOWN_VALS,
2256 can now be resolved, and turn them into direct edges. */
2258 static void
2259 ipcp_discover_new_direct_edges (struct cgraph_node *node,
2260 vec<tree> known_vals)
2262 struct cgraph_edge *ie, *next_ie;
2263 bool found = false;
2265 for (ie = node->indirect_calls; ie; ie = next_ie)
2267 tree target;
2269 next_ie = ie->next_callee;
2270 target = ipa_get_indirect_edge_target (ie, known_vals, vNULL, vNULL);
2271 if (target)
2273 ipa_make_edge_direct_to_target (ie, target);
2274 found = true;
2277 /* Turning indirect calls into direct calls will improve the overall summary. */
2278 if (found)
2279 inline_update_overall_summary (node);
2282 /* Vector of pointers which form linked lists of clones of an original cgraph
2283 edge. */
2285 static vec<cgraph_edge_p> next_edge_clone;
2287 static inline void
2288 grow_next_edge_clone_vector (void)
2290 if (next_edge_clone.length ()
2291 <= (unsigned) cgraph_edge_max_uid)
2292 next_edge_clone.safe_grow_cleared (cgraph_edge_max_uid + 1);
2295 /* Edge duplication hook to grow the appropriate linked list in
2296 next_edge_clone. */
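/* Together with get_next_cgraph_edge_clone below, this maintains for every
original edge a uid-indexed singly linked list of all its clones; a newly
created clone is linked in right after the edge it was cloned from.  */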
2298 static void
2299 ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2300 __attribute__((unused)) void *data)
2302 grow_next_edge_clone_vector ();
2303 next_edge_clone[dst->uid] = next_edge_clone[src->uid];
2304 next_edge_clone[src->uid] = dst;
2307 /* See if NODE is a clone with a known aggregate value at a given OFFSET of a
2308 parameter with the given INDEX. */
2310 static tree
2311 get_clone_agg_value (struct cgraph_node *node, HOST_WIDEST_INT offset,
2312 int index)
2314 struct ipa_agg_replacement_value *aggval;
2316 aggval = ipa_get_agg_replacements_for_node (node);
2317 while (aggval)
2319 if (aggval->offset == offset
2320 && aggval->index == index)
2321 return aggval->value;
2322 aggval = aggval->next;
2324 return NULL_TREE;
2327 /* Return true if edge CS does bring about the value described by SRC. */
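/* Two situations are distinguished below: if the caller is itself an IPA-CP
clone, the value it passes is looked up in its known_vals or in its scheduled
aggregate replacements; otherwise the caller's lattice for the parameter must
have settled on exactly this single value.  */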
2329 static bool
2330 cgraph_edge_brings_value_p (struct cgraph_edge *cs,
2331 struct ipcp_value_source *src)
2333 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2334 struct ipa_node_params *dst_info = IPA_NODE_REF (cs->callee);
2336 if ((dst_info->ipcp_orig_node && !dst_info->is_all_contexts_clone)
2337 || caller_info->node_dead)
2338 return false;
2339 if (!src->val)
2340 return true;
2342 if (caller_info->ipcp_orig_node)
2344 tree t;
2345 if (src->offset == -1)
2346 t = caller_info->known_vals[src->index];
2347 else
2348 t = get_clone_agg_value (cs->caller, src->offset, src->index);
2349 return (t != NULL_TREE
2350 && values_equal_for_ipcp_p (src->val->value, t));
2352 else
2354 struct ipcp_agg_lattice *aglat;
2355 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
2356 src->index);
2357 if (src->offset == -1)
2358 return (ipa_lat_is_single_const (&plats->itself)
2359 && values_equal_for_ipcp_p (src->val->value,
2360 plats->itself.values->value));
2361 else
2363 if (plats->aggs_bottom || plats->aggs_contain_variable)
2364 return false;
2365 for (aglat = plats->aggs; aglat; aglat = aglat->next)
2366 if (aglat->offset == src->offset)
2367 return (ipa_lat_is_single_const (aglat)
2368 && values_equal_for_ipcp_p (src->val->value,
2369 aglat->values->value));
2371 return false;
2375 /* Get the next clone in the linked list of clones of an edge. */
2377 static inline struct cgraph_edge *
2378 get_next_cgraph_edge_clone (struct cgraph_edge *cs)
2380 return next_edge_clone[cs->uid];
2383 /* Given VAL, iterate over all its sources and if they still hold, add their
2384 edge frequencies, profile counts and their number into *FREQ_SUM, *COUNT_SUM
2385 and *CALLER_COUNT respectively.  Return true if any of them is hot. */
2387 static bool
2388 get_info_about_necessary_edges (struct ipcp_value *val, int *freq_sum,
2389 gcov_type *count_sum, int *caller_count)
2391 struct ipcp_value_source *src;
2392 int freq = 0, count = 0;
2393 gcov_type cnt = 0;
2394 bool hot = false;
2396 for (src = val->sources; src; src = src->next)
2398 struct cgraph_edge *cs = src->cs;
2399 while (cs)
2401 if (cgraph_edge_brings_value_p (cs, src))
2403 count++;
2404 freq += cs->frequency;
2405 cnt += cs->count;
2406 hot |= cgraph_maybe_hot_edge_p (cs);
2408 cs = get_next_cgraph_edge_clone (cs);
2412 *freq_sum = freq;
2413 *count_sum = cnt;
2414 *caller_count = count;
2415 return hot;
2418 /* Return a vector of incoming edges that do bring value VAL. It is assumed
2419 their number is known and equal to CALLER_COUNT. */
2421 static vec<cgraph_edge_p>
2422 gather_edges_for_value (struct ipcp_value *val, int caller_count)
2424 struct ipcp_value_source *src;
2425 vec<cgraph_edge_p> ret;
2427 ret.create (caller_count);
2428 for (src = val->sources; src; src = src->next)
2430 struct cgraph_edge *cs = src->cs;
2431 while (cs)
2433 if (cgraph_edge_brings_value_p (cs, src))
2434 ret.quick_push (cs);
2435 cs = get_next_cgraph_edge_clone (cs);
2439 return ret;
2442 /* Construct a replacement map for a known VALUE for a formal parameter PARAM.
2443 Return it or NULL if for some reason it cannot be created. */
2445 static struct ipa_replace_map *
2446 get_replacement_map (tree value, tree parm)
2448 tree req_type = TREE_TYPE (parm);
2449 struct ipa_replace_map *replace_map;
2451 if (!useless_type_conversion_p (req_type, TREE_TYPE (value)))
2453 if (fold_convertible_p (req_type, value))
2454 value = fold_build1 (NOP_EXPR, req_type, value);
2455 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (value)))
2456 value = fold_build1 (VIEW_CONVERT_EXPR, req_type, value);
2457 else
2459 if (dump_file)
2461 fprintf (dump_file, " const ");
2462 print_generic_expr (dump_file, value, 0);
2463 fprintf (dump_file, " can't be converted to param ");
2464 print_generic_expr (dump_file, parm, 0);
2465 fprintf (dump_file, "\n");
2467 return NULL;
2471 replace_map = ggc_alloc_ipa_replace_map ();
2472 if (dump_file)
2474 fprintf (dump_file, " replacing param ");
2475 print_generic_expr (dump_file, parm, 0);
2476 fprintf (dump_file, " with const ");
2477 print_generic_expr (dump_file, value, 0);
2478 fprintf (dump_file, "\n");
2480 replace_map->old_tree = parm;
2481 replace_map->new_tree = value;
2482 replace_map->replace_p = true;
2483 replace_map->ref_p = false;
2485 return replace_map;
2488 /* Dump new profiling counts. */
2490 static void
2491 dump_profile_updates (struct cgraph_node *orig_node,
2492 struct cgraph_node *new_node)
2494 struct cgraph_edge *cs;
2496 fprintf (dump_file, " setting count of the specialized node to "
2497 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) new_node->count);
2498 for (cs = new_node->callees; cs ; cs = cs->next_callee)
2499 fprintf (dump_file, " edge to %s has count "
2500 HOST_WIDE_INT_PRINT_DEC "\n",
2501 cgraph_node_name (cs->callee), (HOST_WIDE_INT) cs->count);
2503 fprintf (dump_file, " setting count of the original node to "
2504 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) orig_node->count);
2505 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
2506 fprintf (dump_file, " edge to %s is left with "
2507 HOST_WIDE_INT_PRINT_DEC "\n",
2508 cgraph_node_name (cs->callee), (HOST_WIDE_INT) cs->count);
2511 /* After a specialized NEW_NODE version of ORIG_NODE has been created, update
2512 their profile information to reflect this. */
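/* The counts are split so that NEW_NODE receives the summed counts of the
edges that now call it while ORIG_NODE keeps the remainder; the counts of
their outgoing edges are then scaled by the corresponding ratios, expressed
as fixed-point fractions of REG_BR_PROB_BASE.  If the incoming counts exceed
the recorded count of ORIG_NODE, which can happen with imprecise profiles,
the count used for the computation is bumped to 120% of the incoming sum.  */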
2514 static void
2515 update_profiling_info (struct cgraph_node *orig_node,
2516 struct cgraph_node *new_node)
2518 struct cgraph_edge *cs;
2519 struct caller_statistics stats;
2520 gcov_type new_sum, orig_sum;
2521 gcov_type remainder, orig_node_count = orig_node->count;
2523 if (orig_node_count == 0)
2524 return;
2526 init_caller_stats (&stats);
2527 cgraph_for_node_and_aliases (orig_node, gather_caller_stats, &stats, false);
2528 orig_sum = stats.count_sum;
2529 init_caller_stats (&stats);
2530 cgraph_for_node_and_aliases (new_node, gather_caller_stats, &stats, false);
2531 new_sum = stats.count_sum;
2533 if (orig_node_count < orig_sum + new_sum)
2535 if (dump_file)
2536 fprintf (dump_file, " Problem: node %s/%i has too low count "
2537 HOST_WIDE_INT_PRINT_DEC " while the sum of incoming "
2538 "counts is " HOST_WIDE_INT_PRINT_DEC "\n",
2539 cgraph_node_name (orig_node), orig_node->uid,
2540 (HOST_WIDE_INT) orig_node_count,
2541 (HOST_WIDE_INT) (orig_sum + new_sum));
2543 orig_node_count = (orig_sum + new_sum) * 12 / 10;
2544 if (dump_file)
2545 fprintf (dump_file, " proceeding by pretending it was "
2546 HOST_WIDE_INT_PRINT_DEC "\n",
2547 (HOST_WIDE_INT) orig_node_count);
2550 new_node->count = new_sum;
2551 remainder = orig_node_count - new_sum;
2552 orig_node->count = remainder;
2554 for (cs = new_node->callees; cs ; cs = cs->next_callee)
2555 if (cs->frequency)
2556 cs->count = cs->count * (new_sum * REG_BR_PROB_BASE
2557 / orig_node_count) / REG_BR_PROB_BASE;
2558 else
2559 cs->count = 0;
2561 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
2562 cs->count = cs->count * (remainder * REG_BR_PROB_BASE
2563 / orig_node_count) / REG_BR_PROB_BASE;
2565 if (dump_file)
2566 dump_profile_updates (orig_node, new_node);
2569 /* Update the respective profile of specialized NEW_NODE and the original
2570 ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
2571 have been redirected to the specialized version. */
2573 static void
2574 update_specialized_profile (struct cgraph_node *new_node,
2575 struct cgraph_node *orig_node,
2576 gcov_type redirected_sum)
2578 struct cgraph_edge *cs;
2579 gcov_type new_node_count, orig_node_count = orig_node->count;
2581 if (dump_file)
2582 fprintf (dump_file, " the sum of counts of redirected edges is "
2583 HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) redirected_sum);
2584 if (orig_node_count == 0)
2585 return;
2587 gcc_assert (orig_node_count >= redirected_sum);
2589 new_node_count = new_node->count;
2590 new_node->count += redirected_sum;
2591 orig_node->count -= redirected_sum;
2593 for (cs = new_node->callees; cs ; cs = cs->next_callee)
2594 if (cs->frequency)
2595 cs->count += cs->count * redirected_sum / new_node_count;
2596 else
2597 cs->count = 0;
2599 for (cs = orig_node->callees; cs ; cs = cs->next_callee)
2601 gcov_type dec = cs->count * (redirected_sum * REG_BR_PROB_BASE
2602 / orig_node_count) / REG_BR_PROB_BASE;
2603 if (dec < cs->count)
2604 cs->count -= dec;
2605 else
2606 cs->count = 0;
2609 if (dump_file)
2610 dump_profile_updates (orig_node, new_node);
2613 /* Create a specialized version of NODE with known constants and types of
2614 parameters in KNOWN_VALS and redirect all edges in CALLERS to it. */
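/* In order, this function computes which formal parameters the clone can
drop (unused ones and those replaced by a known scalar, provided the
signature may change), builds tree replacement maps for the known scalars,
creates the virtual clone itself, attaches the aggregate replacements in
AGGVALS, splits the profile between the two nodes, records the clone's
origin and known values in its ipa_node_params, and finally turns any newly
resolvable indirect calls into direct ones.  */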
2616 static struct cgraph_node *
2617 create_specialized_node (struct cgraph_node *node,
2618 vec<tree> known_vals,
2619 struct ipa_agg_replacement_value *aggvals,
2620 vec<cgraph_edge_p> callers)
2622 struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
2623 vec<ipa_replace_map_p, va_gc> *replace_trees = NULL;
2624 struct cgraph_node *new_node;
2625 int i, count = ipa_get_param_count (info);
2626 bitmap args_to_skip;
2628 gcc_assert (!info->ipcp_orig_node);
2630 if (node->local.can_change_signature)
2632 args_to_skip = BITMAP_GGC_ALLOC ();
2633 for (i = 0; i < count; i++)
2635 tree t = known_vals[i];
2637 if ((t && TREE_CODE (t) != TREE_BINFO)
2638 || !ipa_is_param_used (info, i))
2639 bitmap_set_bit (args_to_skip, i);
2642 else
2644 args_to_skip = NULL;
2645 if (dump_file && (dump_flags & TDF_DETAILS))
2646 fprintf (dump_file, " cannot change function signature\n");
2649 for (i = 0; i < count ; i++)
2651 tree t = known_vals[i];
2652 if (t && TREE_CODE (t) != TREE_BINFO)
2654 struct ipa_replace_map *replace_map;
2656 replace_map = get_replacement_map (t, ipa_get_param (info, i));
2657 if (replace_map)
2658 vec_safe_push (replace_trees, replace_map);
2662 new_node = cgraph_create_virtual_clone (node, callers, replace_trees,
2663 args_to_skip, "constprop");
2664 ipa_set_node_agg_value_chain (new_node, aggvals);
2665 if (dump_file && (dump_flags & TDF_DETAILS))
2667 fprintf (dump_file, " the new node is %s/%i.\n",
2668 cgraph_node_name (new_node), new_node->uid);
2669 if (aggvals)
2670 ipa_dump_agg_replacement_values (dump_file, aggvals);
2672 gcc_checking_assert (ipa_node_params_vector.exists ()
2673 && (ipa_node_params_vector.length ()
2674 > (unsigned) cgraph_max_uid));
2675 update_profiling_info (node, new_node);
2676 new_info = IPA_NODE_REF (new_node);
2677 new_info->ipcp_orig_node = node;
2678 new_info->known_vals = known_vals;
2680 ipcp_discover_new_direct_edges (new_node, known_vals);
2682 callers.release ();
2683 return new_node;
2686 /* Given a NODE, and a subset of its CALLERS, try to populate blank slots in
2687 KNOWN_VALS with constants and types that are also known for all of the
2688 CALLERS. */
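/* A blank slot is filled in only if every edge in CALLERS passes enough
arguments and all of them supply the same constant for that parameter; a
single disagreeing or unknown caller leaves the slot empty.  */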
2690 static void
2691 find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
2692 vec<tree> known_vals,
2693 vec<cgraph_edge_p> callers)
2695 struct ipa_node_params *info = IPA_NODE_REF (node);
2696 int i, count = ipa_get_param_count (info);
2698 for (i = 0; i < count ; i++)
2700 struct cgraph_edge *cs;
2701 tree newval = NULL_TREE;
2702 int j;
2704 if (ipa_get_scalar_lat (info, i)->bottom || known_vals[i])
2705 continue;
2707 FOR_EACH_VEC_ELT (callers, j, cs)
2709 struct ipa_jump_func *jump_func;
2710 tree t;
2712 if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
2714 newval = NULL_TREE;
2715 break;
2717 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
2718 t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func);
2719 if (!t
2720 || (newval
2721 && !values_equal_for_ipcp_p (t, newval)))
2723 newval = NULL_TREE;
2724 break;
2726 else
2727 newval = t;
2730 if (newval)
2732 if (dump_file && (dump_flags & TDF_DETAILS))
2734 fprintf (dump_file, " adding an extra known scalar value ");
2735 print_ipcp_constant_value (dump_file, newval);
2736 fprintf (dump_file, " for parameter ");
2737 print_generic_expr (dump_file, ipa_get_param (info, i), 0);
2738 fprintf (dump_file, "\n");
2741 known_vals[i] = newval;
2746 /* Go through PLATS and create a vector of items consisting of the values and
2747 offsets (minus OFFSET) of those lattices that contain only a single value. */
2749 static vec<ipa_agg_jf_item_t>
2750 copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
2752 vec<ipa_agg_jf_item_t> res = vNULL;
2754 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
2755 return vNULL;
2757 for (struct ipcp_agg_lattice *aglat = plats->aggs; aglat; aglat = aglat->next)
2758 if (ipa_lat_is_single_const (aglat))
2760 struct ipa_agg_jf_item ti;
2761 ti.offset = aglat->offset - offset;
2762 ti.value = aglat->values->value;
2763 res.safe_push (ti);
2765 return res;
2768 /* Intersect all values in INTER with single value lattices in PLATS (while
2769 subtracting OFFSET). */
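/* The single forward walk over the aggregate lattices below relies on both
INTER and the lattice list being ordered by increasing offset.  Items for
which no lattice at the same offset holds an equal value have their value
cleared.  */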
2771 static void
2772 intersect_with_plats (struct ipcp_param_lattices *plats,
2773 vec<ipa_agg_jf_item_t> *inter,
2774 HOST_WIDE_INT offset)
2776 struct ipcp_agg_lattice *aglat;
2777 struct ipa_agg_jf_item *item;
2778 int k;
2780 if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
2782 inter->release ();
2783 return;
2786 aglat = plats->aggs;
2787 FOR_EACH_VEC_ELT (*inter, k, item)
2789 bool found = false;
2790 if (!item->value)
2791 continue;
2792 while (aglat)
2794 if (aglat->offset - offset > item->offset)
2795 break;
2796 if (aglat->offset - offset == item->offset)
2798 gcc_checking_assert (item->value);
2799 if (values_equal_for_ipcp_p (item->value, aglat->values->value))
2800 found = true;
2801 break;
2803 aglat = aglat->next;
2805 if (!found)
2806 item->value = NULL_TREE;
2810 /* Copy aggregate replacement values of NODE (which is an IPA-CP clone) to the
2811 vector result while subtracting OFFSET from the individual value offsets. */
2813 static vec<ipa_agg_jf_item_t>
2814 agg_replacements_to_vector (struct cgraph_node *node, int index,
2815 HOST_WIDE_INT offset)
2817 struct ipa_agg_replacement_value *av;
2818 vec<ipa_agg_jf_item_t> res = vNULL;
2820 for (av = ipa_get_agg_replacements_for_node (node); av; av = av->next)
2821 if (av->index == index
2822 && (av->offset - offset) >= 0)
2824 struct ipa_agg_jf_item item;
2825 gcc_checking_assert (av->value);
2826 item.offset = av->offset - offset;
2827 item.value = av->value;
2828 res.safe_push (item);
2831 return res;
2834 /* Intersect all values in INTER with those that we have already scheduled to
2835 be replaced in parameter number INDEX of NODE, which is an IPA-CP clone
2836 (while subtracting OFFSET). */
2838 static void
2839 intersect_with_agg_replacements (struct cgraph_node *node, int index,
2840 vec<ipa_agg_jf_item_t> *inter,
2841 HOST_WIDE_INT offset)
2843 struct ipa_agg_replacement_value *srcvals;
2844 struct ipa_agg_jf_item *item;
2845 int i;
2847 srcvals = ipa_get_agg_replacements_for_node (node);
2848 if (!srcvals)
2850 inter->release ();
2851 return;
2854 FOR_EACH_VEC_ELT (*inter, i, item)
2856 struct ipa_agg_replacement_value *av;
2857 bool found = false;
2858 if (!item->value)
2859 continue;
2860 for (av = srcvals; av; av = av->next)
2862 gcc_checking_assert (av->value);
2863 if (av->index == index
2864 && av->offset - offset == item->offset)
2866 if (values_equal_for_ipcp_p (item->value, av->value))
2867 found = true;
2868 break;
2871 if (!found)
2872 item->value = NULL_TREE;
2876 /* Intersect values in INTER with aggregate values that come along edge CS to
2877 parameter number INDEX and return it. If INTER does not actually exist yet,
2878 copy all incoming values to it. If we determine we ended up with no values
2879 whatsoever, return a released vector. */
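/* Four cases are handled below: a simple pass-through (NOP) of another
parameter, an ancestor jump function that preserves the aggregate contents
(with an offset adjustment), an aggregate jump function carrying explicit
items, and everything else, which yields an empty intersection.  For the
first two, the source of the values is either the caller's own lattices or,
if the caller is already an IPA-CP clone, its scheduled aggregate
replacements.  */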
2881 static vec<ipa_agg_jf_item_t>
2882 intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
2883 vec<ipa_agg_jf_item_t> inter)
2885 struct ipa_jump_func *jfunc;
2886 jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), index);
2887 if (jfunc->type == IPA_JF_PASS_THROUGH
2888 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2890 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2891 int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
2893 if (caller_info->ipcp_orig_node)
2895 struct cgraph_node *orig_node = caller_info->ipcp_orig_node;
2896 struct ipcp_param_lattices *orig_plats;
2897 orig_plats = ipa_get_parm_lattices (IPA_NODE_REF (orig_node),
2898 src_idx);
2899 if (agg_pass_through_permissible_p (orig_plats, jfunc))
2901 if (!inter.exists ())
2902 inter = agg_replacements_to_vector (cs->caller, src_idx, 0);
2903 else
2904 intersect_with_agg_replacements (cs->caller, src_idx,
2905 &inter, 0);
2907 else
2909 inter.release ();
2910 return vNULL;
2913 else
2915 struct ipcp_param_lattices *src_plats;
2916 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
2917 if (agg_pass_through_permissible_p (src_plats, jfunc))
2919 /* Currently we do not produce clobber aggregate jump
2920 functions; adjust this code when we do. */
2921 gcc_checking_assert (!jfunc->agg.items);
2922 if (!inter.exists ())
2923 inter = copy_plats_to_inter (src_plats, 0);
2924 else
2925 intersect_with_plats (src_plats, &inter, 0);
2927 else
2929 inter.release ();
2930 return vNULL;
2934 else if (jfunc->type == IPA_JF_ANCESTOR
2935 && ipa_get_jf_ancestor_agg_preserved (jfunc))
2937 struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
2938 int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
2939 struct ipcp_param_lattices *src_plats;
2940 HOST_WIDE_INT delta = ipa_get_jf_ancestor_offset (jfunc);
2942 if (caller_info->ipcp_orig_node)
2944 if (!inter.exists ())
2945 inter = agg_replacements_to_vector (cs->caller, src_idx, delta);
2946 else
2947 intersect_with_agg_replacements (cs->caller, src_idx, &inter,
2948 delta);
2950 else
2952 src_plats = ipa_get_parm_lattices (caller_info, src_idx);
2953 /* Currently we do not produce clobber aggregate jump
2954 functions; adjust this code when we do. */
2955 gcc_checking_assert (!src_plats->aggs || !jfunc->agg.items);
2956 if (!inter.exists ())
2957 inter = copy_plats_to_inter (src_plats, delta);
2958 else
2959 intersect_with_plats (src_plats, &inter, delta);
2962 else if (jfunc->agg.items)
2964 struct ipa_agg_jf_item *item;
2965 int k;
2967 if (!inter.exists ())
2968 for (unsigned i = 0; i < jfunc->agg.items->length (); i++)
2969 inter.safe_push ((*jfunc->agg.items)[i]);
2970 else
2971 FOR_EACH_VEC_ELT (inter, k, item)
2973 int l = 0;
2974 bool found = false;
2976 if (!item->value)
2977 continue;
2979 while ((unsigned) l < jfunc->agg.items->length ())
2981 struct ipa_agg_jf_item *ti;
2982 ti = &(*jfunc->agg.items)[l];
2983 if (ti->offset > item->offset)
2984 break;
2985 if (ti->offset == item->offset)
2987 gcc_checking_assert (ti->value);
2988 if (values_equal_for_ipcp_p (item->value,
2989 ti->value))
2990 found = true;
2991 break;
2993 l++;
2995 if (!found)
2996 item->value = NULL;
2999 else
3001 inter.release ();
3002 return vec<ipa_agg_jf_item_t>();
3004 return inter;
3007 /* Look at edges in CALLERS and collect all known aggregate values that arrive
3008 from all of them. */
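/* For every parameter, the aggregate values arriving over the individual
edges in CALLERS are intersected; only the items that all callers agree on
survive and are turned into ipa_agg_replacement_value entries.  */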
3010 static struct ipa_agg_replacement_value *
3011 find_aggregate_values_for_callers_subset (struct cgraph_node *node,
3012 vec<cgraph_edge_p> callers)
3014 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
3015 struct ipa_agg_replacement_value *res;
3016 struct ipa_agg_replacement_value **tail = &res;
3017 struct cgraph_edge *cs;
3018 int i, j, count = ipa_get_param_count (dest_info);
3020 FOR_EACH_VEC_ELT (callers, j, cs)
3022 int c = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
3023 if (c < count)
3024 count = c;
3027 for (i = 0; i < count ; i++)
3029 struct cgraph_edge *cs;
3030 vec<ipa_agg_jf_item_t> inter = vNULL;
3031 struct ipa_agg_jf_item *item;
3032 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (dest_info, i);
3033 int j;
3035 /* Among other things, the following check should deal with all by_ref
3036 mismatches. */
3037 if (plats->aggs_bottom)
3038 continue;
3040 FOR_EACH_VEC_ELT (callers, j, cs)
3042 inter = intersect_aggregates_with_edge (cs, i, inter);
3044 if (!inter.exists ())
3045 goto next_param;
3048 FOR_EACH_VEC_ELT (inter, j, item)
3050 struct ipa_agg_replacement_value *v;
3052 if (!item->value)
3053 continue;
3055 v = ggc_alloc_ipa_agg_replacement_value ();
3056 v->index = i;
3057 v->offset = item->offset;
3058 v->value = item->value;
3059 v->by_ref = plats->aggs_by_ref;
3060 *tail = v;
3061 tail = &v->next;
3064 next_param:
3065 if (inter.exists ())
3066 inter.release ();
3068 *tail = NULL;
3069 return res;
3072 /* Turn KNOWN_AGGS into a list of aggregate replacement values. */
3074 static struct ipa_agg_replacement_value *
3075 known_aggs_to_agg_replacement_list (vec<ipa_agg_jump_function_t> known_aggs)
3077 struct ipa_agg_replacement_value *res;
3078 struct ipa_agg_replacement_value **tail = &res;
3079 struct ipa_agg_jump_function *aggjf;
3080 struct ipa_agg_jf_item *item;
3081 int i, j;
3083 FOR_EACH_VEC_ELT (known_aggs, i, aggjf)
3084 FOR_EACH_VEC_SAFE_ELT (aggjf->items, j, item)
3086 struct ipa_agg_replacement_value *v;
3087 v = ggc_alloc_ipa_agg_replacement_value ();
3088 v->index = i;
3089 v->offset = item->offset;
3090 v->value = item->value;
3091 v->by_ref = aggjf->by_ref;
3092 *tail = v;
3093 tail = &v->next;
3095 *tail = NULL;
3096 return res;
3099 /* Determine whether CS also brings all scalar values that the NODE is
3100 specialized for. */
3102 static bool
3103 cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
3104 struct cgraph_node *node)
3106 struct ipa_node_params *dest_info = IPA_NODE_REF (node);
3107 int count = ipa_get_param_count (dest_info);
3108 struct ipa_node_params *caller_info;
3109 struct ipa_edge_args *args;
3110 int i;
3112 caller_info = IPA_NODE_REF (cs->caller);
3113 args = IPA_EDGE_REF (cs);
3114 for (i = 0; i < count; i++)
3116 struct ipa_jump_func *jump_func;
3117 tree val, t;
3119 val = dest_info->known_vals[i];
3120 if (!val)
3121 continue;
3123 if (i >= ipa_get_cs_argument_count (args))
3124 return false;
3125 jump_func = ipa_get_ith_jump_func (args, i);
3126 t = ipa_value_from_jfunc (caller_info, jump_func);
3127 if (!t || !values_equal_for_ipcp_p (val, t))
3128 return false;
3130 return true;
3133 /* Determine whether CS also brings all aggregate values that NODE is
3134 specialized for. */
3135 static bool
3136 cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
3137 struct cgraph_node *node)
3139 struct ipa_node_params *orig_caller_info = IPA_NODE_REF (cs->caller);
3140 struct ipa_node_params *orig_node_info;
3141 struct ipa_agg_replacement_value *aggval;
3142 int i, ec, count;
3144 aggval = ipa_get_agg_replacements_for_node (node);
3145 if (!aggval)
3146 return true;
3148 count = ipa_get_param_count (IPA_NODE_REF (node));
3149 ec = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
3150 if (ec < count)
3151 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3152 if (av->index >= ec)
3153 return false;
3155 orig_node_info = IPA_NODE_REF (IPA_NODE_REF (node)->ipcp_orig_node);
3156 if (orig_caller_info->ipcp_orig_node)
3157 orig_caller_info = IPA_NODE_REF (orig_caller_info->ipcp_orig_node);
3159 for (i = 0; i < count; i++)
3161 static vec<ipa_agg_jf_item_t> values = vec<ipa_agg_jf_item_t>();
3162 struct ipcp_param_lattices *plats;
3163 bool interesting = false;
3164 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3165 if (av->index == i)
3167 interesting = true;
3168 break;
3170 if (!interesting)
3171 continue;
3173 plats = ipa_get_parm_lattices (orig_node_info, i);
3174 if (plats->aggs_bottom)
3175 return false;
3177 values = intersect_aggregates_with_edge (cs, i, values);
3178 if (!values.exists ())
3179 return false;
3181 for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
3182 if (av->index == i)
3184 struct ipa_agg_jf_item *item;
3185 int j;
3186 bool found = false;
3187 FOR_EACH_VEC_ELT (values, j, item)
3188 if (item->value
3189 && item->offset == av->offset
3190 && values_equal_for_ipcp_p (item->value, av->value))
3191 found = true;
3192 if (!found)
3194 values.release ();
3195 return false;
3199 return true;
3202 /* Given an original NODE and a VAL for which we have already created a
3203 specialized clone, look whether there are incoming edges that still lead
3204 into the old node but now also bring the requested value and also conform to
3205 all other criteria such that they can be redirected to the special node.
3206 This function can therefore redirect the final edge in an SCC. */
3208 static void
3209 perhaps_add_new_callers (struct cgraph_node *node, struct ipcp_value *val)
3211 struct ipcp_value_source *src;
3212 gcov_type redirected_sum = 0;
3214 for (src = val->sources; src; src = src->next)
3216 struct cgraph_edge *cs = src->cs;
3217 while (cs)
3219 enum availability availability;
3220 struct cgraph_node *dst = cgraph_function_node (cs->callee,
3221 &availability);
3222 if ((dst == node || IPA_NODE_REF (dst)->is_all_contexts_clone)
3223 && availability > AVAIL_OVERWRITABLE
3224 && cgraph_edge_brings_value_p (cs, src))
3226 if (cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
3227 && cgraph_edge_brings_all_agg_vals_for_node (cs,
3228 val->spec_node))
3230 if (dump_file)
3231 fprintf (dump_file, " - adding an extra caller %s/%i"
3232 " of %s/%i\n",
3233 xstrdup (cgraph_node_name (cs->caller)),
3234 cs->caller->uid,
3235 xstrdup (cgraph_node_name (val->spec_node)),
3236 val->spec_node->uid);
3238 cgraph_redirect_edge_callee (cs, val->spec_node);
3239 redirected_sum += cs->count;
3242 cs = get_next_cgraph_edge_clone (cs);
3246 if (redirected_sum)
3247 update_specialized_profile (val->spec_node, node, redirected_sum);
3251 /* Copy KNOWN_BINFOS to KNOWN_VALS. */
3253 static void
3254 move_binfos_to_values (vec<tree> known_vals,
3255 vec<tree> known_binfos)
3257 tree t;
3258 int i;
3260 for (i = 0; known_binfos.iterate (i, &t); i++)
3261 if (t)
3262 known_vals[i] = t;
3265 /* Return true if there is a replacement equivalent to VALUE, INDEX and OFFSET
3266 among those in the AGGVALS list. */
3268 DEBUG_FUNCTION bool
3269 ipcp_val_in_agg_replacements_p (struct ipa_agg_replacement_value *aggvals,
3270 int index, HOST_WIDE_INT offset, tree value)
3272 while (aggvals)
3274 if (aggvals->index == index
3275 && aggvals->offset == offset
3276 && values_equal_for_ipcp_p (aggvals->value, value))
3277 return true;
3278 aggvals = aggvals->next;
3280 return false;
3283 /* Decide whether to create a special version of NODE for value VAL of parameter
3284 at the given INDEX. If OFFSET is -1, the value is for the parameter itself,
3285 otherwise it is stored at the given OFFSET of the parameter. KNOWN_CSTS,
3286 KNOWN_BINFOS and KNOWN_AGGS describe the other already known values. */
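/* The checks below, in order: if a specialized clone for VAL already exists,
merely try to redirect further callers to it; refuse values whose local size
cost would already exceed the unit growth budget; require at least one caller
edge that still brings the value; and finally require that the estimated time
benefit and size cost pass good_cloning_opportunity_p, either with the local
estimates alone or with the propagated effects added in.  */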
3288 static bool
3289 decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
3290 struct ipcp_value *val, vec<tree> known_csts,
3291 vec<tree> known_binfos)
3293 struct ipa_agg_replacement_value *aggvals;
3294 int freq_sum, caller_count;
3295 gcov_type count_sum;
3296 vec<cgraph_edge_p> callers;
3297 vec<tree> kv;
3299 if (val->spec_node)
3301 perhaps_add_new_callers (node, val);
3302 return false;
3304 else if (val->local_size_cost + overall_size > max_new_size)
3306 if (dump_file && (dump_flags & TDF_DETAILS))
3307 fprintf (dump_file, " Ignoring candidate value because "
3308 "max_new_size would be reached with %li.\n",
3309 val->local_size_cost + overall_size);
3310 return false;
3312 else if (!get_info_about_necessary_edges (val, &freq_sum, &count_sum,
3313 &caller_count))
3314 return false;
3316 if (dump_file && (dump_flags & TDF_DETAILS))
3318 fprintf (dump_file, " - considering value ");
3319 print_ipcp_constant_value (dump_file, val->value);
3320 fprintf (dump_file, " for parameter ");
3321 print_generic_expr (dump_file, ipa_get_param (IPA_NODE_REF (node),
3322 index), 0);
3323 if (offset != -1)
3324 fprintf (dump_file, ", offset: " HOST_WIDE_INT_PRINT_DEC, offset);
3325 fprintf (dump_file, " (caller_count: %i)\n", caller_count);
3328 if (!good_cloning_opportunity_p (node, val->local_time_benefit,
3329 freq_sum, count_sum,
3330 val->local_size_cost)
3331 && !good_cloning_opportunity_p (node,
3332 val->local_time_benefit
3333 + val->prop_time_benefit,
3334 freq_sum, count_sum,
3335 val->local_size_cost
3336 + val->prop_size_cost))
3337 return false;
3339 if (dump_file)
3340 fprintf (dump_file, " Creating a specialized node of %s/%i.\n",
3341 cgraph_node_name (node), node->uid);
3343 callers = gather_edges_for_value (val, caller_count);
3344 kv = known_csts.copy ();
3345 move_binfos_to_values (kv, known_binfos);
3346 if (offset == -1)
3347 kv[index] = val->value;
3348 find_more_scalar_values_for_callers_subset (node, kv, callers);
3349 aggvals = find_aggregate_values_for_callers_subset (node, callers);
3350 gcc_checking_assert (offset == -1
3351 || ipcp_val_in_agg_replacements_p (aggvals, index,
3352 offset, val->value));
3353 val->spec_node = create_specialized_node (node, kv, aggvals, callers);
3354 overall_size += val->local_size_cost;
3356 /* TODO: If for some lattice there is only one other known value
3357 left, make a special node for it too. */
3359 return true;
3362 /* Decide whether and what specialized clones of NODE should be created. */
3364 static bool
3365 decide_whether_version_node (struct cgraph_node *node)
3367 struct ipa_node_params *info = IPA_NODE_REF (node);
3368 int i, count = ipa_get_param_count (info);
3369 vec<tree> known_csts, known_binfos;
3370 vec<ipa_agg_jump_function_t> known_aggs = vNULL;
3371 bool ret = false;
3373 if (count == 0)
3374 return false;
3376 if (dump_file && (dump_flags & TDF_DETAILS))
3377 fprintf (dump_file, "\nEvaluating opportunities for %s/%i.\n",
3378 cgraph_node_name (node), node->uid);
3380 gather_context_independent_values (info, &known_csts, &known_binfos,
3381 info->do_clone_for_all_contexts ? &known_aggs
3382 : NULL, NULL);
3384 for (i = 0; i < count ; i++)
3386 struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
3387 struct ipcp_lattice *lat = &plats->itself;
3388 struct ipcp_value *val;
3390 if (!lat->bottom
3391 && !known_csts[i]
3392 && !known_binfos[i])
3393 for (val = lat->values; val; val = val->next)
3394 ret |= decide_about_value (node, i, -1, val, known_csts,
3395 known_binfos);
3397 if (!plats->aggs_bottom)
3399 struct ipcp_agg_lattice *aglat;
3400 struct ipcp_value *val;
3401 for (aglat = plats->aggs; aglat; aglat = aglat->next)
3402 if (!aglat->bottom && aglat->values
3403 /* If the following is false, the one value is in
3404 known_aggs. */
3405 && (plats->aggs_contain_variable
3406 || !ipa_lat_is_single_const (aglat)))
3407 for (val = aglat->values; val; val = val->next)
3408 ret |= decide_about_value (node, i, aglat->offset, val,
3409 known_csts, known_binfos);
3411 info = IPA_NODE_REF (node);
3414 if (info->do_clone_for_all_contexts)
3416 struct cgraph_node *clone;
3417 vec<cgraph_edge_p> callers;
3419 if (dump_file)
3420 fprintf (dump_file, " - Creating a specialized node of %s/%i "
3421 "for all known contexts.\n", cgraph_node_name (node),
3422 node->uid);
3424 callers = collect_callers_of_node (node);
3425 move_binfos_to_values (known_csts, known_binfos);
3426 clone = create_specialized_node (node, known_csts,
3427 known_aggs_to_agg_replacement_list (known_aggs),
3428 callers);
3429 info = IPA_NODE_REF (node);
3430 info->do_clone_for_all_contexts = false;
3431 IPA_NODE_REF (clone)->is_all_contexts_clone = true;
3432 for (i = 0; i < count ; i++)
3433 vec_free (known_aggs[i].items);
3434 known_aggs.release ();
3435 ret = true;
3437 else
3438 known_csts.release ();
3440 known_binfos.release ();
3441 return ret;
3444 /* Transitively mark all callees of NODE within the same SCC as not dead. */
3446 static void
3447 spread_undeadness (struct cgraph_node *node)
3449 struct cgraph_edge *cs;
3451 for (cs = node->callees; cs; cs = cs->next_callee)
3452 if (edge_within_scc (cs))
3454 struct cgraph_node *callee;
3455 struct ipa_node_params *info;
3457 callee = cgraph_function_node (cs->callee, NULL);
3458 info = IPA_NODE_REF (callee);
3460 if (info->node_dead)
3462 info->node_dead = 0;
3463 spread_undeadness (callee);
3468 /* Return true if NODE has a caller from outside of its SCC that is not
3469 dead. Worker callback for cgraph_for_node_and_aliases. */
3471 static bool
3472 has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
3473 void *data ATTRIBUTE_UNUSED)
3475 struct cgraph_edge *cs;
3477 for (cs = node->callers; cs; cs = cs->next_caller)
3478 if (cs->caller->thunk.thunk_p
3479 && cgraph_for_node_and_aliases (cs->caller,
3480 has_undead_caller_from_outside_scc_p,
3481 NULL, true))
3482 return true;
3483 else if (!edge_within_scc (cs)
3484 && !IPA_NODE_REF (cs->caller)->node_dead)
3485 return true;
3486 return false;
3490 /* Identify nodes within the same SCC as NODE which are no longer needed
3491 because of new clones and will be removed as unreachable. */
3493 static void
3494 identify_dead_nodes (struct cgraph_node *node)
3496 struct cgraph_node *v;
3497 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3498 if (cgraph_will_be_removed_from_program_if_no_direct_calls (v)
3499 && !cgraph_for_node_and_aliases (v,
3500 has_undead_caller_from_outside_scc_p,
3501 NULL, true))
3502 IPA_NODE_REF (v)->node_dead = 1;
3504 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3505 if (!IPA_NODE_REF (v)->node_dead)
3506 spread_undeadness (v);
3508 if (dump_file && (dump_flags & TDF_DETAILS))
3510 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3511 if (IPA_NODE_REF (v)->node_dead)
3512 fprintf (dump_file, " Marking node as dead: %s/%i.\n",
3513 cgraph_node_name (v), v->uid);
3517 /* The decision stage. Iterate over the topological order of call graph nodes
3518 TOPO and make specialized clones if deemed beneficial. */
3520 static void
3521 ipcp_decision_stage (struct topo_info *topo)
3523 int i;
3525 if (dump_file)
3526 fprintf (dump_file, "\nIPA decision stage:\n\n");
3528 for (i = topo->nnodes - 1; i >= 0; i--)
3530 struct cgraph_node *node = topo->order[i];
3531 bool change = false, iterate = true;
3533 while (iterate)
3535 struct cgraph_node *v;
3536 iterate = false;
3537 for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
3538 if (cgraph_function_with_gimple_body_p (v)
3539 && ipcp_versionable_function_p (v))
3540 iterate |= decide_whether_version_node (v);
3542 change |= iterate;
3544 if (change)
3545 identify_dead_nodes (node);
3549 /* The IPCP driver. */
3551 static unsigned int
3552 ipcp_driver (void)
3554 struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
3555 struct topo_info topo;
3557 ipa_check_create_node_params ();
3558 ipa_check_create_edge_args ();
3559 grow_next_edge_clone_vector ();
3560 edge_duplication_hook_holder =
3561 cgraph_add_edge_duplication_hook (&ipcp_edge_duplication_hook, NULL);
3562 ipcp_values_pool = create_alloc_pool ("IPA-CP values",
3563 sizeof (struct ipcp_value), 32);
3564 ipcp_sources_pool = create_alloc_pool ("IPA-CP value sources",
3565 sizeof (struct ipcp_value_source), 64);
3566 ipcp_agg_lattice_pool = create_alloc_pool ("IPA_CP aggregate lattices",
3567 sizeof (struct ipcp_agg_lattice),
3568 32);
3569 if (dump_file)
3571 fprintf (dump_file, "\nIPA structures before propagation:\n");
3572 if (dump_flags & TDF_DETAILS)
3573 ipa_print_all_params (dump_file);
3574 ipa_print_all_jump_functions (dump_file);
3577 /* Topological sort. */
3578 build_toporder_info (&topo);
3579 /* Do the interprocedural propagation. */
3580 ipcp_propagate_stage (&topo);
3581 /* Decide what constant propagation and cloning should be performed. */
3582 ipcp_decision_stage (&topo);
3584 /* Free all IPCP structures. */
3585 free_toporder_info (&topo);
3586 next_edge_clone.release ();
3587 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3588 ipa_free_all_structures_after_ipa_cp ();
3589 if (dump_file)
3590 fprintf (dump_file, "\nIPA constant propagation end\n");
3591 return 0;
3594 /* Initialization and computation of IPCP data structures. This is the initial
3595 intraprocedural analysis of functions, which gathers information to be
3596 propagated later on. */
3598 static void
3599 ipcp_generate_summary (void)
3601 struct cgraph_node *node;
3603 if (dump_file)
3604 fprintf (dump_file, "\nIPA constant propagation start:\n");
3605 ipa_register_cgraph_hooks ();
3607 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
3609 node->local.versionable
3610 = tree_versionable_function_p (node->symbol.decl);
3611 ipa_analyze_node (node);
3615 /* Write ipcp summary. */
3617 static void
3618 ipcp_write_summary (void)
3620 ipa_prop_write_jump_functions ();
3623 /* Read ipcp summary. */
3625 static void
3626 ipcp_read_summary (void)
3628 ipa_prop_read_jump_functions ();
3631 /* Gate for IPCP optimization. */
3633 static bool
3634 cgraph_gate_cp (void)
3636 /* FIXME: We should remove the optimize check after we ensure we never run
3637 IPA passes when not optimizing. */
3638 return flag_ipa_cp && optimize;
3641 struct ipa_opt_pass_d pass_ipa_cp =
3644 IPA_PASS,
3645 "cp", /* name */
3646 OPTGROUP_NONE, /* optinfo_flags */
3647 cgraph_gate_cp, /* gate */
3648 ipcp_driver, /* execute */
3649 NULL, /* sub */
3650 NULL, /* next */
3651 0, /* static_pass_number */
3652 TV_IPA_CONSTANT_PROP, /* tv_id */
3653 0, /* properties_required */
3654 0, /* properties_provided */
3655 0, /* properties_destroyed */
3656 0, /* todo_flags_start */
3657 TODO_dump_symtab |
3658 TODO_remove_functions | TODO_ggc_collect /* todo_flags_finish */
3660 ipcp_generate_summary, /* generate_summary */
3661 ipcp_write_summary, /* write_summary */
3662 ipcp_read_summary, /* read_summary */
3663 ipa_prop_write_all_agg_replacement, /* write_optimization_summary */
3664 ipa_prop_read_all_agg_replacement, /* read_optimization_summary */
3665 NULL, /* stmt_fixup */
3666 0, /* TODOs */
3667 ipcp_transform_function, /* function_transform */
3668 NULL, /* variable_transform */