/* Interprocedural constant propagation
   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

   Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Interprocedural constant propagation (IPA-CP).

   The goal of this transformation is to

   1) discover functions which are always invoked with some arguments with the
      same known constant values and modify the functions so that the
      subsequent optimizations can take advantage of the knowledge, and

   2) partial specialization - create specialized versions of functions
      transformed in this way if some parameters are known constants only in
      certain contexts but the estimated tradeoff between speedup and the size
      cost is deemed good.

   The algorithm also propagates types and attempts to perform type based
   devirtualization.  Types are propagated much like constants.

   The algorithm basically consists of three stages.  In the first, functions
   are analyzed one at a time and jump functions are constructed for all known
   call-sites.  In the second phase, the pass propagates information from the
   jump functions across the call to reveal what values are available at what
   call sites, performs estimations of effects of known values on functions and
   their callees, and finally decides what specialized extra versions should be
   created.  In the third, the special versions materialize and appropriate
   calls are redirected.

   The algorithm used is to a certain extent based on "Interprocedural Constant
   Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
   Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
   Cooper, Mary W. Hall, and Ken Kennedy.


   First stage - intraprocedural analysis
   =======================================

   This phase computes jump_function and modification flags.

   A jump function for a call-site represents the values passed as actual
   arguments of a given call-site.  In principle, there are three types of
   values:

   Pass through - the caller's formal parameter is passed as an actual
                  argument, plus an operation on it can be performed.
   Constant - a constant is passed as an actual argument.
   Unknown - neither of the above.
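
   As an illustrative sketch (the functions below are made up for this
   comment and are not part of GCC), consider which jump function each
   argument of a call would get:

     int bar (void);
     void callee (int a, int b, int c);

     void caller (int x)
     {
       callee (x + 4, 7, bar ());
     }

   The first argument would be described by a pass-through jump function
   (parameter 0 of the caller combined with the operation "+ 4"), the second
   by a constant jump function holding 7, and the third would remain unknown.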

   All jump function types are described in detail in ipa-prop.h, together with
   the data structures that represent them and methods of accessing them.

   ipcp_generate_summary() is the main function of the first stage.

   Second stage - interprocedural analysis
   ========================================

   This stage is itself divided into two phases.  In the first, we propagate
   known values over the call graph, in the second, we make cloning decisions.
   It uses a different algorithm than the one in the original paper by
   Callahan et al.

   First, we traverse the functions topologically from callers to callees and,
   for each strongly connected component (SCC), we propagate constants
   according to previously computed jump functions.  We also record what known
   values depend on other known values and estimate local effects.  Finally, we
   propagate cumulative information about these effects from dependent values
   to those on which they depend.

   Second, we again traverse the call graph in the same topological order and
   make clones for functions which we know are called with the same values in
   all contexts and decide about extra specialized clones of functions just for
   some contexts - these decisions are based on both local estimates and
   cumulative estimates propagated from callees.

   ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
   second stage.


   Third phase - materialization of clones, call statement updates.
   ============================================

   This stage is currently performed by call graph code (mainly in cgraphunit.c
   and tree-inline.c) according to instructions inserted to the call graph by
   the second stage.  */
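
/* For illustration only (this example is not part of GCC itself): if a
   function is always called with the same constant argument, e.g.

     static int
     mult (int x, int factor)
     {
       return x * factor;
     }

     int
     twice (int x)
     {
       return mult (x, 2);
     }

   then IPA-CP can conclude that the FACTOR parameter of mult is always 2 and
   either use that knowledge directly or, when mult also has callers passing
   other values, create a specialized clone of mult for FACTOR == 2 if the
   estimated speedup outweighs the size cost.  */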
106 #include "coretypes.h"
111 #include "ipa-prop.h"
112 #include "tree-flow.h"
113 #include "tree-pass.h"
115 #include "diagnostic.h"
116 #include "tree-pretty-print.h"
117 #include "tree-inline.h"
119 #include "ipa-inline.h"
120 #include "ipa-utils.h"

/* Describes a particular source for an IPA-CP value.  */

struct ipcp_value_source
{
  /* Aggregate offset of the source, negative if the source is scalar value of
     the argument itself.  */
  HOST_WIDE_INT offset;
  /* The incoming edge that brought the value.  */
  struct cgraph_edge *cs;
  /* If the jump function that resulted in this value was a pass-through or an
     ancestor, this is the ipcp_value of the caller from which the described
     value has been derived.  Otherwise it is NULL.  */
  struct ipcp_value *val;
  /* Next pointer in a linked list of sources of a value.  */
  struct ipcp_value_source *next;
  /* If the jump function that resulted in this value was a pass-through or an
     ancestor, this is the index of the parameter of the caller the jump
     function references.  */
  int index;
};

/* Describes one particular value stored in struct ipcp_lattice.  */

struct ipcp_value
{
  /* The actual value for the given parameter.  This is either an IPA invariant
     or a TREE_BINFO describing a type that can be used for
     devirtualization.  */
  tree value;
  /* The list of sources from which this value originates.  */
  struct ipcp_value_source *sources;
  /* Next pointers in a linked list of all values in a lattice.  */
  struct ipcp_value *next;
  /* Next pointers in a linked list of values in a strongly connected component
     of values.  */
  struct ipcp_value *scc_next;
  /* Next pointers in a linked list of SCCs of values sorted topologically
     according to their sources.  */
  struct ipcp_value *topo_next;
  /* A specialized node created for this value, NULL if none has been (so far)
     created.  */
  struct cgraph_node *spec_node;
  /* Depth first search number and low link for topological sorting of
     values.  */
  int dfs, low_link;
  /* Time benefit and size cost that specializing the function for this value
     would bring about in this function alone.  */
  int local_time_benefit, local_size_cost;
  /* Time benefit and size cost that specializing the function for this value
     can bring about in its callees (transitively).  */
  int prop_time_benefit, prop_size_cost;
  /* True if this value is currently on the topo-sort stack.  */
  bool on_stack;
};

/* Lattice describing potential values of a formal parameter of a function, or
   a part of an aggregate.  TOP is represented by a lattice with zero values
   and with contains_variable and bottom flags cleared.  BOTTOM is represented
   by a lattice with the bottom flag set.  In that case, values and
   contains_variable flag should be disregarded.  */
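
/* For intuition (an illustrative summary, not an exhaustive specification):
   a parameter that is always passed the constant 5 ends up with a single
   value and contains_variable clear; one passed either 5 or 7 ends up with
   two values; one whose value cannot be tracked gets contains_variable set;
   and once a lattice cannot be useful for propagation at all, for example
   because it accumulated too many values, it is dropped to BOTTOM.  */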

struct ipcp_lattice
{
  /* The list of known values and types in this lattice.  Note that values are
     not deallocated if a lattice is set to bottom because there may be value
     sources referencing them.  */
  struct ipcp_value *values;
  /* Number of known values and types in this lattice.  */
  int values_count;
  /* The lattice contains a variable component (in addition to values).  */
  bool contains_variable;
  /* The value of the lattice is bottom (i.e. variable and unusable for any
     propagation).  */
  bool bottom;
};

/* Lattice with an offset to describe a part of an aggregate.  */

struct ipcp_agg_lattice : public ipcp_lattice
{
  /* Offset that is being described by this lattice.  */
  HOST_WIDE_INT offset;
  /* Size so that we don't have to re-compute it every time we traverse the
     list.  Must correspond to TYPE_SIZE of all lat values.  */
  HOST_WIDE_INT size;
  /* Next element of the linked list.  */
  struct ipcp_agg_lattice *next;
};

/* Structure containing lattices for a parameter itself and for pieces of
   aggregates that are passed in the parameter or by a reference in a parameter
   plus some other useful flags.  */

struct ipcp_param_lattices
{
  /* Lattice describing the value of the parameter itself.  */
  struct ipcp_lattice itself;
  /* Lattices describing aggregate parts.  */
  struct ipcp_agg_lattice *aggs;
  /* Number of aggregate lattices.  */
  int aggs_count;
  /* True if aggregate data were passed by reference (as opposed to by
     value).  */
  bool aggs_by_ref;
  /* All aggregate lattices contain a variable component (in addition to
     values).  */
  bool aggs_contain_variable;
  /* The value of all aggregate lattices is bottom (i.e. variable and unusable
     for any propagation).  */
  bool aggs_bottom;

  /* There is a virtual call based on this parameter.  */
  bool virt_call;
};

/* Allocation pools for values and their sources in ipa-cp.  */

alloc_pool ipcp_values_pool;
alloc_pool ipcp_sources_pool;
alloc_pool ipcp_agg_lattice_pool;

/* Maximal count found in program.  */

static gcov_type max_count;

/* Original overall size of the program.  */

static long overall_size, max_new_size;

/* Head of the linked list of topologically sorted values.  */

static struct ipcp_value *values_topo;

/* Return the param lattices structure corresponding to the Ith formal
   parameter of the function described by INFO.  */

static inline struct ipcp_param_lattices *
ipa_get_parm_lattices (struct ipa_node_params *info, int i)
{
  gcc_assert (i >= 0 && i < ipa_get_param_count (info));
  gcc_checking_assert (!info->ipcp_orig_node);
  gcc_checking_assert (info->lattices);
  return &(info->lattices[i]);
}

/* Return the lattice corresponding to the scalar value of the Ith formal
   parameter of the function described by INFO.  */

static inline struct ipcp_lattice *
ipa_get_scalar_lat (struct ipa_node_params *info, int i)
{
  struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
  return &plats->itself;
}

/* Return whether LAT is a lattice with a single constant and without an
   undefined value.  */

static inline bool
ipa_lat_is_single_const (struct ipcp_lattice *lat)
{
  if (lat->bottom
      || lat->contains_variable
      || lat->values_count != 1)
    return false;
  else
    return true;
}

/* Return true iff the CS is an edge within a strongly connected component as
   computed by ipa_reduced_postorder.  */

static inline bool
edge_within_scc (struct cgraph_edge *cs)
{
  struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->symbol.aux;
  struct ipa_dfs_info *callee_dfs;
  struct cgraph_node *callee = cgraph_function_node (cs->callee, NULL);

  callee_dfs = (struct ipa_dfs_info *) callee->symbol.aux;
  return (caller_dfs
	  && callee_dfs
	  && caller_dfs->scc_no == callee_dfs->scc_no);
}

/* Print V which is extracted from a value in a lattice to F.  */

static void
print_ipcp_constant_value (FILE *f, tree v)
{
  if (TREE_CODE (v) == TREE_BINFO)
    {
      fprintf (f, "BINFO ");
      print_generic_expr (f, BINFO_TYPE (v), 0);
    }
  else if (TREE_CODE (v) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (v, 0)) == CONST_DECL)
    {
      fprintf (f, "& ");
      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (v, 0)), 0);
    }
  else
    print_generic_expr (f, v, 0);
}

/* Print a lattice LAT to F.  */

static void
print_lattice (FILE *f, struct ipcp_lattice *lat,
	       bool dump_sources, bool dump_benefits)
{
  struct ipcp_value *val;
  bool prev = false;

  if (lat->bottom)
    {
      fprintf (f, "BOTTOM\n");
      return;
    }

  if (!lat->values_count && !lat->contains_variable)
    {
      fprintf (f, "TOP\n");
      return;
    }

  if (lat->contains_variable)
    {
      fprintf (f, "VARIABLE");
      prev = true;
      if (dump_benefits)
	fprintf (f, "\n");
    }

  for (val = lat->values; val; val = val->next)
    {
      if (dump_benefits && prev)
	fprintf (f, "               ");
      else if (!dump_benefits && prev)
	fprintf (f, ", ");
      else
	prev = true;

      print_ipcp_constant_value (f, val->value);

      if (dump_sources)
	{
	  struct ipcp_value_source *s;

	  fprintf (f, " [from:");
	  for (s = val->sources; s; s = s->next)
	    fprintf (f, " %i(%i)", s->cs->caller->uid, s->cs->frequency);
	  fprintf (f, "]");
	}

      if (dump_benefits)
	fprintf (f, " [loc_time: %i, loc_size: %i, "
		 "prop_time: %i, prop_size: %i]\n",
		 val->local_time_benefit, val->local_size_cost,
		 val->prop_time_benefit, val->prop_size_cost);
      else
	fprintf (f, " ");
    }
  if (!dump_benefits)
    fprintf (f, "\n");
}

/* Print all ipcp_lattices of all functions to F.  */

static void
print_all_lattices (FILE *f, bool dump_sources, bool dump_benefits)
{
  struct cgraph_node *node;
  int i, count;

  fprintf (f, "\nLattices:\n");
  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    {
      struct ipa_node_params *info;

      info = IPA_NODE_REF (node);
      fprintf (f, "  Node: %s/%i:\n", cgraph_node_name (node), node->uid);
      count = ipa_get_param_count (info);
      for (i = 0; i < count; i++)
	{
	  struct ipcp_agg_lattice *aglat;
	  struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
	  fprintf (f, "    param [%d]: ", i);
	  print_lattice (f, &plats->itself, dump_sources, dump_benefits);

	  if (plats->virt_call)
	    fprintf (f, "        virt_call flag set\n");

	  if (plats->aggs_bottom)
	    {
	      fprintf (f, "        AGGS BOTTOM\n");
	      continue;
	    }
	  if (plats->aggs_contain_variable)
	    fprintf (f, "        AGGS VARIABLE\n");
	  for (aglat = plats->aggs; aglat; aglat = aglat->next)
	    {
	      fprintf (f, "        %soffset " HOST_WIDE_INT_PRINT_DEC ": ",
		       plats->aggs_by_ref ? "ref " : "", aglat->offset);
	      print_lattice (f, aglat, dump_sources, dump_benefits);
	    }
	}
    }
}

/* Determine whether it is at all technically possible to create clones of NODE
   and store this information in the ipa_node_params structure associated
   with NODE.  */

static void
determine_versionability (struct cgraph_node *node)
{
  const char *reason = NULL;

  /* There are a number of generic reasons functions cannot be versioned.  We
     also cannot remove parameters if there are type attributes such as fnspec
     present.  */
  if (node->alias || node->thunk.thunk_p)
    reason = "alias or thunk";
  else if (!node->local.versionable)
    reason = "not a tree_versionable_function";
  else if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
    reason = "insufficient body availability";

  if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
    fprintf (dump_file, "Function %s/%i is not versionable, reason: %s.\n",
	     cgraph_node_name (node), node->uid, reason);

  node->local.versionable = (reason == NULL);
}

/* Return true if it is at all technically possible to create clones of a
   NODE.  */

static bool
ipcp_versionable_function_p (struct cgraph_node *node)
{
  return node->local.versionable;
}

/* Structure holding accumulated information about callers of a node.  */

struct caller_statistics
{
  gcov_type count_sum;
  int n_calls, n_hot_calls, freq_sum;
};

/* Initialize fields of STAT to zeroes.  */

static inline void
init_caller_stats (struct caller_statistics *stats)
{
  stats->count_sum = 0;
  stats->n_calls = 0;
  stats->n_hot_calls = 0;
  stats->freq_sum = 0;
}

/* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
   non-thunk incoming edges to NODE.  */

static bool
gather_caller_stats (struct cgraph_node *node, void *data)
{
  struct caller_statistics *stats = (struct caller_statistics *) data;
  struct cgraph_edge *cs;

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (cs->caller->thunk.thunk_p)
      cgraph_for_node_and_aliases (cs->caller, gather_caller_stats,
				   stats, false);
    else
      {
	stats->count_sum += cs->count;
	stats->freq_sum += cs->frequency;
	stats->n_calls++;
	if (cgraph_maybe_hot_edge_p (cs))
	  stats->n_hot_calls++;
      }
  return false;
}

/* Return true if this NODE is viable candidate for cloning.  */

static bool
ipcp_cloning_candidate_p (struct cgraph_node *node)
{
  struct caller_statistics stats;

  gcc_checking_assert (cgraph_function_with_gimple_body_p (node));

  if (!flag_ipa_cp_clone)
    {
      if (dump_file)
	fprintf (dump_file, "Not considering %s for cloning; "
		 "-fipa-cp-clone disabled.\n",
		 cgraph_node_name (node));
      return false;
    }

  if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->symbol.decl)))
    {
      if (dump_file)
	fprintf (dump_file, "Not considering %s for cloning; "
		 "optimizing it for size.\n",
		 cgraph_node_name (node));
      return false;
    }

  init_caller_stats (&stats);
  cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);

  if (inline_summary (node)->self_size < stats.n_calls)
    {
      if (dump_file)
	fprintf (dump_file, "Considering %s for cloning; code might shrink.\n",
		 cgraph_node_name (node));
      return true;
    }

  /* When profile is available and function is hot, propagate into it even if
     calls seem cold; constant propagation can improve function's speed
     significantly.  */
  if (max_count)
    {
      if (stats.count_sum > node->count * 90 / 100)
	{
	  if (dump_file)
	    fprintf (dump_file, "Considering %s for cloning; "
		     "usually called directly.\n",
		     cgraph_node_name (node));
	  return true;
	}
    }
  if (!stats.n_hot_calls)
    {
      if (dump_file)
	fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
		 cgraph_node_name (node));
      return false;
    }
  if (dump_file)
    fprintf (dump_file, "Considering %s for cloning.\n",
	     cgraph_node_name (node));
  return true;
}

/* Arrays representing a topological ordering of call graph nodes and a stack
   of nodes used during constant propagation.  */

struct topo_info
{
  struct cgraph_node **order;
  struct cgraph_node **stack;
  int nnodes, stack_top;
};

/* Allocate the arrays in TOPO and topologically sort the nodes into order.  */

static void
build_toporder_info (struct topo_info *topo)
{
  topo->order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  topo->stack = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  topo->stack_top = 0;
  topo->nnodes = ipa_reduced_postorder (topo->order, true, true, NULL);
}

/* Free information about strongly connected components and the arrays in
   TOPO.  */

static void
free_toporder_info (struct topo_info *topo)
{
  ipa_free_postorder_info ();
  free (topo->order);
  free (topo->stack);
}

/* Add NODE to the stack in TOPO, unless it is already there.  */

static inline void
push_node_to_stack (struct topo_info *topo, struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  if (info->node_enqueued)
    return;
  info->node_enqueued = 1;
  topo->stack[topo->stack_top++] = node;
}

/* Pop a node from the stack in TOPO and return it or return NULL if the stack
   is empty.  */

static struct cgraph_node *
pop_node_from_stack (struct topo_info *topo)
{
  if (topo->stack_top)
    {
      struct cgraph_node *node;

      topo->stack_top--;
      node = topo->stack[topo->stack_top];
      IPA_NODE_REF (node)->node_enqueued = 0;
      return node;
    }
  else
    return NULL;
}

/* Set lattice LAT to bottom and return true if it previously was not set as
   such.  */

static inline bool
set_lattice_to_bottom (struct ipcp_lattice *lat)
{
  bool ret = !lat->bottom;
  lat->bottom = true;
  return ret;
}

/* Mark lattice as containing an unknown value and return true if it previously
   was not marked as such.  */

static inline bool
set_lattice_contains_variable (struct ipcp_lattice *lat)
{
  bool ret = !lat->contains_variable;
  lat->contains_variable = true;
  return ret;
}

/* Set all aggregate lattices in PLATS to bottom and return true if they were
   not previously set as such.  */

static inline bool
set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
{
  bool ret = !plats->aggs_bottom;
  plats->aggs_bottom = true;
  return ret;
}

/* Mark all aggregate lattices in PLATS as containing an unknown value and
   return true if they were not previously marked as such.  */

static inline bool
set_agg_lats_contain_variable (struct ipcp_param_lattices *plats)
{
  bool ret = !plats->aggs_contain_variable;
  plats->aggs_contain_variable = true;
  return ret;
}

/* Mark both aggregate and scalar lattices as containing an unknown variable,
   return true if any of them has not been marked as such so far.  */

static inline bool
set_all_contains_variable (struct ipcp_param_lattices *plats)
{
  bool ret = !plats->itself.contains_variable || !plats->aggs_contain_variable;
  plats->itself.contains_variable = true;
  plats->aggs_contain_variable = true;
  return ret;
}

/* Initialize ipcp_lattices.  */

static void
initialize_node_lattices (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  struct cgraph_edge *ie;
  bool disable = false, variable = false;
  int i;

  gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
  if (!node->local.local)
    {
      /* When cloning is allowed, we can assume that externally visible
	 functions are not called.  We will compensate this by cloning
	 later.  */
      if (ipcp_versionable_function_p (node)
	  && ipcp_cloning_candidate_p (node))
	variable = true;
      else
	disable = true;
    }

  if (disable || variable)
    {
      for (i = 0; i < ipa_get_param_count (info); i++)
	{
	  struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
	  if (disable)
	    {
	      set_lattice_to_bottom (&plats->itself);
	      set_agg_lats_to_bottom (plats);
	    }
	  else
	    set_all_contains_variable (plats);
	}
      if (dump_file && (dump_flags & TDF_DETAILS)
	  && node->alias && node->thunk.thunk_p)
	fprintf (dump_file, "Marking all lattices of %s/%i as %s\n",
		 cgraph_node_name (node), node->uid,
		 disable ? "BOTTOM" : "VARIABLE");
    }

  for (ie = node->indirect_calls; ie; ie = ie->next_callee)
    if (ie->indirect_info->polymorphic)
      {
	gcc_checking_assert (ie->indirect_info->param_index >= 0);
	ipa_get_parm_lattices (info,
			       ie->indirect_info->param_index)->virt_call = 1;
      }
}

/* Return the result of a (possibly arithmetic) pass through jump function
   JFUNC on the constant value INPUT.  Return NULL_TREE if that cannot be
   determined or the result cannot itself be considered an interprocedural
   invariant.  */

static tree
ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
{
  tree restype, res;

  if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
    return input;
  else if (TREE_CODE (input) == TREE_BINFO)
    return NULL_TREE;

  gcc_checking_assert (is_gimple_ip_invariant (input));
  if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc))
      == tcc_comparison)
    restype = boolean_type_node;
  else
    restype = TREE_TYPE (input);
  res = fold_binary (ipa_get_jf_pass_through_operation (jfunc), restype,
		     input, ipa_get_jf_pass_through_operand (jfunc));

  if (res && !is_gimple_ip_invariant (res))
    return NULL_TREE;

  return res;
}

/* Return the result of an ancestor jump function JFUNC on the constant value
   INPUT.  Return NULL_TREE if that cannot be determined.  */

static tree
ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
{
  if (TREE_CODE (input) == TREE_BINFO)
    return get_binfo_at_offset (input,
				ipa_get_jf_ancestor_offset (jfunc),
				ipa_get_jf_ancestor_type (jfunc));
  else if (TREE_CODE (input) == ADDR_EXPR)
    {
      tree t = TREE_OPERAND (input, 0);
      t = build_ref_for_offset (EXPR_LOCATION (t), t,
				ipa_get_jf_ancestor_offset (jfunc),
				ipa_get_jf_ancestor_type (jfunc), NULL, false);
      return build_fold_addr_expr (t);
    }
  else
    return NULL_TREE;
}

/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

static tree
ipa_value_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  tree base_binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
  return get_binfo_at_offset (base_binfo,
			      ipa_get_jf_known_type_offset (jfunc),
			      ipa_get_jf_known_type_component_type (jfunc));
}

/* Determine whether JFUNC evaluates to a known value (that is either a
   constant or a binfo) and if so, return it.  Otherwise return NULL.  INFO
   describes the caller node so that pass-through jump functions can be
   evaluated.  */

tree
ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
{
  if (jfunc->type == IPA_JF_CONST)
    return ipa_get_jf_constant (jfunc);
  else if (jfunc->type == IPA_JF_KNOWN_TYPE)
    return ipa_value_from_known_type_jfunc (jfunc);
  else if (jfunc->type == IPA_JF_PASS_THROUGH
	   || jfunc->type == IPA_JF_ANCESTOR)
    {
      tree input;
      int idx;

      if (jfunc->type == IPA_JF_PASS_THROUGH)
	idx = ipa_get_jf_pass_through_formal_id (jfunc);
      else
	idx = ipa_get_jf_ancestor_formal_id (jfunc);

      if (info->ipcp_orig_node)
	input = info->known_vals[idx];
      else
	{
	  struct ipcp_lattice *lat;

	  if (!info->lattices)
	    {
	      gcc_checking_assert (!flag_ipa_cp);
	      return NULL_TREE;
	    }
	  lat = ipa_get_scalar_lat (info, idx);
	  if (!ipa_lat_is_single_const (lat))
	    return NULL_TREE;
	  input = lat->values->value;
	}

      if (!input)
	return NULL_TREE;

      if (jfunc->type == IPA_JF_PASS_THROUGH)
	return ipa_get_jf_pass_through_result (jfunc, input);
      else
	return ipa_get_jf_ancestor_result (jfunc, input);
    }
  else
    return NULL_TREE;
}

/* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
   bottom, not containing a variable component and without any known value at
   the same time.  */

static void
ipcp_verify_propagated_values (void)
{
  struct cgraph_node *node;

  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    {
      struct ipa_node_params *info = IPA_NODE_REF (node);
      int i, count = ipa_get_param_count (info);

      for (i = 0; i < count; i++)
	{
	  struct ipcp_lattice *lat = ipa_get_scalar_lat (info, i);

	  if (!lat->bottom
	      && !lat->contains_variable
	      && lat->values_count == 0)
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "\nIPA lattices after constant "
			   "propagation:\n");
		  print_all_lattices (dump_file, true, false);
		}

	      gcc_unreachable ();
	    }
	}
    }
}

/* Return true iff X and Y should be considered equal values by IPA-CP.  */

static bool
values_equal_for_ipcp_p (tree x, tree y)
{
  gcc_checking_assert (x != NULL_TREE && y != NULL_TREE);

  if (x == y)
    return true;

  if (TREE_CODE (x) == TREE_BINFO || TREE_CODE (y) == TREE_BINFO)
    return false;

  if (TREE_CODE (x) == ADDR_EXPR
      && TREE_CODE (y) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (x, 0)) == CONST_DECL
      && TREE_CODE (TREE_OPERAND (y, 0)) == CONST_DECL)
    return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x, 0)),
			    DECL_INITIAL (TREE_OPERAND (y, 0)), 0);
  else
    return operand_equal_p (x, y, 0);
}

/* Add a new value source to VAL, marking that a value comes from edge CS and
   (if the underlying jump function is a pass-through or an ancestor one) from
   a caller value SRC_VAL of a caller parameter described by SRC_INDEX.  OFFSET
   is negative if the source was the scalar value of the parameter itself or
   the offset within an aggregate.  */

static void
add_value_source (struct ipcp_value *val, struct cgraph_edge *cs,
		  struct ipcp_value *src_val, int src_idx, HOST_WIDE_INT offset)
{
  struct ipcp_value_source *src;

  src = (struct ipcp_value_source *) pool_alloc (ipcp_sources_pool);
  src->offset = offset;
  src->cs = cs;
  src->val = src_val;
  src->index = src_idx;
  src->next = val->sources;
  val->sources = src;
}

/* Try to add NEWVAL to LAT, potentially creating a new struct ipcp_value for
   it.  CS, SRC_VAL, SRC_INDEX and OFFSET are meant for add_value_source and
   have the same meaning.  */

static bool
add_value_to_lattice (struct ipcp_lattice *lat, tree newval,
		      struct cgraph_edge *cs, struct ipcp_value *src_val,
		      int src_idx, HOST_WIDE_INT offset)
{
  struct ipcp_value *val;

  if (lat->bottom)
    return false;

  for (val = lat->values; val; val = val->next)
    if (values_equal_for_ipcp_p (val->value, newval))
      {
	if (edge_within_scc (cs))
	  {
	    struct ipcp_value_source *s;
	    for (s = val->sources; s; s = s->next)
	      if (s->cs == cs)
		break;
	    if (s)
	      return false;
	  }

	add_value_source (val, cs, src_val, src_idx, offset);
	return false;
      }

  if (lat->values_count == PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE))
    {
      /* We can only free sources, not the values themselves, because sources
	 of other values in this SCC might point to them.  */
      for (val = lat->values; val; val = val->next)
	{
	  while (val->sources)
	    {
	      struct ipcp_value_source *src = val->sources;
	      val->sources = src->next;
	      pool_free (ipcp_sources_pool, src);
	    }
	}

      lat->values = NULL;
      return set_lattice_to_bottom (lat);
    }

  lat->values_count++;
  val = (struct ipcp_value *) pool_alloc (ipcp_values_pool);
  memset (val, 0, sizeof (*val));

  add_value_source (val, cs, src_val, src_idx, offset);
  val->value = newval;
  val->next = lat->values;
  lat->values = val;
  return true;
}

/* Like above but passes a special value of offset to distinguish that the
   origin is the scalar value of the parameter rather than a part of an
   aggregate.  */

static bool
add_scalar_value_to_lattice (struct ipcp_lattice *lat, tree newval,
			     struct cgraph_edge *cs,
			     struct ipcp_value *src_val, int src_idx)
{
  return add_value_to_lattice (lat, newval, cs, src_val, src_idx, -1);
}

/* Propagate values through a pass-through jump function JFUNC associated with
   edge CS, taking values from SRC_LAT and putting them into DEST_LAT.  SRC_IDX
   is the index of the source parameter.  */

static bool
propagate_vals_accross_pass_through (struct cgraph_edge *cs,
				     struct ipa_jump_func *jfunc,
				     struct ipcp_lattice *src_lat,
				     struct ipcp_lattice *dest_lat,
				     int src_idx)
{
  struct ipcp_value *src_val;
  bool ret = false;

  if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
    for (src_val = src_lat->values; src_val; src_val = src_val->next)
      ret |= add_scalar_value_to_lattice (dest_lat, src_val->value, cs,
					  src_val, src_idx);
  /* Do not create new values when propagating within an SCC because if there
     are arithmetic functions with circular dependencies, there is an infinite
     number of them and we would just make lattices bottom.  */
  else if (edge_within_scc (cs))
    ret = set_lattice_contains_variable (dest_lat);
  else
    for (src_val = src_lat->values; src_val; src_val = src_val->next)
      {
	tree cstval = src_val->value;

	if (TREE_CODE (cstval) == TREE_BINFO)
	  {
	    ret |= set_lattice_contains_variable (dest_lat);
	    continue;
	  }
	cstval = ipa_get_jf_pass_through_result (jfunc, cstval);

	if (cstval)
	  ret |= add_scalar_value_to_lattice (dest_lat, cstval, cs, src_val,
					      src_idx);
	else
	  ret |= set_lattice_contains_variable (dest_lat);
      }

  return ret;
}

/* Propagate values through an ancestor jump function JFUNC associated with
   edge CS, taking values from SRC_LAT and putting them into DEST_LAT.  SRC_IDX
   is the index of the source parameter.  */

static bool
propagate_vals_accross_ancestor (struct cgraph_edge *cs,
				 struct ipa_jump_func *jfunc,
				 struct ipcp_lattice *src_lat,
				 struct ipcp_lattice *dest_lat,
				 int src_idx)
{
  struct ipcp_value *src_val;
  bool ret = false;

  if (edge_within_scc (cs))
    return set_lattice_contains_variable (dest_lat);

  for (src_val = src_lat->values; src_val; src_val = src_val->next)
    {
      tree t = ipa_get_jf_ancestor_result (jfunc, src_val->value);

      if (t)
	ret |= add_scalar_value_to_lattice (dest_lat, t, cs, src_val, src_idx);
      else
	ret |= set_lattice_contains_variable (dest_lat);
    }

  return ret;
}

/* Propagate scalar values across jump function JFUNC that is associated with
   edge CS and put the values into DEST_LAT.  */

static bool
propagate_scalar_accross_jump_function (struct cgraph_edge *cs,
					struct ipa_jump_func *jfunc,
					struct ipcp_lattice *dest_lat)
{
  if (dest_lat->bottom)
    return false;

  if (jfunc->type == IPA_JF_CONST
      || jfunc->type == IPA_JF_KNOWN_TYPE)
    {
      tree val;

      if (jfunc->type == IPA_JF_KNOWN_TYPE)
	{
	  val = ipa_value_from_known_type_jfunc (jfunc);
	  if (!val)
	    return set_lattice_contains_variable (dest_lat);
	}
      else
	val = ipa_get_jf_constant (jfunc);
      return add_scalar_value_to_lattice (dest_lat, val, cs, NULL, 0);
    }
  else if (jfunc->type == IPA_JF_PASS_THROUGH
	   || jfunc->type == IPA_JF_ANCESTOR)
    {
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      struct ipcp_lattice *src_lat;
      int src_idx;
      bool ret;

      if (jfunc->type == IPA_JF_PASS_THROUGH)
	src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
      else
	src_idx = ipa_get_jf_ancestor_formal_id (jfunc);

      src_lat = ipa_get_scalar_lat (caller_info, src_idx);
      if (src_lat->bottom)
	return set_lattice_contains_variable (dest_lat);

      /* If we would need to clone the caller and cannot, do not propagate.  */
      if (!ipcp_versionable_function_p (cs->caller)
	  && (src_lat->contains_variable
	      || (src_lat->values_count > 1)))
	return set_lattice_contains_variable (dest_lat);

      if (jfunc->type == IPA_JF_PASS_THROUGH)
	ret = propagate_vals_accross_pass_through (cs, jfunc, src_lat,
						   dest_lat, src_idx);
      else
	ret = propagate_vals_accross_ancestor (cs, jfunc, src_lat, dest_lat,
					       src_idx);

      if (src_lat->contains_variable)
	ret |= set_lattice_contains_variable (dest_lat);

      return ret;
    }

  /* TODO: We currently do not handle member method pointers in IPA-CP (we only
     use them for indirect inlining), we should propagate them too.  */
  return set_lattice_contains_variable (dest_lat);
}

/* If DEST_PLATS already has aggregate items, check that aggs_by_ref matches
   NEW_AGGS_BY_REF and if not, mark all aggs as bottoms and return true (in all
   other cases, return false).  If there are no aggregate items, set
   aggs_by_ref to NEW_AGGS_BY_REF.  */

static bool
set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
		       bool new_aggs_by_ref)
{
  if (dest_plats->aggs)
    {
      if (dest_plats->aggs_by_ref != new_aggs_by_ref)
	{
	  set_agg_lats_to_bottom (dest_plats);
	  return true;
	}
    }
  else
    dest_plats->aggs_by_ref = new_aggs_by_ref;
  return false;
}

/* Walk aggregate lattices in DEST_PLATS from ***AGLAT on, until ***aglat is an
   already existing lattice for the given OFFSET and SIZE, marking all skipped
   lattices as containing variable and checking for overlaps.  If there is no
   already existing lattice for the OFFSET and VAL_SIZE, create one, initialize
   it with offset, size and contains_variable to PRE_EXISTING, and return true,
   unless there are too many already.  If there are too many, return false.  If
   there are overlaps turn whole DEST_PLATS to bottom and return false.  If any
   skipped lattices were newly marked as containing variable, set *CHANGE to
   true.  */
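
/* For example (illustrative numbers only): if DEST_PLATS already tracks
   lattices at offsets 0 and 64, each of size 32, then merging a value at
   offset 32 with size 32 creates a new lattice between them, whereas a value
   at offset 48 with size 32 would overlap the existing lattice at offset 64
   and the whole DEST_PLATS is dropped to bottom.  */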

static bool
merge_agg_lats_step (struct ipcp_param_lattices *dest_plats,
		     HOST_WIDE_INT offset, HOST_WIDE_INT val_size,
		     struct ipcp_agg_lattice ***aglat,
		     bool pre_existing, bool *change)
{
  gcc_checking_assert (offset >= 0);

  while (**aglat && (**aglat)->offset < offset)
    {
      if ((**aglat)->offset + (**aglat)->size > offset)
	{
	  set_agg_lats_to_bottom (dest_plats);
	  return false;
	}
      *change |= set_lattice_contains_variable (**aglat);
      *aglat = &(**aglat)->next;
    }

  if (**aglat && (**aglat)->offset == offset)
    {
      if ((**aglat)->size != val_size
	  || ((**aglat)->next
	      && (**aglat)->next->offset < offset + val_size))
	{
	  set_agg_lats_to_bottom (dest_plats);
	  return false;
	}
      gcc_checking_assert (!(**aglat)->next
			   || (**aglat)->next->offset >= offset + val_size);
      return true;
    }
  else
    {
      struct ipcp_agg_lattice *new_al;

      if (**aglat && (**aglat)->offset < offset + val_size)
	{
	  set_agg_lats_to_bottom (dest_plats);
	  return false;
	}
      if (dest_plats->aggs_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	return false;
      dest_plats->aggs_count++;
      new_al = (struct ipcp_agg_lattice *) pool_alloc (ipcp_agg_lattice_pool);
      memset (new_al, 0, sizeof (*new_al));

      new_al->offset = offset;
      new_al->size = val_size;
      new_al->contains_variable = pre_existing;

      new_al->next = **aglat;
      **aglat = new_al;
      return true;
    }
}

/* Set all AGLAT and all other aggregate lattices reachable by next pointers as
   containing an unknown value.  */

static bool
set_chain_of_aglats_contains_variable (struct ipcp_agg_lattice *aglat)
{
  bool ret = false;
  while (aglat)
    {
      ret |= set_lattice_contains_variable (aglat);
      aglat = aglat->next;
    }
  return ret;
}

/* Merge existing aggregate lattices in SRC_PLATS to DEST_PLATS, subtracting
   OFFSET_DELTA.  CS is the call graph edge and SRC_IDX the index of the source
   parameter used for lattice value sources.  Return true if DEST_PLATS changed
   in any way.  */

static bool
merge_aggregate_lattices (struct cgraph_edge *cs,
			  struct ipcp_param_lattices *dest_plats,
			  struct ipcp_param_lattices *src_plats,
			  int src_idx, HOST_WIDE_INT offset_delta)
{
  bool pre_existing = dest_plats->aggs != NULL;
  struct ipcp_agg_lattice **dst_aglat;
  bool ret = false;

  if (set_check_aggs_by_ref (dest_plats, src_plats->aggs_by_ref))
    return true;
  if (src_plats->aggs_bottom)
    return set_agg_lats_contain_variable (dest_plats);
  if (src_plats->aggs_contain_variable)
    ret |= set_agg_lats_contain_variable (dest_plats);
  dst_aglat = &dest_plats->aggs;

  for (struct ipcp_agg_lattice *src_aglat = src_plats->aggs;
       src_aglat;
       src_aglat = src_aglat->next)
    {
      HOST_WIDE_INT new_offset = src_aglat->offset - offset_delta;

      if (new_offset < 0)
	continue;
      if (merge_agg_lats_step (dest_plats, new_offset, src_aglat->size,
			       &dst_aglat, pre_existing, &ret))
	{
	  struct ipcp_agg_lattice *new_al = *dst_aglat;

	  dst_aglat = &(*dst_aglat)->next;
	  if (src_aglat->bottom)
	    {
	      ret |= set_lattice_contains_variable (new_al);
	      continue;
	    }
	  if (src_aglat->contains_variable)
	    ret |= set_lattice_contains_variable (new_al);
	  for (struct ipcp_value *val = src_aglat->values;
	       val;
	       val = val->next)
	    ret |= add_value_to_lattice (new_al, val->value, cs, val, src_idx,
					 src_aglat->offset);
	}
      else if (dest_plats->aggs_bottom)
	return true;
    }
  ret |= set_chain_of_aglats_contains_variable (*dst_aglat);
  return ret;
}

/* Determine whether there is anything to propagate FROM SRC_PLATS through a
   pass-through JFUNC and if so, whether it conforms to the rules about
   propagating values passed by reference.  */

static bool
agg_pass_through_permissible_p (struct ipcp_param_lattices *src_plats,
				struct ipa_jump_func *jfunc)
{
  return src_plats->aggs
	 && (!src_plats->aggs_by_ref
	     || ipa_get_jf_pass_through_agg_preserved (jfunc));
}

/* Propagate aggregate values across jump function JFUNC that is associated
   with edge CS and put the values into DEST_PLATS.  */

static bool
propagate_aggs_accross_jump_function (struct cgraph_edge *cs,
				      struct ipa_jump_func *jfunc,
				      struct ipcp_param_lattices *dest_plats)
{
  bool ret = false;

  if (dest_plats->aggs_bottom)
    return false;

  if (jfunc->type == IPA_JF_PASS_THROUGH
      && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
    {
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
      struct ipcp_param_lattices *src_plats;

      src_plats = ipa_get_parm_lattices (caller_info, src_idx);
      if (agg_pass_through_permissible_p (src_plats, jfunc))
	{
	  /* Currently we do not produce clobber aggregate jump
	     functions, replace with merging when we do.  */
	  gcc_assert (!jfunc->agg.items);
	  ret |= merge_aggregate_lattices (cs, dest_plats, src_plats,
					   src_idx, 0);
	}
      else
	ret |= set_agg_lats_contain_variable (dest_plats);
    }
  else if (jfunc->type == IPA_JF_ANCESTOR
	   && ipa_get_jf_ancestor_agg_preserved (jfunc))
    {
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
      struct ipcp_param_lattices *src_plats;

      src_plats = ipa_get_parm_lattices (caller_info, src_idx);
      if (src_plats->aggs && src_plats->aggs_by_ref)
	{
	  /* Currently we do not produce clobber aggregate jump
	     functions, replace with merging when we do.  */
	  gcc_assert (!jfunc->agg.items);
	  ret |= merge_aggregate_lattices (cs, dest_plats, src_plats, src_idx,
					   ipa_get_jf_ancestor_offset (jfunc));
	}
      else if (!src_plats->aggs_by_ref)
	ret |= set_agg_lats_to_bottom (dest_plats);
      else
	ret |= set_agg_lats_contain_variable (dest_plats);
    }
  else if (jfunc->agg.items)
    {
      bool pre_existing = dest_plats->aggs != NULL;
      struct ipcp_agg_lattice **aglat = &dest_plats->aggs;
      struct ipa_agg_jf_item *item;
      int i;

      if (set_check_aggs_by_ref (dest_plats, jfunc->agg.by_ref))
	return true;

      FOR_EACH_VEC_ELT (*jfunc->agg.items, i, item)
	{
	  HOST_WIDE_INT val_size;

	  if (item->offset < 0)
	    continue;
	  gcc_checking_assert (is_gimple_ip_invariant (item->value));
	  val_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (item->value)), 1);

	  if (merge_agg_lats_step (dest_plats, item->offset, val_size,
				   &aglat, pre_existing, &ret))
	    {
	      ret |= add_value_to_lattice (*aglat, item->value, cs, NULL, 0, 0);
	      aglat = &(*aglat)->next;
	    }
	  else if (dest_plats->aggs_bottom)
	    return true;
	}

      ret |= set_chain_of_aglats_contains_variable (*aglat);
    }
  else
    ret |= set_agg_lats_contain_variable (dest_plats);
  return ret;
}

/* Propagate constants from the caller to the callee of CS.  INFO describes the
   caller.  */

static bool
propagate_constants_accross_call (struct cgraph_edge *cs)
{
  struct ipa_node_params *callee_info;
  enum availability availability;
  struct cgraph_node *callee, *alias_or_thunk;
  struct ipa_edge_args *args;
  bool ret = false;
  int i, args_count, parms_count;

  callee = cgraph_function_node (cs->callee, &availability);
  if (!callee->analyzed)
    return false;
  gcc_checking_assert (cgraph_function_with_gimple_body_p (callee));
  callee_info = IPA_NODE_REF (callee);

  args = IPA_EDGE_REF (cs);
  args_count = ipa_get_cs_argument_count (args);
  parms_count = ipa_get_param_count (callee_info);

  /* If this call goes through a thunk we must not propagate to the first (0th)
     parameter.  However, we might need to uncover a thunk from below a series
     of aliases first.  */
  alias_or_thunk = cs->callee;
  while (alias_or_thunk->alias)
    alias_or_thunk = cgraph_alias_aliased_node (alias_or_thunk);
  if (alias_or_thunk->thunk.thunk_p)
    {
      ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
								0));
      i = 1;
    }
  else
    i = 0;

  for (; (i < args_count) && (i < parms_count); i++)
    {
      struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
      struct ipcp_param_lattices *dest_plats;

      dest_plats = ipa_get_parm_lattices (callee_info, i);
      if (availability == AVAIL_OVERWRITABLE)
	ret |= set_all_contains_variable (dest_plats);
      else
	{
	  ret |= propagate_scalar_accross_jump_function (cs, jump_func,
							 &dest_plats->itself);
	  ret |= propagate_aggs_accross_jump_function (cs, jump_func,
						       dest_plats);
	}
    }
  for (; i < parms_count; i++)
    ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info, i));

  return ret;
}

/* If an indirect edge IE can be turned into a direct one based on KNOWN_VALS
   (which can contain both constants and binfos) or KNOWN_BINFOS (which can be
   NULL) return the destination.  */

tree
ipa_get_indirect_edge_target (struct cgraph_edge *ie,
			      vec<tree> known_vals,
			      vec<tree> known_binfos,
			      vec<ipa_agg_jump_function_p> known_aggs)
{
  int param_index = ie->indirect_info->param_index;
  HOST_WIDE_INT token, anc_offset;
  tree otr_type;
  tree t;

  if (param_index == -1)
    return NULL_TREE;

  if (!ie->indirect_info->polymorphic)
    {
      tree t;

      if (ie->indirect_info->agg_contents)
	{
	  if (known_aggs.length ()
	      > (unsigned int) param_index)
	    {
	      struct ipa_agg_jump_function *agg;
	      agg = known_aggs[param_index];
	      t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
					      ie->indirect_info->by_ref);
	    }
	  else
	    t = NULL;
	}
      else
	t = (known_vals.length () > (unsigned int) param_index
	     ? known_vals[param_index] : NULL);

      if (t
	  && TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL)
	return TREE_OPERAND (t, 0);
      else
	return NULL_TREE;
    }

  gcc_assert (!ie->indirect_info->agg_contents);
  token = ie->indirect_info->otr_token;
  anc_offset = ie->indirect_info->offset;
  otr_type = ie->indirect_info->otr_type;

  t = known_vals[param_index];
  if (!t && known_binfos.length () > (unsigned int) param_index)
    t = known_binfos[param_index];
  if (!t)
    return NULL_TREE;

  if (TREE_CODE (t) != TREE_BINFO)
    {
      tree binfo;
      binfo = gimple_extract_devirt_binfo_from_cst (t);
      if (!binfo)
	return NULL_TREE;
      binfo = get_binfo_at_offset (binfo, anc_offset, otr_type);
      if (!binfo)
	return NULL_TREE;
      return gimple_get_virt_method_for_binfo (token, binfo);
    }
  else
    {
      tree binfo;

      binfo = get_binfo_at_offset (t, anc_offset, otr_type);
      if (!binfo)
	return NULL_TREE;
      return gimple_get_virt_method_for_binfo (token, binfo);
    }
}

/* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
   and KNOWN_BINFOS.  */

static int
devirtualization_time_bonus (struct cgraph_node *node,
			     vec<tree> known_csts,
			     vec<tree> known_binfos)
{
  struct cgraph_edge *ie;
  int res = 0;

  for (ie = node->indirect_calls; ie; ie = ie->next_callee)
    {
      struct cgraph_node *callee;
      struct inline_summary *isummary;
      tree target;

      target = ipa_get_indirect_edge_target (ie, known_csts, known_binfos,
					     vNULL);
      if (!target)
	continue;

      /* Only bare minimum benefit for clearly un-inlineable targets.  */
      res += 1;
      callee = cgraph_get_node (target);
      if (!callee || !callee->analyzed)
	continue;
      isummary = inline_summary (callee);
      if (!isummary->inlinable)
	continue;

      /* FIXME: The values below need re-considering and perhaps also
	 integrating into the cost metrics, at least in some very basic way.  */
      if (isummary->size <= MAX_INLINE_INSNS_AUTO / 4)
	res += 31;
      else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
	res += 15;
      else if (isummary->size <= MAX_INLINE_INSNS_AUTO
	       || DECL_DECLARED_INLINE_P (callee->symbol.decl))
	res += 7;
    }

  return res;
}

/* Return time bonus incurred because of HINTS.  */

static int
hint_time_bonus (inline_hints hints)
{
  if (hints & (INLINE_HINT_loop_iterations | INLINE_HINT_loop_stride))
    return PARAM_VALUE (PARAM_IPA_CP_LOOP_HINT_BONUS);
  return 0;
}

/* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
   and SIZE_COST and with the sum of frequencies of incoming edges to the
   potential new clone in FREQUENCIES.  */

static bool
good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
			    int freq_sum, gcov_type count_sum, int size_cost)
{
  if (time_benefit == 0
      || !flag_ipa_cp_clone
      || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->symbol.decl)))
    return false;

  gcc_assert (size_cost > 0);

  if (max_count)
    {
      int factor = (count_sum * 1000) / max_count;
      HOST_WIDEST_INT evaluation = (((HOST_WIDEST_INT) time_benefit * factor)
				    / size_cost);

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "     good_cloning_opportunity_p (time: %i, "
		 "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
		 ") -> evaluation: " HOST_WIDEST_INT_PRINT_DEC
		 ", threshold: %i\n",
		 time_benefit, size_cost, (HOST_WIDE_INT) count_sum,
		 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));

      return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
    }
  else
    {
      HOST_WIDEST_INT evaluation = (((HOST_WIDEST_INT) time_benefit * freq_sum)
				    / size_cost);

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "     good_cloning_opportunity_p (time: %i, "
		 "size: %i, freq_sum: %i) -> evaluation: "
		 HOST_WIDEST_INT_PRINT_DEC ", threshold: %i\n",
		 time_benefit, size_cost, freq_sum, evaluation,
		 CGRAPH_FREQ_BASE /2);

      return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
    }
}

/* Return all context independent values from aggregate lattices in PLATS in a
   vector.  Return NULL if there are none.  */

static vec<ipa_agg_jf_item_t, va_gc> *
context_independent_aggregate_values (struct ipcp_param_lattices *plats)
{
  vec<ipa_agg_jf_item_t, va_gc> *res = NULL;

  if (plats->aggs_bottom
      || plats->aggs_contain_variable
      || plats->aggs_count == 0)
    return NULL;

  for (struct ipcp_agg_lattice *aglat = plats->aggs;
       aglat;
       aglat = aglat->next)
    if (ipa_lat_is_single_const (aglat))
      {
	struct ipa_agg_jf_item item;
	item.offset = aglat->offset;
	item.value = aglat->values->value;
	vec_safe_push (res, item);
      }
  return res;
}

/* Allocate KNOWN_CSTS, KNOWN_BINFOS and, if non-NULL, KNOWN_AGGS and populate
   them with values of parameters that are known independent of the context.
   INFO describes the function.  If REMOVABLE_PARAMS_COST is non-NULL, the
   movement cost of all removable parameters will be stored in it.  */

static bool
gather_context_independent_values (struct ipa_node_params *info,
				   vec<tree> *known_csts,
				   vec<tree> *known_binfos,
				   vec<ipa_agg_jump_function_t> *known_aggs,
				   int *removable_params_cost)
{
  int i, count = ipa_get_param_count (info);
  bool ret = false;

  known_csts->create (0);
  known_binfos->create (0);
  known_csts->safe_grow_cleared (count);
  known_binfos->safe_grow_cleared (count);
  if (known_aggs)
    {
      known_aggs->create (0);
      known_aggs->safe_grow_cleared (count);
    }

  if (removable_params_cost)
    *removable_params_cost = 0;

  for (i = 0; i < count; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      struct ipcp_lattice *lat = &plats->itself;

      if (ipa_lat_is_single_const (lat))
	{
	  struct ipcp_value *val = lat->values;
	  if (TREE_CODE (val->value) != TREE_BINFO)
	    {
	      (*known_csts)[i] = val->value;
	      if (removable_params_cost)
		*removable_params_cost
		  += estimate_move_cost (TREE_TYPE (val->value));
	      ret = true;
	    }
	  else if (plats->virt_call)
	    {
	      (*known_binfos)[i] = val->value;
	      ret = true;
	    }
	  else if (removable_params_cost
		   && !ipa_is_param_used (info, i))
	    *removable_params_cost
	      += estimate_move_cost (TREE_TYPE (ipa_get_param (info, i)));
	}
      else if (removable_params_cost
	       && !ipa_is_param_used (info, i))
	*removable_params_cost
	  += estimate_move_cost (TREE_TYPE (ipa_get_param (info, i)));

      if (known_aggs)
	{
	  vec<ipa_agg_jf_item_t, va_gc> *agg_items;
	  struct ipa_agg_jump_function *ajf;

	  agg_items = context_independent_aggregate_values (plats);
	  ajf = &(*known_aggs)[i];
	  ajf->items = agg_items;
	  ajf->by_ref = plats->aggs_by_ref;
	  ret |= agg_items != NULL;
	}
    }

  return ret;
}

/* The current interface in ipa-inline-analysis requires a pointer vector.
   Create it.

   FIXME: That interface should be re-worked, this is slightly silly.  Still,
   I'd like to discuss how to change it first and this demonstrates the
   issue.  */

static vec<ipa_agg_jump_function_p>
agg_jmp_p_vec_for_t_vec (vec<ipa_agg_jump_function_t> known_aggs)
{
  vec<ipa_agg_jump_function_p> ret;
  struct ipa_agg_jump_function *ajf;
  int i;

  ret.create (known_aggs.length ());
  FOR_EACH_VEC_ELT (known_aggs, i, ajf)
    ret.quick_push (ajf);
  return ret;
}

/* Iterate over known values of parameters of NODE and estimate the local
   effects in terms of time and size they have.  */

static void
estimate_local_effects (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);
  vec<tree> known_csts, known_binfos;
  vec<ipa_agg_jump_function_t> known_aggs;
  vec<ipa_agg_jump_function_p> known_aggs_ptrs;
  bool always_const;
  int base_time = inline_summary (node)->time;
  int removable_params_cost;

  if (!count || !ipcp_versionable_function_p (node))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEstimating effects for %s/%i, base_time: %i.\n",
	     cgraph_node_name (node), node->uid, base_time);

  always_const = gather_context_independent_values (info, &known_csts,
						    &known_binfos, &known_aggs,
						    &removable_params_cost);
  known_aggs_ptrs = agg_jmp_p_vec_for_t_vec (known_aggs);
  if (always_const)
    {
      struct caller_statistics stats;
      inline_hints hints;
      int time, size;

      init_caller_stats (&stats);
      cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
      estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
					 known_aggs_ptrs, &size, &time, &hints);
      time -= devirtualization_time_bonus (node, known_csts, known_binfos);
      time -= hint_time_bonus (hints);
      time -= removable_params_cost;
      size -= stats.n_calls * removable_params_cost;

      if (dump_file)
	fprintf (dump_file, " - context independent values, size: %i, "
		 "time_benefit: %i\n", size, base_time - time);

      if (size <= 0
	  || cgraph_will_be_removed_from_program_if_no_direct_calls (node))
	{
	  info->do_clone_for_all_contexts = true;
	  base_time = time;

	  if (dump_file)
	    fprintf (dump_file, "     Decided to specialize for all "
		     "known contexts, code not going to grow.\n");
	}
      else if (good_cloning_opportunity_p (node, base_time - time,
					   stats.freq_sum, stats.count_sum,
					   size))
	{
	  if (size + overall_size <= max_new_size)
	    {
	      info->do_clone_for_all_contexts = true;
	      base_time = time;
	      overall_size += size;

	      if (dump_file)
		fprintf (dump_file, "     Decided to specialize for all "
			 "known contexts, growth deemed beneficial.\n");
	    }
	  else if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "   Not cloning for all contexts because "
		     "max_new_size would be reached with %li.\n",
		     size + overall_size);
	}
    }

  for (i = 0; i < count; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      struct ipcp_lattice *lat = &plats->itself;
      struct ipcp_value *val;
      int emc;

      if (lat->bottom
	  || !lat->values
	  || known_csts[i]
	  || known_binfos[i])
	continue;

      for (val = lat->values; val; val = val->next)
	{
	  int time, size, time_benefit;
	  inline_hints hints;

	  if (TREE_CODE (val->value) != TREE_BINFO)
	    {
	      known_csts[i] = val->value;
	      known_binfos[i] = NULL_TREE;
	      emc = estimate_move_cost (TREE_TYPE (val->value));
	    }
	  else if (plats->virt_call)
	    {
	      known_csts[i] = NULL_TREE;
	      known_binfos[i] = val->value;
	      emc = 0;
	    }
	  else
	    continue;

	  estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
					     known_aggs_ptrs, &size, &time,
					     &hints);
	  time_benefit = base_time - time
	    + devirtualization_time_bonus (node, known_csts, known_binfos)
	    + hint_time_bonus (hints)
	    + removable_params_cost + emc;

	  gcc_checking_assert (size >= 0);
	  /* The inliner-heuristics based estimates may think that in certain
	     contexts some functions do not have any size at all but we want
	     all specializations to have at least a tiny cost, not least not to
	     divide by zero.  */
	  if (size == 0)
	    size = 1;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " - estimates for value ");
	      print_ipcp_constant_value (dump_file, val->value);
	      fprintf (dump_file, " for parameter ");
	      print_generic_expr (dump_file, ipa_get_param (info, i), 0);
	      fprintf (dump_file, ": time_benefit: %i, size: %i\n",
		       time_benefit, size);
	    }

	  val->local_time_benefit = time_benefit;
	  val->local_size_cost = size;
	}
      known_binfos[i] = NULL_TREE;
      known_csts[i] = NULL_TREE;
    }

  for (i = 0; i < count; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      struct ipa_agg_jump_function *ajf;
      struct ipcp_agg_lattice *aglat;

      if (plats->aggs_bottom || !plats->aggs)
	continue;

      ajf = &known_aggs[i];
      for (aglat = plats->aggs; aglat; aglat = aglat->next)
	{
	  struct ipcp_value *val;
	  if (aglat->bottom || !aglat->values
	      /* If the following is true, the one value is in known_aggs.  */
	      || (!plats->aggs_contain_variable
		  && ipa_lat_is_single_const (aglat)))
	    continue;

	  for (val = aglat->values; val; val = val->next)
	    {
	      int time, size, time_benefit;
	      struct ipa_agg_jf_item item;
	      inline_hints hints;

	      item.offset = aglat->offset;
	      item.value = val->value;
	      vec_safe_push (ajf->items, item);

	      estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
						 known_aggs_ptrs, &size, &time,
						 &hints);
	      time_benefit = base_time - time
		+ devirtualization_time_bonus (node, known_csts, known_binfos)
		+ hint_time_bonus (hints);
	      gcc_checking_assert (size >= 0);
	      if (size == 0)
		size = 1;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, " - estimates for value ");
		  print_ipcp_constant_value (dump_file, val->value);
		  fprintf (dump_file, " for parameter ");
		  print_generic_expr (dump_file, ipa_get_param (info, i), 0);
		  fprintf (dump_file, "[%soffset: " HOST_WIDE_INT_PRINT_DEC
			   "]: time_benefit: %i, size: %i\n",
			   plats->aggs_by_ref ? "ref " : "",
			   aglat->offset, time_benefit, size);
		}

	      val->local_time_benefit = time_benefit;
	      val->local_size_cost = size;
	      ajf->items->pop ();
	    }
	}
    }

  for (i = 0; i < count; i++)
    vec_free (known_aggs[i].items);

  known_csts.release ();
  known_binfos.release ();
  known_aggs.release ();
  known_aggs_ptrs.release ();
}

/* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
   topological sort of values.  */

static void
add_val_to_toposort (struct ipcp_value *cur_val)
{
  static int dfs_counter = 0;
  static struct ipcp_value *stack;
  struct ipcp_value_source *src;

  if (cur_val->dfs)
    return;

  dfs_counter++;
  cur_val->dfs = dfs_counter;
  cur_val->low_link = dfs_counter;

  cur_val->topo_next = stack;
  stack = cur_val;
  cur_val->on_stack = true;

  for (src = cur_val->sources; src; src = src->next)
    if (src->val)
      {
	if (src->val->dfs == 0)
	  {
	    add_val_to_toposort (src->val);
	    if (src->val->low_link < cur_val->low_link)
	      cur_val->low_link = src->val->low_link;
	  }
	else if (src->val->on_stack
		 && src->val->dfs < cur_val->low_link)
	  cur_val->low_link = src->val->dfs;
      }

  if (cur_val->dfs == cur_val->low_link)
    {
      struct ipcp_value *v, *scc_list = NULL;

      do
	{
	  v = stack;
	  stack = v->topo_next;
	  v->on_stack = false;

	  v->scc_next = scc_list;
	  scc_list = v;
	}
      while (v != cur_val);

      cur_val->topo_next = values_topo;
      values_topo = cur_val;
    }
}

/* Add all values in lattices associated with NODE to the topological sort if
   they are not there yet.  */

static void
add_all_node_vals_to_toposort (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);

  for (i = 0; i < count; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      struct ipcp_lattice *lat = &plats->itself;
      struct ipcp_agg_lattice *aglat;
      struct ipcp_value *val;

      if (!lat->bottom)
	for (val = lat->values; val; val = val->next)
	  add_val_to_toposort (val);

      if (!plats->aggs_bottom)
	for (aglat = plats->aggs; aglat; aglat = aglat->next)
	  if (!aglat->bottom)
	    for (val = aglat->values; val; val = val->next)
	      add_val_to_toposort (val);
    }
}

/* One pass of constants propagation along the call graph edges, from callers
   to callees (requires topological ordering in TOPO), iterate over strongly
   connected components.  */

static void
propagate_constants_topo (struct topo_info *topo)
{
  int i;

  for (i = topo->nnodes - 1; i >= 0; i--)
    {
      struct cgraph_node *v, *node = topo->order[i];
      struct ipa_dfs_info *node_dfs_info;

      if (!cgraph_function_with_gimple_body_p (node))
	continue;

      node_dfs_info = (struct ipa_dfs_info *) node->symbol.aux;
      /* First, iteratively propagate within the strongly connected component
	 until all lattices stabilize.  */
      v = node_dfs_info->next_cycle;
      while (v)
	{
	  push_node_to_stack (topo, v);
	  v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle;
	}

      v = node;
      while (v)
	{
	  struct cgraph_edge *cs;

	  for (cs = v->callees; cs; cs = cs->next_callee)
	    if (edge_within_scc (cs)
		&& propagate_constants_accross_call (cs))
	      push_node_to_stack (topo, cs->callee);
	  v = pop_node_from_stack (topo);
	}

      /* Afterwards, propagate along edges leading out of the SCC, calculate
	 the local effects of the discovered constants and add all valid
	 values to their topological sort.  */
      v = node;
      while (v)
	{
	  struct cgraph_edge *cs;

	  estimate_local_effects (v);
	  add_all_node_vals_to_toposort (v);
	  for (cs = v->callees; cs; cs = cs->next_callee)
	    if (!edge_within_scc (cs))
	      propagate_constants_accross_call (cs);

	  v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle;
	}
    }
}
/* Return the sum of A and B if neither of them is bigger than INT_MAX/2;
   otherwise return the bigger of the two.  */

static int
safe_add (int a, int b)
{
  if (a > INT_MAX / 2 || b > INT_MAX / 2)
    return a > b ? a : b;
  else
    return a + b;
}
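
/* For example, safe_add (INT_MAX - 10, 1000) simply returns INT_MAX - 10
   instead of overflowing.  Precise benefit numbers do not matter once they
   are this large; what matters is that they stay comparable and never wrap
   around.  */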
/* Propagate the estimated effects of individual values along the topological
   order, from the dependent values to those they depend on.  */

static void
propagate_effects (void)
{
  struct ipcp_value *base;

  for (base = values_topo; base; base = base->topo_next)
    {
      struct ipcp_value_source *src;
      struct ipcp_value *val;
      int time = 0, size = 0;

      for (val = base; val; val = val->scc_next)
	{
	  time = safe_add (time,
			   val->local_time_benefit + val->prop_time_benefit);
	  size = safe_add (size, val->local_size_cost + val->prop_size_cost);
	}

      for (val = base; val; val = val->scc_next)
	for (src = val->sources; src; src = src->next)
	  if (src->val
	      && cgraph_maybe_hot_edge_p (src->cs))
	    {
	      src->val->prop_time_benefit = safe_add (time,
						  src->val->prop_time_benefit);
	      src->val->prop_size_cost = safe_add (size,
						   src->val->prop_size_cost);
	    }
    }
}
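
/* In other words, for every SCC of values we first accumulate the local and
   already-propagated benefits of all its members, and then credit that sum to
   each source value that feeds the SCC over a maybe-hot edge.  Because
   VALUES_TOPO lists dependent values before their sources, a value's
   prop_time_benefit and prop_size_cost are complete by the time its own SCC
   is processed.  */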
/* Propagate constants, binfos and their effects from the summaries
   interprocedurally.  */

static void
ipcp_propagate_stage (struct topo_info *topo)
{
  struct cgraph_node *node;

  if (dump_file)
    fprintf (dump_file, "\n Propagating constants:\n\n");

  if (in_lto_p)
    ipa_update_after_lto_read ();

  FOR_EACH_DEFINED_FUNCTION (node)
  {
    struct ipa_node_params *info = IPA_NODE_REF (node);

    determine_versionability (node);
    if (cgraph_function_with_gimple_body_p (node))
      {
	info->lattices = XCNEWVEC (struct ipcp_param_lattices,
				   ipa_get_param_count (info));
	initialize_node_lattices (node);
      }
    if (node->count > max_count)
      max_count = node->count;
    overall_size += inline_summary (node)->self_size;
  }

  max_new_size = overall_size;
  if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
    max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
  max_new_size += max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;

  if (dump_file)
    fprintf (dump_file, "\noverall_size: %li, max_new_size: %li\n",
	     overall_size, max_new_size);

  propagate_constants_topo (topo);
#ifdef ENABLE_CHECKING
  ipcp_verify_propagated_values ();
#endif
  propagate_effects ();

  if (dump_file)
    {
      fprintf (dump_file, "\nIPA lattices after all propagation:\n");
      print_all_lattices (dump_file, (dump_flags & TDF_DETAILS), true);
    }
}
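
/* A worked example of the budget computed above (assuming the default
   --param ipcp-unit-growth=10): for a unit whose self sizes sum to
   overall_size == 20000, max_new_size becomes 20000 + 20000 * 10 / 100 + 1
   == 22001, i.e. cloning may grow the unit by roughly 10%.  Very small units
   are first rounded up to PARAM_LARGE_UNIT_INSNS so that tiny programs still
   get some cloning headroom.  */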
/* Discover newly direct outgoing edges from NODE which is a new clone with
   known KNOWN_VALS and make them direct.  */

static void
ipcp_discover_new_direct_edges (struct cgraph_node *node,
				vec<tree> known_vals)
{
  struct cgraph_edge *ie, *next_ie;
  bool found = false;

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      tree target;

      next_ie = ie->next_callee;
      target = ipa_get_indirect_edge_target (ie, known_vals, vNULL, vNULL);
      if (target)
	{
	  ipa_make_edge_direct_to_target (ie, target);
	  found = true;
	}
    }
  /* Turning calls to direct calls will improve overall summary.  */
  if (found)
    inline_update_overall_summary (node);
}
/* Vector of pointers which for linked lists of clones of an original cgraph
   edge.  */

static vec<cgraph_edge_p> next_edge_clone;

static inline void
grow_next_edge_clone_vector (void)
{
  if (next_edge_clone.length ()
      <= (unsigned) cgraph_edge_max_uid)
    next_edge_clone.safe_grow_cleared (cgraph_edge_max_uid + 1);
}

/* Edge duplication hook to grow the appropriate linked list in
   NEXT_EDGE_CLONE.  */

static void
ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			    __attribute__((unused)) void *data)
{
  grow_next_edge_clone_vector ();
  next_edge_clone[dst->uid] = next_edge_clone[src->uid];
  next_edge_clone[src->uid] = dst;
}
/* See if NODE is a clone with a known aggregate value at a given OFFSET of a
   parameter with the given INDEX.  */

static tree
get_clone_agg_value (struct cgraph_node *node, HOST_WIDEST_INT offset,
		     int index)
{
  struct ipa_agg_replacement_value *aggval;

  aggval = ipa_get_agg_replacements_for_node (node);
  while (aggval)
    {
      if (aggval->offset == offset
	  && aggval->index == index)
	return aggval->value;
      aggval = aggval->next;
    }
  return NULL_TREE;
}
/* Return true if edge CS does bring about the value described by SRC.  */

static bool
cgraph_edge_brings_value_p (struct cgraph_edge *cs,
			    struct ipcp_value_source *src)
{
  struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
  struct ipa_node_params *dst_info = IPA_NODE_REF (cs->callee);

  if ((dst_info->ipcp_orig_node && !dst_info->is_all_contexts_clone)
      || caller_info->node_dead)
    return false;
  if (!src->val)
    return true;

  if (caller_info->ipcp_orig_node)
    {
      tree t;
      if (src->offset == -1)
	t = caller_info->known_vals[src->index];
      else
	t = get_clone_agg_value (cs->caller, src->offset, src->index);
      return (t != NULL_TREE
	      && values_equal_for_ipcp_p (src->val->value, t));
    }
  else
    {
      struct ipcp_agg_lattice *aglat;
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
								 src->index);
      if (src->offset == -1)
	return (ipa_lat_is_single_const (&plats->itself)
		&& values_equal_for_ipcp_p (src->val->value,
					    plats->itself.values->value));
      else
	{
	  if (plats->aggs_bottom || plats->aggs_contain_variable)
	    return false;
	  for (aglat = plats->aggs; aglat; aglat = aglat->next)
	    if (aglat->offset == src->offset)
	      return (ipa_lat_is_single_const (aglat)
		      && values_equal_for_ipcp_p (src->val->value,
						  aglat->values->value));
	  return false;
	}
    }
}
/* Get the next clone in the linked list of clones of an edge.  */

static inline struct cgraph_edge *
get_next_cgraph_edge_clone (struct cgraph_edge *cs)
{
  return next_edge_clone[cs->uid];
}
/* Given VAL, iterate over all its sources and if they still hold, add their
   edge frequency, their counts and their number into *FREQ_SUM, *COUNT_SUM
   and *CALLER_COUNT respectively.  Return true if any of the edges is
   considered hot.  */

static bool
get_info_about_necessary_edges (struct ipcp_value *val, int *freq_sum,
				gcov_type *count_sum, int *caller_count)
{
  struct ipcp_value_source *src;
  int freq = 0, count = 0;
  gcov_type cnt = 0;
  bool hot = false;

  for (src = val->sources; src; src = src->next)
    {
      struct cgraph_edge *cs = src->cs;
      while (cs)
	{
	  if (cgraph_edge_brings_value_p (cs, src))
	    {
	      count++;
	      freq += cs->frequency;
	      cnt += cs->count;
	      hot |= cgraph_maybe_hot_edge_p (cs);
	    }
	  cs = get_next_cgraph_edge_clone (cs);
	}
    }

  *freq_sum = freq;
  *count_sum = cnt;
  *caller_count = count;
  return hot;
}
/* Return a vector of incoming edges that do bring value VAL.  It is assumed
   their number is known and equal to CALLER_COUNT.  */

static vec<cgraph_edge_p>
gather_edges_for_value (struct ipcp_value *val, int caller_count)
{
  struct ipcp_value_source *src;
  vec<cgraph_edge_p> ret;

  ret.create (caller_count);
  for (src = val->sources; src; src = src->next)
    {
      struct cgraph_edge *cs = src->cs;
      while (cs)
	{
	  if (cgraph_edge_brings_value_p (cs, src))
	    ret.quick_push (cs);
	  cs = get_next_cgraph_edge_clone (cs);
	}
    }

  return ret;
}
/* Construct a replacement map for a known VALUE for a formal parameter PARM.
   Return it or NULL if for some reason it cannot be created.  */

static struct ipa_replace_map *
get_replacement_map (tree value, tree parm)
{
  tree req_type = TREE_TYPE (parm);
  struct ipa_replace_map *replace_map;

  if (!useless_type_conversion_p (req_type, TREE_TYPE (value)))
    {
      if (fold_convertible_p (req_type, value))
	value = fold_build1 (NOP_EXPR, req_type, value);
      else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (value)))
	value = fold_build1 (VIEW_CONVERT_EXPR, req_type, value);
      else
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "    const ");
	      print_generic_expr (dump_file, value, 0);
	      fprintf (dump_file, "  can't be converted to param ");
	      print_generic_expr (dump_file, parm, 0);
	      fprintf (dump_file, "\n");
	    }
	  return NULL;
	}
    }

  replace_map = ggc_alloc_ipa_replace_map ();
  if (dump_file)
    {
      fprintf (dump_file, "    replacing param ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " with const ");
      print_generic_expr (dump_file, value, 0);
      fprintf (dump_file, "\n");
    }
  replace_map->old_tree = parm;
  replace_map->new_tree = value;
  replace_map->replace_p = true;
  replace_map->ref_p = false;

  return replace_map;
}
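
/* Design note on the conversions above: a NOP_EXPR is enough when the
   constant can be folded to the parameter type (e.g. an int constant passed
   for a long parameter), while VIEW_CONVERT_EXPR reinterprets the bits when
   only the sizes match; anything else is rejected so that the cloned body
   never sees a value of the wrong type.  */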
/* Dump new profiling counts.  */

static void
dump_profile_updates (struct cgraph_node *orig_node,
		      struct cgraph_node *new_node)
{
  struct cgraph_edge *cs;

  fprintf (dump_file, "    setting count of the specialized node to "
	   HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) new_node->count);
  for (cs = new_node->callees; cs; cs = cs->next_callee)
    fprintf (dump_file, "      edge to %s has count "
	     HOST_WIDE_INT_PRINT_DEC "\n",
	     cgraph_node_name (cs->callee), (HOST_WIDE_INT) cs->count);

  fprintf (dump_file, "    setting count of the original node to "
	   HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) orig_node->count);
  for (cs = orig_node->callees; cs; cs = cs->next_callee)
    fprintf (dump_file, "      edge to %s is left with "
	     HOST_WIDE_INT_PRINT_DEC "\n",
	     cgraph_node_name (cs->callee), (HOST_WIDE_INT) cs->count);
}
/* After a specialized NEW_NODE version of ORIG_NODE has been created, update
   their profile information to reflect this.  */

static void
update_profiling_info (struct cgraph_node *orig_node,
		       struct cgraph_node *new_node)
{
  struct cgraph_edge *cs;
  struct caller_statistics stats;
  gcov_type new_sum, orig_sum;
  gcov_type remainder, orig_node_count = orig_node->count;

  if (orig_node_count == 0)
    return;

  init_caller_stats (&stats);
  cgraph_for_node_and_aliases (orig_node, gather_caller_stats, &stats, false);
  orig_sum = stats.count_sum;
  init_caller_stats (&stats);
  cgraph_for_node_and_aliases (new_node, gather_caller_stats, &stats, false);
  new_sum = stats.count_sum;

  if (orig_node_count < orig_sum + new_sum)
    {
      if (dump_file)
	fprintf (dump_file, "    Problem: node %s/%i has too low count "
		 HOST_WIDE_INT_PRINT_DEC " while the sum of incoming "
		 "counts is " HOST_WIDE_INT_PRINT_DEC "\n",
		 cgraph_node_name (orig_node), orig_node->uid,
		 (HOST_WIDE_INT) orig_node_count,
		 (HOST_WIDE_INT) (orig_sum + new_sum));

      orig_node_count = (orig_sum + new_sum) * 12 / 10;
      if (dump_file)
	fprintf (dump_file, "      proceeding by pretending it was "
		 HOST_WIDE_INT_PRINT_DEC "\n",
		 (HOST_WIDE_INT) orig_node_count);
    }

  new_node->count = new_sum;
  remainder = orig_node_count - new_sum;
  orig_node->count = remainder;

  for (cs = new_node->callees; cs; cs = cs->next_callee)
    if (cs->frequency)
      cs->count = cs->count * (new_sum * REG_BR_PROB_BASE
			       / orig_node_count) / REG_BR_PROB_BASE;
    else
      cs->count = 0;

  for (cs = orig_node->callees; cs; cs = cs->next_callee)
    cs->count = cs->count * (remainder * REG_BR_PROB_BASE
			     / orig_node_count) / REG_BR_PROB_BASE;

  if (dump_file)
    dump_profile_updates (orig_node, new_node);
}
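
/* A small worked example of the scaling above: if the original node had a
   count of 1000 and the callers redirected to the clone account for 400 of
   it, the clone gets count 400, the original keeps 600, and every callee
   edge count is scaled by 400/1000 in the clone and by 600/1000 in the
   original (computed via REG_BR_PROB_BASE to stay in integer arithmetic).  */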
/* Update the respective profile of specialized NEW_NODE and the original
   ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
   have been redirected to the specialized version.  */

static void
update_specialized_profile (struct cgraph_node *new_node,
			    struct cgraph_node *orig_node,
			    gcov_type redirected_sum)
{
  struct cgraph_edge *cs;
  gcov_type new_node_count, orig_node_count = orig_node->count;

  if (dump_file)
    fprintf (dump_file, "    the sum of counts of redirected edges is "
	     HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) redirected_sum);
  if (orig_node_count == 0)
    return;

  gcc_assert (orig_node_count >= redirected_sum);

  new_node_count = new_node->count;
  new_node->count += redirected_sum;
  orig_node->count -= redirected_sum;

  for (cs = new_node->callees; cs; cs = cs->next_callee)
    if (cs->frequency)
      cs->count += cs->count * redirected_sum / new_node_count;
    else
      cs->count = 0;

  for (cs = orig_node->callees; cs; cs = cs->next_callee)
    {
      gcov_type dec = cs->count * (redirected_sum * REG_BR_PROB_BASE
				   / orig_node_count) / REG_BR_PROB_BASE;
      if (dec < cs->count)
	cs->count -= dec;
      else
	cs->count = 0;
    }

  if (dump_file)
    dump_profile_updates (orig_node, new_node);
}
/* Create a specialized version of NODE with known constants and types of
   parameters in KNOWN_VALS and redirect all edges in CALLERS to it.  */

static struct cgraph_node *
create_specialized_node (struct cgraph_node *node,
			 vec<tree> known_vals,
			 struct ipa_agg_replacement_value *aggvals,
			 vec<cgraph_edge_p> callers)
{
  struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
  vec<ipa_replace_map_p, va_gc> *replace_trees = NULL;
  struct cgraph_node *new_node;
  int i, count = ipa_get_param_count (info);
  bitmap args_to_skip;

  gcc_assert (!info->ipcp_orig_node);

  if (node->local.can_change_signature)
    {
      args_to_skip = BITMAP_GGC_ALLOC ();
      for (i = 0; i < count; i++)
	{
	  tree t = known_vals[i];

	  if ((t && TREE_CODE (t) != TREE_BINFO)
	      || !ipa_is_param_used (info, i))
	    bitmap_set_bit (args_to_skip, i);
	}
    }
  else
    {
      args_to_skip = NULL;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "      cannot change function signature\n");
    }

  for (i = 0; i < count; i++)
    {
      tree t = known_vals[i];
      if (t && TREE_CODE (t) != TREE_BINFO)
	{
	  struct ipa_replace_map *replace_map;

	  replace_map = get_replacement_map (t, ipa_get_param (info, i));
	  if (replace_map)
	    vec_safe_push (replace_trees, replace_map);
	}
    }

  new_node = cgraph_create_virtual_clone (node, callers, replace_trees,
					  args_to_skip, "constprop");
  ipa_set_node_agg_value_chain (new_node, aggvals);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "     the new node is %s/%i.\n",
	       cgraph_node_name (new_node), new_node->uid);
      if (aggvals)
	ipa_dump_agg_replacement_values (dump_file, aggvals);
    }
  gcc_checking_assert (ipa_node_params_vector.exists ()
		       && (ipa_node_params_vector.length ()
			   > (unsigned) cgraph_max_uid));
  update_profiling_info (node, new_node);
  new_info = IPA_NODE_REF (new_node);
  new_info->ipcp_orig_node = node;
  new_info->known_vals = known_vals;

  ipcp_discover_new_direct_edges (new_node, known_vals);

  callers.release ();
  return new_node;
}
/* Given a NODE, and a subset of its CALLERS, try to populate blank slots in
   KNOWN_VALS with constants and types that are also known for all of the
   CALLERS.  */

static void
find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
					    vec<tree> known_vals,
					    vec<cgraph_edge_p> callers)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);

  for (i = 0; i < count; i++)
    {
      struct cgraph_edge *cs;
      tree newval = NULL_TREE;
      int j;

      if (ipa_get_scalar_lat (info, i)->bottom || known_vals[i])
	continue;

      FOR_EACH_VEC_ELT (callers, j, cs)
	{
	  struct ipa_jump_func *jump_func;
	  tree t;

	  if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
	    {
	      newval = NULL_TREE;
	      break;
	    }
	  jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
	  t = ipa_value_from_jfunc (IPA_NODE_REF (cs->caller), jump_func);
	  if (!t
	      || (newval
		  && !values_equal_for_ipcp_p (t, newval)))
	    {
	      newval = NULL_TREE;
	      break;
	    }
	  else
	    newval = t;
	}

      if (newval)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "    adding an extra known scalar value ");
	      print_ipcp_constant_value (dump_file, newval);
	      fprintf (dump_file, " for parameter ");
	      print_generic_expr (dump_file, ipa_get_param (info, i), 0);
	      fprintf (dump_file, "\n");
	    }

	  known_vals[i] = newval;
	}
    }
}
/* Go through PLATS and create a vector of values consisting of values and
   offsets (minus OFFSET) of lattices that contain only a single value.  */

static vec<ipa_agg_jf_item_t>
copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
{
  vec<ipa_agg_jf_item_t> res = vNULL;

  if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
    return vNULL;

  for (struct ipcp_agg_lattice *aglat = plats->aggs; aglat; aglat = aglat->next)
    if (ipa_lat_is_single_const (aglat))
      {
	struct ipa_agg_jf_item ti;
	ti.offset = aglat->offset - offset;
	ti.value = aglat->values->value;
	res.safe_push (ti);
      }
  return res;
}
/* Intersect all values in INTER with single value lattices in PLATS (while
   subtracting OFFSET).  */

static void
intersect_with_plats (struct ipcp_param_lattices *plats,
		      vec<ipa_agg_jf_item_t> *inter,
		      HOST_WIDE_INT offset)
{
  struct ipcp_agg_lattice *aglat;
  struct ipa_agg_jf_item *item;
  int k;

  if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
    {
      inter->release ();
      return;
    }

  aglat = plats->aggs;
  FOR_EACH_VEC_ELT (*inter, k, item)
    {
      bool found = false;
      if (!item->value)
	continue;
      while (aglat)
	{
	  if (aglat->offset - offset > item->offset)
	    break;
	  if (aglat->offset - offset == item->offset)
	    {
	      gcc_checking_assert (item->value);
	      if (values_equal_for_ipcp_p (item->value, aglat->values->value))
		found = true;
	      break;
	    }
	  aglat = aglat->next;
	}
      if (!found)
	item->value = NULL_TREE;
    }
}
/* Copy aggregate replacement values of NODE (which is an IPA-CP clone) to the
   vector result while subtracting OFFSET from the individual value offsets.  */

static vec<ipa_agg_jf_item_t>
agg_replacements_to_vector (struct cgraph_node *node, HOST_WIDE_INT offset)
{
  struct ipa_agg_replacement_value *av;
  vec<ipa_agg_jf_item_t> res = vNULL;

  for (av = ipa_get_agg_replacements_for_node (node); av; av = av->next)
    {
      struct ipa_agg_jf_item item;
      gcc_checking_assert (av->value);
      item.offset = av->offset - offset;
      item.value = av->value;
      res.safe_push (item);
    }

  return res;
}
/* Intersect all values in INTER with those that we have already scheduled to
   be replaced in parameter number INDEX of NODE, which is an IPA-CP clone
   (while subtracting OFFSET).  */

static void
intersect_with_agg_replacements (struct cgraph_node *node, int index,
				 vec<ipa_agg_jf_item_t> *inter,
				 HOST_WIDE_INT offset)
{
  struct ipa_agg_replacement_value *srcvals;
  struct ipa_agg_jf_item *item;
  int i;

  srcvals = ipa_get_agg_replacements_for_node (node);
  if (!srcvals)
    {
      inter->release ();
      return;
    }

  FOR_EACH_VEC_ELT (*inter, i, item)
    {
      struct ipa_agg_replacement_value *av;
      bool found = false;
      if (!item->value)
	continue;
      for (av = srcvals; av; av = av->next)
	{
	  gcc_checking_assert (av->value);
	  if (av->index == index
	      && av->offset - offset == item->offset)
	    {
	      if (values_equal_for_ipcp_p (item->value, av->value))
		found = true;
	      break;
	    }
	}
      if (!found)
	item->value = NULL_TREE;
    }
}
/* Intersect values in INTER with aggregate values that come along edge CS to
   parameter number INDEX and return it.  If INTER does not actually exist yet,
   copy all incoming values to it.  If we determine we ended up with no values
   whatsoever, return a released vector.  */

static vec<ipa_agg_jf_item_t>
intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
				vec<ipa_agg_jf_item_t> inter)
{
  struct ipa_jump_func *jfunc;
  jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), index);
  if (jfunc->type == IPA_JF_PASS_THROUGH
      && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
    {
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);

      if (caller_info->ipcp_orig_node)
	{
	  struct cgraph_node *orig_node = caller_info->ipcp_orig_node;
	  struct ipcp_param_lattices *orig_plats;
	  orig_plats = ipa_get_parm_lattices (IPA_NODE_REF (orig_node),
					      src_idx);
	  if (agg_pass_through_permissible_p (orig_plats, jfunc))
	    {
	      if (!inter.exists ())
		inter = agg_replacements_to_vector (cs->caller, 0);
	      else
		intersect_with_agg_replacements (cs->caller, src_idx,
						 &inter, 0);
	    }
	}
      else
	{
	  struct ipcp_param_lattices *src_plats;
	  src_plats = ipa_get_parm_lattices (caller_info, src_idx);
	  if (agg_pass_through_permissible_p (src_plats, jfunc))
	    {
	      /* Currently we do not produce clobber aggregate jump
		 functions, adjust when we do.  */
	      gcc_checking_assert (!jfunc->agg.items);
	      if (!inter.exists ())
		inter = copy_plats_to_inter (src_plats, 0);
	      else
		intersect_with_plats (src_plats, &inter, 0);
	    }
	}
    }
  else if (jfunc->type == IPA_JF_ANCESTOR
	   && ipa_get_jf_ancestor_agg_preserved (jfunc))
    {
      struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
      int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
      struct ipcp_param_lattices *src_plats;
      HOST_WIDE_INT delta = ipa_get_jf_ancestor_offset (jfunc);

      if (caller_info->ipcp_orig_node)
	{
	  if (!inter.exists ())
	    inter = agg_replacements_to_vector (cs->caller, delta);
	  else
	    intersect_with_agg_replacements (cs->caller, index, &inter,
					     delta);
	}
      else
	{
	  src_plats = ipa_get_parm_lattices (caller_info, src_idx);
	  /* Currently we do not produce clobber aggregate jump
	     functions, adjust when we do.  */
	  gcc_checking_assert (!src_plats->aggs || !jfunc->agg.items);
	  if (!inter.exists ())
	    inter = copy_plats_to_inter (src_plats, delta);
	  else
	    intersect_with_plats (src_plats, &inter, delta);
	}
    }
  else if (jfunc->agg.items)
    {
      struct ipa_agg_jf_item *item;
      int k;

      if (!inter.exists ())
	for (unsigned i = 0; i < jfunc->agg.items->length (); i++)
	  inter.safe_push ((*jfunc->agg.items)[i]);
      else
	FOR_EACH_VEC_ELT (inter, k, item)
	  {
	    int l = 0;
	    bool found = false;

	    if (!item->value)
	      continue;

	    while ((unsigned) l < jfunc->agg.items->length ())
	      {
		struct ipa_agg_jf_item *ti;
		ti = &(*jfunc->agg.items)[l];
		if (ti->offset > item->offset)
		  break;
		if (ti->offset == item->offset)
		  {
		    gcc_checking_assert (ti->value);
		    if (values_equal_for_ipcp_p (item->value,
						 ti->value))
		      found = true;
		    break;
		  }
		l++;
	      }
	    if (!found)
	      item->value = NULL;
	  }
    }
  else
    {
      inter.release ();
      return vec<ipa_agg_jf_item_t>();
    }

  return inter;
}
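
/* Summary of the cases handled above: an aggregate-preserving pass-through or
   ancestor jump function lets us reuse what is known about the caller's own
   parameter (either its lattices, or, if the caller is itself a clone, its
   already scheduled aggregate replacements, shifted by the ancestor offset),
   while a jump function carrying explicit aggregate items is intersected
   directly.  Any other jump function kills all aggregate knowledge for this
   parameter.  */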
/* Look at edges in CALLERS and collect all known aggregate values that arrive
   from all of them.  */

static struct ipa_agg_replacement_value *
find_aggregate_values_for_callers_subset (struct cgraph_node *node,
					  vec<cgraph_edge_p> callers)
{
  struct ipa_node_params *dest_info = IPA_NODE_REF (node);
  struct ipa_agg_replacement_value *res = NULL;
  struct cgraph_edge *cs;
  int i, j, count = ipa_get_param_count (dest_info);

  FOR_EACH_VEC_ELT (callers, j, cs)
    {
      int c = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
      if (c < count)
	count = c;
    }

  for (i = 0; i < count; i++)
    {
      struct cgraph_edge *cs;
      vec<ipa_agg_jf_item_t> inter = vNULL;
      struct ipa_agg_jf_item *item;
      int j;

      /* Among other things, the following check should deal with all by_ref
	 mismatches.  */
      if (ipa_get_parm_lattices (dest_info, i)->aggs_bottom)
	continue;

      FOR_EACH_VEC_ELT (callers, j, cs)
	{
	  inter = intersect_aggregates_with_edge (cs, i, inter);

	  if (!inter.exists ())
	    goto next_param;
	}

      FOR_EACH_VEC_ELT (inter, j, item)
	{
	  struct ipa_agg_replacement_value *v;

	  if (!item->value)
	    continue;

	  v = ggc_alloc_ipa_agg_replacement_value ();
	  v->index = i;
	  v->offset = item->offset;
	  v->value = item->value;
	  v->next = res;
	  res = v;
	}

    next_param:
      if (inter.exists ())
	inter.release ();
    }
  return res;
}
/* Turn KNOWN_AGGS into a list of aggregate replacement values.  */

static struct ipa_agg_replacement_value *
known_aggs_to_agg_replacement_list (vec<ipa_agg_jump_function_t> known_aggs)
{
  struct ipa_agg_replacement_value *res = NULL;
  struct ipa_agg_jump_function *aggjf;
  struct ipa_agg_jf_item *item;
  int i, j;

  FOR_EACH_VEC_ELT (known_aggs, i, aggjf)
    FOR_EACH_VEC_SAFE_ELT (aggjf->items, j, item)
      {
	struct ipa_agg_replacement_value *v;
	v = ggc_alloc_ipa_agg_replacement_value ();
	v->index = i;
	v->offset = item->offset;
	v->value = item->value;
	v->next = res;
	res = v;
      }
  return res;
}
/* Determine whether CS also brings all scalar values that the NODE is
   specialized for.  */

static bool
cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
					 struct cgraph_node *node)
{
  struct ipa_node_params *dest_info = IPA_NODE_REF (node);
  int count = ipa_get_param_count (dest_info);
  struct ipa_node_params *caller_info;
  struct ipa_edge_args *args;
  int i;

  caller_info = IPA_NODE_REF (cs->caller);
  args = IPA_EDGE_REF (cs);
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      tree val, t;

      val = dest_info->known_vals[i];
      if (!val)
	continue;

      if (i >= ipa_get_cs_argument_count (args))
	return false;
      jump_func = ipa_get_ith_jump_func (args, i);
      t = ipa_value_from_jfunc (caller_info, jump_func);
      if (!t || !values_equal_for_ipcp_p (val, t))
	return false;
    }
  return true;
}
/* Determine whether CS also brings all aggregate values that NODE is
   specialized for.  */

static bool
cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
					  struct cgraph_node *node)
{
  struct ipa_node_params *orig_caller_info = IPA_NODE_REF (cs->caller);
  struct ipa_agg_replacement_value *aggval;
  int i, ec, count;

  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return true;

  count = ipa_get_param_count (IPA_NODE_REF (node));
  ec = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  if (ec < count)
    for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
      if (aggval->index >= ec)
	return false;

  if (orig_caller_info->ipcp_orig_node)
    orig_caller_info = IPA_NODE_REF (orig_caller_info->ipcp_orig_node);

  for (i = 0; i < count; i++)
    {
      static vec<ipa_agg_jf_item_t> values = vec<ipa_agg_jf_item_t>();
      struct ipcp_param_lattices *plats;
      bool interesting = false;
      for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
	if (aggval->index == i)
	  {
	    interesting = true;
	    break;
	  }
      if (!interesting)
	continue;

      plats = ipa_get_parm_lattices (orig_caller_info, aggval->index);
      if (plats->aggs_bottom)
	return false;

      values = intersect_aggregates_with_edge (cs, i, values);
      if (!values.exists ())
	return false;

      for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
	if (aggval->index == i)
	  {
	    struct ipa_agg_jf_item *item;
	    int j;
	    bool found = false;
	    FOR_EACH_VEC_ELT (values, j, item)
	      if (item->value
		  && item->offset == av->offset
		  && values_equal_for_ipcp_p (item->value, av->value))
		{
		  found = true;
		  break;
		}
	    if (!found)
	      {
		values.release ();
		return false;
	      }
	  }
    }
  return true;
}
/* Given an original NODE and a VAL for which we have already created a
   specialized clone, look whether there are incoming edges that still lead
   into the old node but now also bring the requested value and also conform to
   all other criteria such that they can be redirected to the special node.
   This function can therefore redirect the final edge in an SCC.  */

static void
perhaps_add_new_callers (struct cgraph_node *node, struct ipcp_value *val)
{
  struct ipcp_value_source *src;
  gcov_type redirected_sum = 0;

  for (src = val->sources; src; src = src->next)
    {
      struct cgraph_edge *cs = src->cs;
      while (cs)
	{
	  enum availability availability;
	  struct cgraph_node *dst = cgraph_function_node (cs->callee,
							  &availability);
	  if ((dst == node || IPA_NODE_REF (dst)->is_all_contexts_clone)
	      && availability > AVAIL_OVERWRITABLE
	      && cgraph_edge_brings_value_p (cs, src))
	    {
	      if (cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
		  && cgraph_edge_brings_all_agg_vals_for_node (cs,
							       val->spec_node))
		{
		  if (dump_file)
		    fprintf (dump_file, " - adding an extra caller %s/%i"
			     " of %s/%i\n",
			     xstrdup (cgraph_node_name (cs->caller)),
			     cs->caller->uid,
			     xstrdup (cgraph_node_name (val->spec_node)),
			     val->spec_node->uid);

		  cgraph_redirect_edge_callee (cs, val->spec_node);
		  redirected_sum += cs->count;
		}
	    }
	  cs = get_next_cgraph_edge_clone (cs);
	}
    }

  if (redirected_sum)
    update_specialized_profile (val->spec_node, node, redirected_sum);
}
/* Copy KNOWN_BINFOS to KNOWN_VALS.  */

static void
move_binfos_to_values (vec<tree> known_vals,
		       vec<tree> known_binfos)
{
  tree t;
  int i;

  for (i = 0; known_binfos.iterate (i, &t); i++)
    if (t)
      known_vals[i] = t;
}

/* Return true if there is a replacement equivalent to VALUE, INDEX and OFFSET
   among those in the AGGVALS list.  */

static bool
ipcp_val_in_agg_replacements_p (struct ipa_agg_replacement_value *aggvals,
				int index, HOST_WIDE_INT offset, tree value)
{
  while (aggvals)
    {
      if (aggvals->index == index
	  && aggvals->offset == offset
	  && values_equal_for_ipcp_p (aggvals->value, value))
	return true;
      aggvals = aggvals->next;
    }

  return false;
}
/* Decide whether to create a special version of NODE for value VAL of parameter
   at the given INDEX.  If OFFSET is -1, the value is for the parameter itself,
   otherwise it is stored at the given OFFSET of the parameter.  KNOWN_CSTS,
   KNOWN_BINFOS and KNOWN_AGGS describe the other already known values.  */

static bool
decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
		    struct ipcp_value *val, vec<tree> known_csts,
		    vec<tree> known_binfos)
{
  struct ipa_agg_replacement_value *aggvals;
  int freq_sum, caller_count;
  gcov_type count_sum;
  vec<cgraph_edge_p> callers;
  vec<tree> kv;

  if (val->spec_node)
    {
      perhaps_add_new_callers (node, val);
      return false;
    }
  else if (val->local_size_cost + overall_size > max_new_size)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "   Ignoring candidate value because "
		 "max_new_size would be reached with %li.\n",
		 val->local_size_cost + overall_size);
      return false;
    }
  else if (!get_info_about_necessary_edges (val, &freq_sum, &count_sum,
					    &caller_count))
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, " - considering value ");
      print_ipcp_constant_value (dump_file, val->value);
      fprintf (dump_file, " for parameter ");
      print_generic_expr (dump_file, ipa_get_param (IPA_NODE_REF (node),
						    index), 0);
      if (offset != -1)
	fprintf (dump_file, ", offset: " HOST_WIDE_INT_PRINT_DEC, offset);
      fprintf (dump_file, " (caller_count: %i)\n", caller_count);
    }

  if (!good_cloning_opportunity_p (node, val->local_time_benefit,
				   freq_sum, count_sum,
				   val->local_size_cost)
      && !good_cloning_opportunity_p (node,
				      val->local_time_benefit
				      + val->prop_time_benefit,
				      freq_sum, count_sum,
				      val->local_size_cost
				      + val->prop_size_cost))
    return false;

  if (dump_file)
    fprintf (dump_file, "  Creating a specialized node of %s/%i.\n",
	     cgraph_node_name (node), node->uid);

  callers = gather_edges_for_value (val, caller_count);
  kv = known_csts.copy ();
  move_binfos_to_values (kv, known_binfos);
  if (offset == -1)
    kv[index] = val->value;
  find_more_scalar_values_for_callers_subset (node, kv, callers);
  aggvals = find_aggregate_values_for_callers_subset (node, callers);
  gcc_checking_assert (offset == -1
		       || ipcp_val_in_agg_replacements_p (aggvals, index,
							  offset, val->value));
  val->spec_node = create_specialized_node (node, kv, aggvals, callers);
  overall_size += val->local_size_cost;

  /* TODO: If for some lattice there is only one other known value
     left, make a special node for it too. */

  return true;
}
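
/* Note on the profitability test above: a value is cloned for if it passes
   good_cloning_opportunity_p either with its local benefit alone, or with the
   local benefit combined with everything that propagating it would make
   available in callees (the prop_* sums computed by propagate_effects), in
   each case weighed against the corresponding size cost.  */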
/* Decide whether and what specialized clones of NODE should be created.  */

static bool
decide_whether_version_node (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);
  vec<tree> known_csts, known_binfos;
  vec<ipa_agg_jump_function_t> known_aggs = vNULL;
  bool ret = false;

  if (count == 0)
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEvaluating opportunities for %s/%i.\n",
	     cgraph_node_name (node), node->uid);

  gather_context_independent_values (info, &known_csts, &known_binfos,
				  info->do_clone_for_all_contexts ? &known_aggs
				  : NULL, NULL);

  for (i = 0; i < count; i++)
    {
      struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
      struct ipcp_lattice *lat = &plats->itself;
      struct ipcp_value *val;

      if (!lat->bottom
	  && !known_csts[i]
	  && !known_binfos[i])
	for (val = lat->values; val; val = val->next)
	  ret |= decide_about_value (node, i, -1, val, known_csts,
				     known_binfos);

      if (!plats->aggs_bottom)
	{
	  struct ipcp_agg_lattice *aglat;
	  struct ipcp_value *val;
	  for (aglat = plats->aggs; aglat; aglat = aglat->next)
	    if (!aglat->bottom && aglat->values
		/* If the following is false, the one value is in
		   known_aggs.  */
		&& (plats->aggs_contain_variable
		    || !ipa_lat_is_single_const (aglat)))
	      for (val = aglat->values; val; val = val->next)
		ret |= decide_about_value (node, i, aglat->offset, val,
					   known_csts, known_binfos);
	}
      info = IPA_NODE_REF (node);
    }

  if (info->do_clone_for_all_contexts)
    {
      struct cgraph_node *clone;
      vec<cgraph_edge_p> callers;

      if (dump_file)
	fprintf (dump_file, " - Creating a specialized node of %s/%i "
		 "for all known contexts.\n", cgraph_node_name (node),
		 node->uid);

      callers = collect_callers_of_node (node);
      move_binfos_to_values (known_csts, known_binfos);
      clone = create_specialized_node (node, known_csts,
			       known_aggs_to_agg_replacement_list (known_aggs),
			       callers);
      info = IPA_NODE_REF (node);
      info->do_clone_for_all_contexts = false;
      IPA_NODE_REF (clone)->is_all_contexts_clone = true;
      ret = true;
    }
  else
    known_csts.release ();

  known_binfos.release ();
  return ret;
}

/* Transitively mark all callees of NODE within the same SCC as not dead.  */

static void
spread_undeadness (struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    if (edge_within_scc (cs))
      {
	struct cgraph_node *callee;
	struct ipa_node_params *info;

	callee = cgraph_function_node (cs->callee, NULL);
	info = IPA_NODE_REF (callee);

	if (info->node_dead)
	  {
	    info->node_dead = 0;
	    spread_undeadness (callee);
	  }
      }
}
/* Return true if NODE has a caller from outside of its SCC that is not
   dead.  Worker callback for cgraph_for_node_and_aliases.  */

static bool
has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
				      void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (cs->caller->thunk.thunk_p
	&& cgraph_for_node_and_aliases (cs->caller,
					has_undead_caller_from_outside_scc_p,
					NULL, true))
      return true;
    else if (!edge_within_scc (cs)
	     && !IPA_NODE_REF (cs->caller)->node_dead)
      return true;
  return false;
}
/* Identify nodes within the same SCC as NODE which are no longer needed
   because of new clones and will be removed as unreachable.  */

static void
identify_dead_nodes (struct cgraph_node *node)
{
  struct cgraph_node *v;
  for (v = node; v; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
    if (cgraph_will_be_removed_from_program_if_no_direct_calls (v)
	&& !cgraph_for_node_and_aliases (v,
					 has_undead_caller_from_outside_scc_p,
					 NULL, true))
      IPA_NODE_REF (v)->node_dead = 1;

  for (v = node; v; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
    if (!IPA_NODE_REF (v)->node_dead)
      spread_undeadness (v);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      for (v = node; v; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
	if (IPA_NODE_REF (v)->node_dead)
	  fprintf (dump_file, "  Marking node as dead: %s/%i.\n",
		   cgraph_node_name (v), v->uid);
    }
}
/* The decision stage.  Iterate over the topological order of call graph nodes
   TOPO and make specialized clones if deemed beneficial.  */

static void
ipcp_decision_stage (struct topo_info *topo)
{
  int i;

  if (dump_file)
    fprintf (dump_file, "\nIPA decision stage:\n\n");

  for (i = topo->nnodes - 1; i >= 0; i--)
    {
      struct cgraph_node *node = topo->order[i];
      bool change = false, iterate = true;

      while (iterate)
	{
	  struct cgraph_node *v;
	  iterate = false;
	  for (v = node; v; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
	    if (cgraph_function_with_gimple_body_p (v)
		&& ipcp_versionable_function_p (v))
	      iterate |= decide_whether_version_node (v);

	  change |= iterate;
	}
      if (change)
	identify_dead_nodes (node);
    }
}
3510 struct cgraph_2edge_hook_list
*edge_duplication_hook_holder
;
3511 struct topo_info topo
;
3513 ipa_check_create_node_params ();
3514 ipa_check_create_edge_args ();
3515 grow_next_edge_clone_vector ();
3516 edge_duplication_hook_holder
=
3517 cgraph_add_edge_duplication_hook (&ipcp_edge_duplication_hook
, NULL
);
3518 ipcp_values_pool
= create_alloc_pool ("IPA-CP values",
3519 sizeof (struct ipcp_value
), 32);
3520 ipcp_sources_pool
= create_alloc_pool ("IPA-CP value sources",
3521 sizeof (struct ipcp_value_source
), 64);
3522 ipcp_agg_lattice_pool
= create_alloc_pool ("IPA_CP aggregate lattices",
3523 sizeof (struct ipcp_agg_lattice
),
3527 fprintf (dump_file
, "\nIPA structures before propagation:\n");
3528 if (dump_flags
& TDF_DETAILS
)
3529 ipa_print_all_params (dump_file
);
3530 ipa_print_all_jump_functions (dump_file
);
3533 /* Topological sort. */
3534 build_toporder_info (&topo
);
3535 /* Do the interprocedural propagation. */
3536 ipcp_propagate_stage (&topo
);
3537 /* Decide what constant propagation and cloning should be performed. */
3538 ipcp_decision_stage (&topo
);
3540 /* Free all IPCP structures. */
3541 free_toporder_info (&topo
);
3542 next_edge_clone
.release ();
3543 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder
);
3544 ipa_free_all_structures_after_ipa_cp ();
3546 fprintf (dump_file
, "\nIPA constant propagation end\n");
/* Initialization and computation of IPCP data structures.  This is the initial
   intraprocedural analysis of functions, which gathers information to be
   propagated later on.  */

static void
ipcp_generate_summary (void)
{
  struct cgraph_node *node;

  if (dump_file)
    fprintf (dump_file, "\nIPA constant propagation start:\n");
  ipa_register_cgraph_hooks ();

  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    {
      node->local.versionable
	= tree_versionable_function_p (node->symbol.decl);
      ipa_analyze_node (node);
    }
}

/* Write ipcp summary for nodes in SET.  */

static void
ipcp_write_summary (void)
{
  ipa_prop_write_jump_functions ();
}

/* Read ipcp summary.  */

static void
ipcp_read_summary (void)
{
  ipa_prop_read_jump_functions ();
}
/* Gate for IPCP optimization.  */

static bool
cgraph_gate_cp (void)
{
  /* FIXME: We should remove the optimize check after we ensure we never run
     IPA passes when not optimizing.  */
  return flag_ipa_cp && optimize;
}

struct ipa_opt_pass_d pass_ipa_cp =
{
 {
  IPA_PASS,
  "cp",				/* name */
  OPTGROUP_NONE,		/* optinfo_flags */
  cgraph_gate_cp,		/* gate */
  ipcp_driver,			/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_IPA_CONSTANT_PROP,		/* tv_id */
  0,				/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_remove_functions | TODO_ggc_collect /* todo_flags_finish */
 },
 ipcp_generate_summary,			/* generate_summary */
 ipcp_write_summary,			/* write_summary */
 ipcp_read_summary,			/* read_summary */
 ipa_prop_write_all_agg_replacement,	/* write_optimization_summary */
 ipa_prop_read_all_agg_replacement,	/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 ipcp_transform_function,		/* function_transform */
 NULL,					/* variable_transform */
};